Repository 'sklearn_mlxtend_association_rules'
hg clone https://toolshed.g2.bx.psu.edu/repos/bgruening/sklearn_mlxtend_association_rules

Changeset 0:af2624d5ab32 (2021-05-01)
Next changeset 1:77f046dad222 (2021-08-27)
Commit message:
"planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit ea12f973df4b97a2691d9e4ce6bf6fae59d57717"
added:
README.rst
association_rules.py
association_rules.xml
fitted_model_eval.py
keras_deep_learning.py
keras_macros.xml
keras_train_and_eval.py
label_encoder.py
main_macros.xml
ml_visualization_ex.py
model_prediction.py
pca.py
search_model_validation.py
simple_model_fit.py
stacking_ensembles.py
test-data/GridSearchCV.zip
test-data/LinearRegression01.zip
test-data/LinearRegression02.zip
test-data/RF01704.fasta
test-data/RFE.zip
test-data/RandomForestClassifier.zip
test-data/RandomForestRegressor01.zip
test-data/StackingCVRegressor01.zip
test-data/StackingCVRegressor02.zip
test-data/StackingRegressor02.zip
test-data/StackingVoting03.zip
test-data/XGBRegressor01.zip
test-data/abc_model01
test-data/abc_result01
test-data/abr_model01
test-data/abr_result01
test-data/accuracy_score.txt
test-data/auc.txt
test-data/average_precision_score.txt
test-data/best_estimator_.zip
test-data/best_params_.txt
test-data/best_score_.tabular
test-data/blobs.txt
test-data/brier_score_loss.txt
test-data/circles.txt
test-data/class.txt
test-data/classification_report.txt
test-data/cluster_result01.txt
test-data/cluster_result02.txt
test-data/cluster_result03.txt
test-data/cluster_result04.txt
test-data/cluster_result05.txt
test-data/cluster_result06.txt
test-data/cluster_result07.txt
test-data/cluster_result08.txt
test-data/cluster_result09.txt
test-data/cluster_result10.txt
test-data/cluster_result11.txt
test-data/cluster_result12.txt
test-data/cluster_result13.txt
test-data/cluster_result14.txt
test-data/cluster_result15.txt
test-data/cluster_result16.txt
test-data/cluster_result17.txt
test-data/cluster_result18.txt
test-data/cluster_result19.txt
test-data/cluster_result20.txt
test-data/cluster_result21.txt
test-data/confusion_matrix.txt
test-data/converter_result01.json
test-data/converter_result02.json
test-data/csc_sparse1.mtx
test-data/csc_sparse2.mtx
test-data/csc_stack_result01.mtx
test-data/csr_sparse1.mtx
test-data/csr_sparse2.mtx
test-data/csr_stack_result01.mtx
test-data/deepsear_1feature.json
test-data/empty_file.txt
test-data/f1_score.txt
test-data/fbeta_score.txt
test-data/feature_importances_.tabular
test-data/feature_selection_result01
test-data/feature_selection_result02
test-data/feature_selection_result03
test-data/feature_selection_result04
test-data/feature_selection_result05
test-data/feature_selection_result06
test-data/feature_selection_result07
test-data/feature_selection_result08
test-data/feature_selection_result09
test-data/feature_selection_result10
test-data/feature_selection_result11
test-data/feature_selection_result12
test-data/feature_selection_result13
test-data/final_estimator.zip
test-data/fitted_keras_g_regressor01.zip
test-data/fitted_model_eval01.tabular
test-data/friedman1.txt
test-data/friedman2.txt
test-data/friedman3.txt
test-data/gaus.txt
test-data/gbc_model01
test-data/gbc_result01
test-data/gbr_model01
test-data/gbr_prediction_result01.tabular
test-data/get_params.tabular
test-data/get_params01.tabular
test-data/get_params02.tabular
test-data/get_params03.tabular
test-data/get_params04.tabular
test-data/get_params05.tabular
test-data/get_params06.tabular
test-data/get_params07.tabular
test-data/get_params08.tabular
test-data/get_params09.tabular
test-data/get_params10.tabular
test-data/get_params11.tabular
test-data/get_params12.tabular
test-data/glm_model01
test-data/glm_model02
test-data/glm_model03
test-data/glm_model04
test-data/glm_model05
test-data/glm_model06
test-data/glm_model07
test-data/glm_model08
test-data/glm_result01
test-data/glm_result02
test-data/glm_result03
test-data/glm_result04
test-data/glm_result05
test-data/glm_result06
test-data/glm_result07
test-data/glm_result08
test-data/grid_scores_.tabular
test-data/hamming_loss.txt
test-data/hastie.txt
test-data/hinge_loss.txt
test-data/imblearn_X.tabular
test-data/imblearn_y.tabular
test-data/jaccard_similarity_score.txt
test-data/keras01.json
test-data/keras02.json
test-data/keras03.json
test-data/keras04.json
test-data/keras_batch_model01
test-data/keras_batch_model02
test-data/keras_batch_model03
test-data/keras_batch_model04
test-data/keras_batch_params01.tabular
test-data/keras_batch_params04.tabular
test-data/keras_model01
test-data/keras_model02
test-data/keras_model04
test-data/keras_params04.tabular
test-data/keras_prefitted01.zip
test-data/keras_save_weights01.h5
test-data/keras_train_eval_y_true02.tabular
test-data/lda_model01
test-data/lda_model02
test-data/lda_prediction_result01.tabular
test-data/lda_prediction_result02.tabular
test-data/le_input_w_header.tabular
test-data/le_input_wo_header.tabular
test-data/le_output.tabular
test-data/lgb_class_model.txt
test-data/lgb_prediction_result01.tabular
test-data/lgb_regr_model.txt
test-data/log_loss.txt
test-data/matthews_corrcoef.txt
test-data/mba_input_int_w.tabular
test-data/mba_input_int_wo.tabular
test-data/mba_input_str_w.tabular
test-data/mba_input_str_wo.tabular
test-data/mba_out_str.tabular
test-data/mba_output_int.tabular
test-data/mba_output_str.tabular
test-data/ml_confusion_predicted.tabular
test-data/ml_confusion_true.tabular
test-data/ml_confusion_viz.png
test-data/ml_vis01.html
test-data/ml_vis02.html
test-data/ml_vis03.html
test-data/ml_vis04.html
test-data/ml_vis05.html
test-data/ml_vis05.png
test-data/model_fit01
test-data/model_fit02
test-data/model_fit02.h5
test-data/model_pred01.tabular
test-data/model_pred02.tabular
test-data/moons.txt
test-data/mv_result02.tabular
test-data/mv_result03.tabular
test-data/mv_result05.tabular
test-data/named_steps.txt
test-data/nn_model01
test-data/nn_model02
test-data/nn_model03
test-data/nn_prediction_result01.tabular
test-data/nn_prediction_result02.tabular
test-data/nn_prediction_result03.tabular
test-data/numeric_values.tabular
test-data/ohe_in_w_header.tabular
test-data/ohe_in_wo_header.tabular
test-data/ohe_out_4.tabular
test-data/ohe_out_5.tabular
test-data/pca_classical_header_names_output.dat
test-data/pca_classical_output.dat
test-data/pca_incremental_header_names_output.dat
test-data/pca_incremental_output.dat
test-data/pca_input.dat
test-data/pca_input_with_headers.dat
test-data/pickle_blacklist
test-data/pipeline01
test-data/pipeline02
test-data/pipeline03
test-data/pipeline04
test-data/pipeline05
test-data/pipeline06
test-data/pipeline07
test-data/pipeline08
test-data/pipeline09
test-data/pipeline10
test-data/pipeline11
test-data/pipeline12
test-data/pipeline14
test-data/pipeline15
test-data/pipeline16
test-data/pipeline17
test-data/pipeline_params05.tabular
test-data/pipeline_params18
test-data/precision_recall_curve.txt
test-data/precision_recall_fscore_support.txt
test-data/precision_score.txt
test-data/predicted_header.tabular
test-data/prp_model01
test-data/prp_model02
test-data/prp_model03
test-data/prp_model04
test-data/prp_model05
test-data/prp_model06
test-data/prp_model07
test-data/prp_model08
test-data/prp_model09
test-data/prp_result01
test-data/prp_result02
test-data/prp_result03
test-data/prp_result04
test-data/prp_result05
test-data/prp_result06
test-data/prp_result07
test-data/prp_result08
test-data/prp_result09
test-data/prp_result10
test-data/pw_metric01.tabular
test-data/pw_metric02.tabular
test-data/pw_metric03.tabular
test-data/qda_model01
test-data/qda_prediction_result01.tabular
test-data/ranking_.tabular
test-data/recall_score.txt
test-data/regression.txt
test-data/regression_X.tabular
test-data/regression_groups.tabular
test-data/regression_metrics_result01
test-data/regression_metrics_result02
test-data/regression_metrics_result03
test-data/regression_metrics_result04
test-data/regression_metrics_result05
test-data/regression_metrics_result06
test-data/regression_test.tabular
test-data/regression_test_X.tabular
test-data/regression_test_y.tabular
test-data/regression_train.tabular
test-data/regression_y.tabular
test-data/regression_y_split_test01.tabular
test-data/rfc_model01
test-data/rfc_result01
test-data/rfc_result02
test-data/rfr_model01
test-data/rfr_result01
test-data/roc_auc_score.txt
test-data/roc_curve.txt
test-data/scurve.txt
test-data/searchCV01
test-data/searchCV02
test-data/sparse.mtx
test-data/sparse_u.txt
test-data/svc_model01
test-data/svc_model02
test-data/svc_model03
test-data/svc_prediction_result01.tabular
test-data/svc_prediction_result02.tabular
test-data/svc_prediction_result03.tabular
test-data/swiss_r.txt
test-data/test.tabular
test-data/test2.tabular
test-data/test3.tabular
test-data/test_set.tabular
test-data/train.tabular
test-data/train_set.tabular
test-data/train_test_eval01.tabular
test-data/train_test_eval03.tabular
test-data/train_test_eval_model01
test-data/train_test_eval_weights01.h5
test-data/train_test_eval_weights02.h5
test-data/train_test_split_test01.tabular
test-data/train_test_split_test02.tabular
test-data/train_test_split_test03.tabular
test-data/train_test_split_train01.tabular
test-data/train_test_split_train02.tabular
test-data/train_test_split_train03.tabular
test-data/true_header.tabular
test-data/vectorizer_result01.mtx
test-data/vectorizer_result02.mtx
test-data/vectorizer_result03.mtx
test-data/vectorizer_result04.mtx
test-data/y.tabular
test-data/y_score.tabular
test-data/y_true.tabular
test-data/zero_one_loss.txt
to_categorical.py
train_test_eval.py
train_test_split.py
diff -r 000000000000 -r af2624d5ab32 README.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README.rst Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,146 @@
+Galaxy wrapper for scikit-learn library
+***************************************
+
+Contents
+========
+
+- `What is scikit-learn?`_
+ - `Scikit-learn main package groups`_
+ - `Tools offered by this wrapper`_
+
+- `Machine learning workflows`_
+ - `Supervised learning workflows`_
+ - `Unsupervised learning workflows`_
+
+
+____________________________
+
+
+.. _What is scikit-learn?:
+
+What is scikit-learn?
+=====================
+
+Scikit-learn is an open-source machine learning library for the Python programming language. It offers various algorithms for performing supervised and unsupervised learning as well as data preprocessing and transformation, model selection and evaluation, and dataset utilities. It is built upon the SciPy (Scientific Python) library.
+
+Scikit-learn source code can be accessed at https://github.com/scikit-learn/scikit-learn.
+Detailed installation instructions can be found at http://scikit-learn.org/stable/install.html.
+
+
+.. _Scikit-learn main package groups:
+
+Scikit-learn main package groups
+================================
+
+Scikit-learn provides several main groups of related operations.
+These are:
+
+- Classification
+    - Identifying to which category an object belongs.
+- Regression
+    - Predicting a continuous-valued attribute associated with an object.
+- Clustering
+    - Automatic grouping of similar objects into sets.
+- Preprocessing
+    - Feature extraction and normalization.
+- Model selection and evaluation
+    - Comparing, validating and choosing parameters and models.
+- Dimensionality reduction
+    - Reducing the number of random variables to consider.
+
+Each group consists of a number of well-known algorithms from that category. For example, hierarchical, spectral, k-means, and other clustering methods can be found in the sklearn.cluster package.
+
+
+.. _Tools offered by this wrapper:
+
+Available tools in the current wrapper
+======================================
+
+The current release of the wrapper offers a subset of the packages from the scikit-learn library. You can find:
+
+- A subset of classification metric functions
+- Linear and quadratic discriminant classifiers
+- Random forest and AdaBoost classifiers and regressors
+- All the clustering methods
+- All support vector machine classifiers
+- A subset of data preprocessing estimator classes
+- Pairwise metric measurement functions
+
+In addition, several tools for performing matrix operations, generating problem-specific datasets, encoding text, and extracting features are provided to help the user with more advanced operations.
+
+.. _Machine learning workflows:
+
+Machine learning workflows
+==========================
+
+Machine learning is about processes. No matter what machine learning algorithm we use, we can apply typical workflows and dataflows to produce more robust models and better predictions.
+Here we discuss supervised and unsupervised learning workflows.
+
+.. _Supervised learning workflows:
+
+Supervised machine learning workflows
+=====================================
+
+**What is supervised learning?**
+
+In this machine learning task, given labeled sample data, the aim is to build a model that can predict the labels of new observations.
+In practice, there are five steps that take us from raw input data to reasonable predictions for new samples:
+
+1. Preprocess the data::
+
+    * Change the collected data into the proper format and datatype.
+    * Adjust the data quality by filling in missing values, performing
+      the required scaling and normalization, etc.
+    * Extract the features that are most meaningful for the learning task.
+    * Split the ready dataset into training and test samples.
+
+2. Choose an algorithm::
+
+    * These factors help one to choose a learning algorithm:
+        - Nature of the data (e.g. linear vs. nonlinear data)
+        - Structure of the predicted output (e.g. binary vs. multilabel classification)
+        - Memory and time usage of the training
+        - Predictive accuracy on new data
+        - Interpretability of the predictions
+
+3. Choose a validation method
+
+ Every machine learning model should be evaluated before being put into practical use.
+ There are numerous performance metrics to evaluate machine learning models.
+ For supervised learning, usually classification or regression metrics are used.
+
+ A validation method helps to evaluate the performance metrics of a trained model in order
+ to optimize its performance or ultimately switch to a more efficient model.
+ Cross-validation is a well-known validation method.
+
+4. Fit a model
+
+   Given the learning algorithm, validation method, and performance metric(s),
+   repeat the following steps::
+
+    * Train the model.
+    * Evaluate based on metrics.
+    * Optimize until satisfied.
+
+5. Use the fitted model for prediction::
+
+ This is the final evaluation, in which the optimized model is used to make predictions
+ on unseen (here, test) samples. After this, the model is put into production.
+
+.. _Unsupervised learning workflows:
+
+Unsupervised machine learning workflows
+=======================================
+
+**What is unsupervised learning?**
+
+Unlike supervised learning, and as is more likely in real life, the initial data here are not labeled.
+The task is to extract the structure from the data and group the samples based on their similarities.
+Clustering and dimensionality reduction are two famous examples of unsupervised learning tasks.
+
+In this case, the workflow is as follows::
+
+    * Preprocess the data (without splitting to train and test).
+    * Train a model.
+    * Evaluate and tune parameters.
+    * Analyse the model and test on real data.
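The five steps of the supervised workflow above map directly onto scikit-learn's API. The following is a minimal sketch, assuming a synthetic dataset and an arbitrary choice of estimator (neither is prescribed by this wrapper):

    # Hedged sketch of the five-step supervised workflow described in the README;
    # dataset, estimator, and hyperparameters are illustrative assumptions.
    from sklearn.datasets import make_classification
    from sklearn.ensemble import RandomForestClassifier
    from sklearn.metrics import accuracy_score
    from sklearn.model_selection import cross_val_score, train_test_split
    from sklearn.preprocessing import StandardScaler

    # 1) Preprocess: scale features and split into training and test samples
    X, y = make_classification(n_samples=200, n_features=10, random_state=0)
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    # 2) Choose an algorithm
    clf = RandomForestClassifier(n_estimators=100, random_state=0)

    # 3) Choose a validation method: 5-fold cross-validation on the training set
    cv_scores = cross_val_score(clf, X_train, y_train, cv=5)

    # 4) Fit a model (repeat fit/evaluate/tune until satisfied)
    clf.fit(X_train, y_train)

    # 5) Final evaluation on unseen test samples
    print(cv_scores.mean(), accuracy_score(y_test, clf.predict(X_test)))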
diff -r 000000000000 -r af2624d5ab32 association_rules.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/association_rules.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,116 @@
+import argparse
+import json
+import warnings
+
+import pandas as pd
+from mlxtend.frequent_patterns import association_rules, fpgrowth
+from mlxtend.preprocessing import TransactionEncoder
+
+
+def main(inputs, infile, outfile, min_support=0.5, min_confidence=0.5, min_lift=1.0, min_conviction=1.0, max_length=None):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile : str
+        File path to the input transactions file
+
+    outfile : str
+        File path to the output rules file
+
+    min_support: float
+        Minimum support
+
+    min_confidence: float
+        Minimum confidence
+
+    min_lift: float
+        Minimum lift
+
+    min_conviction: float
+        Minimum conviction
+
+    max_length: int
+        Maximum length
+
+    """
+    warnings.simplefilter('ignore')
+
+    with open(inputs, 'r') as param_handler:
+        params = json.load(param_handler)
+
+    input_header = params['header0']
+    header = 'infer' if input_header else None
+
+    with open(infile) as fp:
+        lines = fp.read().splitlines()
+
+    if header is not None:
+        lines = lines[1:]
+
+    dataset = []
+    for line in lines:
+        line_items = line.split("\t")
+        dataset.append(line_items)
+
+    # TransactionEncoder learns the unique labels in the dataset and transforms the
+    # input dataset (a Python list of lists) into a one-hot encoded NumPy boolean array
+    te = TransactionEncoder()
+    te_ary = te.fit_transform(dataset)
+
+    # Turn the encoded NumPy array into a DataFrame
+    df = pd.DataFrame(te_ary, columns=te.columns_)
+
+    # Extract frequent itemsets for association rule mining
+    # use_colnames: Use DataFrames' column names in the returned DataFrame instead of column indices
+    frequent_itemsets = fpgrowth(df, min_support=min_support, use_colnames=True, max_len=max_length)
+
+    # Get association rules, with confidence larger than min_confidence
+    rules = association_rules(frequent_itemsets, metric="confidence", min_threshold=min_confidence)
+
+    # Filter association rules, keeping rules with lift and conviction larger than min_lift and min_conviction
+    rules = rules[(rules['lift'] >= min_lift) & (rules['conviction'] >= min_conviction)]
+
+    # Convert columns from frozenset to list (more readable)
+    rules['antecedents'] = rules['antecedents'].apply(list)
+    rules['consequents'] = rules['consequents'].apply(list)
+
+    # The next 3 steps are intended to fix the order of the association
+    # rules generated, so tests that rely on diff'ing the generated output
+    # against an expected output can pass
+
+    # 1) Sort entry in every row/column for columns 'antecedents' and 'consequents'
+    rules['antecedents'] = rules['antecedents'].apply(lambda row: sorted(row))
+    rules['consequents'] = rules['consequents'].apply(lambda row: sorted(row))
+
+    # 2) Create two temporary string columns to sort on
+    rules['ant_str'] = rules['antecedents'].apply(lambda row: " ".join(row))
+    rules['con_str'] = rules['consequents'].apply(lambda row: " ".join(row))
+
+    # 3) Sort results so they are reproducible
+    rules.sort_values(by=['ant_str', 'con_str'], inplace=True)
+    del rules['ant_str']
+    del rules['con_str']
+    rules.reset_index(drop=True, inplace=True)
+
+    # Write association rules and metrics to file
+    rules.to_csv(outfile, sep="\t", index=False)
+
+
+if __name__ == '__main__':
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-y", "--infile", dest="infile", required=True)
+    aparser.add_argument("-o", "--outfile", dest="outfile", required=True)
+    aparser.add_argument("-s", "--support", dest="support", default=0.5)
+    aparser.add_argument("-c", "--confidence", dest="confidence", default=0.5)
+    aparser.add_argument("-l", "--lift", dest="lift", default=1.0)
+    aparser.add_argument("-v", "--conviction", dest="conviction", default=1.0)
+    aparser.add_argument("-t", "--length", dest="length", default=5)
+    args = aparser.parse_args()
+
+    main(args.inputs, args.infile, args.outfile,
+         min_support=float(args.support), min_confidence=float(args.confidence),
+         min_lift=float(args.lift), min_conviction=float(args.conviction), max_length=int(args.length))
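Stripped of the Galaxy plumbing, the mining pipeline above reduces to three mlxtend calls. A minimal standalone sketch, using made-up transactions in place of the tool's tabular input:

    # TransactionEncoder -> fpgrowth -> association_rules, as in the script above;
    # the transactions are illustrative example data.
    import pandas as pd
    from mlxtend.frequent_patterns import association_rules, fpgrowth
    from mlxtend.preprocessing import TransactionEncoder

    dataset = [
        ["milk", "bread"],
        ["milk", "diapers", "beer"],
        ["bread", "diapers", "beer"],
    ]

    # One-hot encode the transactions into a boolean DataFrame
    te = TransactionEncoder()
    df = pd.DataFrame(te.fit_transform(dataset), columns=te.columns_)

    # Frequent itemsets with support >= 0.5 and at most 5 items each
    frequent_itemsets = fpgrowth(df, min_support=0.5, use_colnames=True, max_len=5)

    # Rules with confidence >= 0.5, then filtered on lift and conviction
    rules = association_rules(frequent_itemsets, metric="confidence", min_threshold=0.5)
    rules = rules[(rules["lift"] >= 1.0) & (rules["conviction"] >= 1.0)]
    print(rules[["antecedents", "consequents", "support", "confidence"]])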
diff -r 000000000000 -r af2624d5ab32 association_rules.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/association_rules.xml Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,119 @@
+<tool id="sklearn_mlxtend_association_rules" name="Association rules" version="@VERSION@">
+    <description>Extract frequent itemsets and generate association rules</description>
+    <macros>
+        <import>main_macros.xml</import>
+    </macros>
+    <expand macro="python_requirements"/>
+    <expand macro="macro_stdio"/>
+    <version_command>echo "@VERSION@"</version_command>
+    <command detect_errors="exit_code"><![CDATA[
+        python '$__tool_directory__/association_rules.py'
+            --inputs '$inputs'
+            --infile '$infile'
+            --outfile '$outfile'
+            #if $support
+            --support '$support'
+            #end if
+            #if $confidence
+            --confidence '$confidence'
+            #end if
+            #if $lift
+            --lift '$lift'
+            #end if
+            #if $conviction
+            --conviction '$conviction'
+            #end if
+            #if $length
+            --length '$length'
+            #end if
+    ]]>
+    </command>
+    <configfiles>
+        <inputs name="inputs" />
+    </configfiles>
+    <inputs>
+        <param name="infile" type="data" format="tabular" label="Input file"/>
+        <param name="header0" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="true" label="Does the dataset contain header?"/>
+        <param name="support" type="float" optional="true" label="Minimum support"/>
+        <param name="confidence" type="float" optional="true" label="Minimum confidence"/>
+        <param name="lift" type="float" optional="true" label="Minimum lift"/>
+        <param name="conviction" type="float" optional="true" label="Minimum conviction"/>
+        <param name="length" type="integer" optional="true" label="Maximum length"/>
+    </inputs>
+    <outputs>
+        <data name="outfile" format="tabular"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="infile" value="mba_input_str_w.tabular" ftype="tabular"/>
+            <param name="header0" value="true"/>
+            <param name="support" value="0.5"/>
+            <param name="confidence" value="0.5"/>
+            <param name="lift" value="1.1"/>
+            <param name="conviction" value="1.1"/>
+            <param name="length" value="5"/>
+            <output name="outfile" file="mba_out_str.tabular" ftype="tabular"/>
+        </test>
+        <test>
+            <param name="infile" value="mba_input_int_w.tabular" ftype="tabular"/>
+            <param name="header0" value="true"/>
+            <param name="support" value="0.5"/>
+            <param name="confidence" value="0.5"/>
+            <param name="lift" value="1.1"/>
+            <param name="conviction" value="1.1"/>
+            <param name="length" value="5"/>
+            <output name="outfile" file="mba_output_int.tabular" ftype="tabular"/>
+        </test>
+        <test>
+            <param name="infile" value="mba_input_str_wo.tabular" ftype="tabular"/>
+            <param name="header0" value="false"/>
+            <param name="support" value="0.5"/>
+            <param name="confidence" value="0.5"/>
+            <param name="lift" value="1.1"/>
+            <param name="conviction" value="1.1"/>
+            <param name="length" value="5"/>
+            <output name="outfile" file="mba_output_str.tabular" ftype="tabular"/>
+        </test>
+        <test>
+            <param name="infile" value="mba_input_int_wo.tabular" ftype="tabular"/>
+            <param name="header0" value="false"/>
+            <param name="support" value="0.5"/>
+            <param name="confidence" value="0.5"/>
+            <param name="lift" value="1.1"/>
+            <param name="conviction" value="1.1"/>
+            <param name="length" value="5"/>
+            <output name="outfile" file="mba_output_int.tabular" ftype="tabular"/>
+        </test>
+    </tests>
+    <help><![CDATA[
+**What it does**
+
+Extract frequent itemsets and generate association rules
+
+from mlxtend.frequent_patterns import fpgrowth
+
+Extracts frequent itemsets for association rule mining. An itemset is considered "frequent" if it
+meets a user-specified support threshold. For instance, if the support threshold is set to 0.5 (50%),
+a frequent itemset is defined as a set of items that occur together in at least 50% of all transactions
+in the database. The maximum number of items per itemset can be capped via the length input parameter.
+
+from mlxtend.frequent_patterns import association_rules
+
+Generates association rules from frequent itemsets. Rule generation is a common task in the mining of 
+frequent patterns. An association rule is an implication expression of the form X->Y, where X and Y 
+are disjoint itemsets. A more concrete example based on consumer behaviour would be {Diapers}->{Beer} 
+suggesting that people who buy diapers are also likely to buy beer. To evaluate the "interest" of 
+such an association rule, different metrics have been developed, e.g., confidence, lift, and conviction.
+
+Arguments
+
+infile: Each line in infile contains (tab-separated) items in a transaction. Different lines/transactions
+can have a different/varying number of items.
+
+Returns
+
+outfile: A tab-separated file with one association rule per line, together with the values of various metrics.
+
+    ]]></help>
+    <expand macro="sklearn_citation"/>
+</tool>
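Outside Galaxy, the wrapper's command line can also be exercised directly. A sketch of such an invocation; the file names are hypothetical, and the params JSON only needs the `header0` key that association_rules.py actually reads:

    # Hypothetical standalone invocation of the script wrapped by this tool.
    import json
    import subprocess

    # Galaxy normally writes this configfile; `header0` mirrors the header checkbox.
    with open("params.json", "w") as f:
        json.dump({"header0": True}, f)

    subprocess.run(
        [
            "python", "association_rules.py",
            "--inputs", "params.json",
            "--infile", "transactions.tabular",  # tab-separated items, one transaction per line
            "--outfile", "rules.tabular",
            "--support", "0.5",
            "--confidence", "0.5",
            "--lift", "1.1",
            "--conviction", "1.1",
            "--length", "5",
        ],
        check=True,
    )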
diff -r 000000000000 -r af2624d5ab32 fitted_model_eval.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/fitted_model_eval.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,183 @@
+import argparse
+import json
+import warnings
+
+import pandas as pd
+from galaxy_ml.utils import get_scoring, load_model, read_columns
+from scipy.io import mmread
+from sklearn.metrics.scorer import _check_multimetric_scoring
+from sklearn.model_selection._validation import _score
+from sklearn.pipeline import Pipeline
+
+
+def _get_X_y(params, infile1, infile2):
+    """read from inputs and output X and y
+
+    Parameters
+    ----------
+    params : dict
+        Tool inputs parameter
+    infile1 : str
+        File path to dataset containing features
+    infile2 : str
+        File path to dataset containing target values
+
+    """
+    # store read dataframe object
+    loaded_df = {}
+
+    input_type = params["input_options"]["selected_input"]
+    # tabular input
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"][
+            "selected_column_selector_option"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
+        else:
+            c = None
+
+        df_key = infile1 + repr(header)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
+        loaded_df[df_key] = df
+
+        X = read_columns(df, c=c, c_option=column_option).astype(float)
+    # sparse input
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
+
+    # Get target y
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"][
+        "selected_column_selector_option2"
+    ]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
+    else:
+        c = None
+
+    df_key = infile2 + repr(header)
+    if df_key in loaded_df:
+        infile2 = loaded_df[df_key]
+    else:
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
+        loaded_df[df_key] = infile2
+
+    y = read_columns(
+        infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True
+    )
+    if len(y.shape) == 2 and y.shape[1] == 1:
+        y = y.ravel()
+
+    return X, y
+
+
+def main(
+    inputs,
+    infile_estimator,
+    outfile_eval,
+    infile_weights=None,
+    infile1=None,
+    infile2=None,
+):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile_estimator : str
+        File path to trained estimator input
+
+    outfile_eval : str
+        File path to save the evaluation results, tabular
+
+    infile_weights : str
+        File path to weights input
+
+    infile1 : str
+        File path to dataset containing features
+
+    infile2 : str
+        File path to dataset containing target values
+    """
+    warnings.filterwarnings("ignore")
+
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    X_test, y_test = _get_X_y(params, infile1, infile2)
+
+    # load model
+    with open(infile_estimator, "rb") as est_handler:
+        estimator = load_model(est_handler)
+
+    main_est = estimator
+    if isinstance(estimator, Pipeline):
+        main_est = estimator.steps[-1][-1]
+    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
+        if not infile_weights or infile_weights == "None":
+            raise ValueError(
+                "The selected model skeleton asks for weights, "
+                "but no dataset for weights was provided!"
+            )
+        main_est.load_weights(infile_weights)
+
+    # handle scorer, convert to scorer dict
+    # Check if scoring is specified
+    scoring = params["scoring"]
+    if scoring is not None:
+        # get_scoring() expects secondary_scoring to be a comma separated string (not a list)
+        # Check if secondary_scoring is specified
+        secondary_scoring = scoring.get("secondary_scoring", None)
+        if secondary_scoring is not None:
+            # If secondary_scoring is specified, convert the list into a comma-separated string
+            scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
+
+    scorer = get_scoring(scoring)
+    scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
+
+    if hasattr(estimator, "evaluate"):
+        scores = estimator.evaluate(
+            X_test, y_test=y_test, scorer=scorer, is_multimetric=True
+        )
+    else:
+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
+
+    # handle output
+    for name, score in scores.items():
+        scores[name] = [score]
+    df = pd.DataFrame(scores)
+    df = df[sorted(df.columns)]
+    df.to_csv(path_or_buf=outfile_eval, sep="\t", header=True, index=False)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
+    aparser.add_argument("-w", "--infile_weights", dest="infile_weights")
+    aparser.add_argument("-X", "--infile1", dest="infile1")
+    aparser.add_argument("-y", "--infile2", dest="infile2")
+    aparser.add_argument("-O", "--outfile_eval", dest="outfile_eval")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.outfile_eval,
+        infile_weights=args.infile_weights,
+        infile1=args.infile1,
+        infile2=args.infile2,
+    )
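The scorer handling above leans on private sklearn internals (`_check_multimetric_scoring`, `_score`) from older releases. The same one-row tabular output can be sketched with public API only; the estimator, data, and metric names below are illustrative assumptions:

    # Multi-metric evaluation using only public scikit-learn API.
    import pandas as pd
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import get_scorer
    from sklearn.model_selection import train_test_split

    X, y = make_classification(random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    estimator = LogisticRegression(max_iter=1000).fit(X_train, y_train)

    # One column per metric, one row of scores -- the same layout as outfile_eval
    scores = {
        name: [get_scorer(name)(estimator, X_test, y_test)]
        for name in ("accuracy", "f1", "roc_auc")
    }
    df = pd.DataFrame(scores)
    df = df[sorted(df.columns)]
    df.to_csv("eval_results.tabular", sep="\t", index=False)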
diff -r 000000000000 -r af2624d5ab32 keras_deep_learning.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/keras_deep_learning.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,412 @@
+import argparse
+import json
+import pickle
+import warnings
+from ast import literal_eval
+
+import keras
+import pandas as pd
+import six
+from galaxy_ml.utils import get_search_params, SafeEval, try_get_attr
+from keras.models import Model, Sequential
+
+safe_eval = SafeEval()
+
+
+def _handle_shape(literal):
+    """
+    Eval integer or list/tuple of integers from string
+
+    Parameters
+    ----------
+    literal : str.
+    """
+    literal = literal.strip()
+    if not literal:
+        return None
+    try:
+        return literal_eval(literal)
+    except NameError as e:
+        print(e)
+        return literal
+
+
+def _handle_regularizer(literal):
+    """
+    Construct regularizer from string literal
+
+    Parameters
+    ----------
+    literal : str. E.g. '(0.1, 0)'
+    """
+    literal = literal.strip()
+    if not literal:
+        return None
+
+    l1, l2 = literal_eval(literal)
+
+    if not l1 and not l2:
+        return None
+
+    if l1 is None:
+        l1 = 0.0
+    if l2 is None:
+        l2 = 0.0
+
+    return keras.regularizers.l1_l2(l1=l1, l2=l2)
+
+
+def _handle_constraint(config):
+    """
+    Construct constraint from galaxy tool parameters.
+    Assumes a correctly formatted dictionary.
+
+    Parameters
+    ----------
+    config : dict. E.g.
+        "bias_constraint":
+            {"constraint_options":
+                {"max_value":1.0,
+                "min_value":0.0,
+                "axis":"[0, 1, 2]"
+                },
+            "constraint_type":
+                "MinMaxNorm"
+            }
+    """
+    constraint_type = config["constraint_type"]
+    if constraint_type in ("None", ""):
+        return None
+
+    klass = getattr(keras.constraints, constraint_type)
+    options = config.get("constraint_options", {})
+    if "axis" in options:
+        options["axis"] = literal_eval(options["axis"])
+
+    return klass(**options)
+
+
+def _handle_lambda(literal):
+    return None
+
+
+def _handle_layer_parameters(params):
+    """
+    Handle all kinds of layer parameters
+    """
+    for key, value in six.iteritems(params):
+        if value in ("None", ""):
+            params[key] = None
+            continue
+
+        if type(value) in [int, float, bool] or (
+            type(value) is str and value.isalpha()
+        ):
+            continue
+
+        if (
+            key
+            in [
+                "input_shape",
+                "noise_shape",
+                "shape",
+                "batch_shape",
+                "target_shape",
+                "dims",
+                "kernel_size",
+                "strides",
+                "dilation_rate",
+                "output_padding",
+                "cropping",
+                "size",
+                "padding",
+                "pool_size",
+                "axis",
+                "shared_axes",
+            ]
+            and isinstance(value, str)
+        ):
+            params[key] = _handle_shape(value)
+
+        elif key.endswith("_regularizer") and isinstance(value, dict):
+            params[key] = _handle_regularizer(value)
+
+        elif key.endswith("_constraint") and isinstance(value, dict):
+            params[key] = _handle_constraint(value)
+
+        elif key == "function":  # No support for lambda/function eval
+            params.pop(key)
+
+    return params
+
+
+def get_sequential_model(config):
+    """
+    Construct keras Sequential model from Galaxy tool parameters
+
+    Parameters
+    ----------
+    config : dictionary, galaxy tool parameters loaded by JSON
+    """
+    model = Sequential()
+    input_shape = _handle_shape(config["input_shape"])
+    layers = config["layers"]
+    for layer in layers:
+        options = layer["layer_selection"]
+        layer_type = options.pop("layer_type")
+        klass = getattr(keras.layers, layer_type)
+        kwargs = options.pop("kwargs", "")
+
+        # parameters need special care
+        options = _ha
[... middle of diff truncated in source ...]
+nfig"]
+
+    options = {}
+
+    if json_model["class_name"] == "Sequential":
+        options["model_type"] = "sequential"
+        klass = Sequential
+    elif json_model["class_name"] == "Model":
+        options["model_type"] = "functional"
+        klass = Model
+    else:
+        raise ValueError("Unknown Keras model class: %s" % json_model["class_name"])
+
+    # load prefitted model
+    if inputs["mode_selection"]["mode_type"] == "prefitted":
+        estimator = klass.from_config(config)
+        estimator.load_weights(infile_weights)
+    # build train model
+    else:
+        cls_name = inputs["mode_selection"]["learning_type"]
+        klass = try_get_attr("galaxy_ml.keras_galaxy_models", cls_name)
+
+        options["loss"] = inputs["mode_selection"]["compile_params"]["loss"]
+        options["optimizer"] = (
+            inputs["mode_selection"]["compile_params"]["optimizer_selection"][
+                "optimizer_type"
+            ]
+        ).lower()
+
+        options.update(
+            (
+                inputs["mode_selection"]["compile_params"]["optimizer_selection"][
+                    "optimizer_options"
+                ]
+            )
+        )
+
+        train_metrics = inputs["mode_selection"]["compile_params"]["metrics"]
+        if train_metrics[-1] == "none":
+            train_metrics = train_metrics[:-1]
+        options["metrics"] = train_metrics
+
+        options.update(inputs["mode_selection"]["fit_params"])
+        options["seed"] = inputs["mode_selection"]["random_seed"]
+
+        if batch_mode:
+            generator = get_batch_generator(
+                inputs["mode_selection"]["generator_selection"]
+            )
+            options["data_batch_generator"] = generator
+            options["prediction_steps"] = inputs["mode_selection"]["prediction_steps"]
+            options["class_positive_factor"] = inputs["mode_selection"][
+                "class_positive_factor"
+            ]
+        estimator = klass(config, **options)
+        if outfile_params:
+            hyper_params = get_search_params(estimator)
+            # TODO: remove this after making `verbose` tunable
+            for h_param in hyper_params:
+                if h_param[1].endswith("verbose"):
+                    h_param[0] = "@"
+            df = pd.DataFrame(hyper_params, columns=["", "Parameter", "Value"])
+            df.to_csv(outfile_params, sep="\t", index=False)
+
+    print(repr(estimator))
+    # save model by pickle
+    with open(outfile, "wb") as f:
+        pickle.dump(estimator, f, pickle.HIGHEST_PROTOCOL)
+
+
+if __name__ == "__main__":
+    warnings.simplefilter("ignore")
+
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-m", "--model_json", dest="model_json")
+    aparser.add_argument("-t", "--tool_id", dest="tool_id")
+    aparser.add_argument("-w", "--infile_weights", dest="infile_weights")
+    aparser.add_argument("-o", "--outfile", dest="outfile")
+    aparser.add_argument("-p", "--outfile_params", dest="outfile_params")
+    args = aparser.parse_args()
+
+    input_json_path = args.inputs
+    with open(input_json_path, "r") as param_handler:
+        inputs = json.load(param_handler)
+
+    tool_id = args.tool_id
+    outfile = args.outfile
+    outfile_params = args.outfile_params
+    model_json = args.model_json
+    infile_weights = args.infile_weights
+
+    # for keras_model_config tool
+    if tool_id == "keras_model_config":
+        config_keras_model(inputs, outfile)
+
+    # for keras_model_builder tool
+    else:
+        batch_mode = False
+        if tool_id == "keras_batch_models":
+            batch_mode = True
+
+        build_keras_model(
+            inputs=inputs,
+            model_json=model_json,
+            infile_weights=infile_weights,
+            batch_mode=batch_mode,
+            outfile=outfile,
+            outfile_params=outfile_params,
+        )
diff -r 000000000000 -r af2624d5ab32 keras_macros.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/keras_macros.xml Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,985 @@
+<macros>
+  <token name="@KERAS_VERSION@">0.5.0</token>
+
+  <xml name="macro_stdio">
+    <stdio>
+        <exit_code range="1:" level="fatal" description="Error occurred. Please check Tool Standard Error"/>
+    </stdio>
+  </xml>
+
+  <xml name="keras_optimizer_common" token_lr="0.01">
+    <section name="optimizer_options" title="Optimizer Advanced Options" expanded="false">
+      <param argument="lr" type="float" value="@LR@" optional="true" label="Learning rate" help="float >= 0"/>
+      <yield/>
+      <!--param argument="clipnorm" type="float" value="" optional="true" label="clipnorm" help="float >= 0"/-->
+      <!--param argument="clipvalue" type="float" value="" optional="true" label="clipvalue" help="float >= 0"/-->
+    </section>
+  </xml>
+
+  <xml name="keras_optimizer_common_more" token_lr="0.001">
+    <expand macro="keras_optimizer_common" lr="@LR@">
+      <!--param argument="epsilon" type="float" value="" label="epsilon" optional="true" help="Fuzz factor. If `None`, defaults to `K.epsilon()`"/>-->
+      <param argument="decay" type="float" value="0" optional="true" label="decay" help="Learning rate decay over each update."/>
+      <yield/>
+    </expand>
+  </xml>
+
+  <xml name="keras_activations" token_none="true" token_tanh="false">
+    <param argument="activation" type="select" label="Activation function">
+      <option value="linear" selected="@NONE@">None / linear (default)</option>
+      <option value="softmax">softmax</option>
+      <option value="elu">elu</option>
+      <option value="selu">selu</option>
+      <option value="softplus">softplus</option>
+      <option value="softsign">softsign</option>
+      <option value="relu">relu</option>
+      <option value="tanh" selected="@TANH@">tanh</option>
+      <option value="sigmoid">sigmoid</option>
+      <option value="hard_sigmoid">hard_sigmoid</option>
+      <option value="exponential">exponential</option>
+    </param>
+  </xml>
+
+  <xml name="keras_initializers" token_argument="kernel_initializer" token_default_glorot_uniform="false" token_default_zeros="false" token_default_random_uniform="false" token_default_ones="false">
+    <param argument="@ARGUMENT@" type="select" label="@ARGUMENT@">
+      <option value="zeros" selected="@DEFAULT_ZEROS@">zero / zeros / Zeros</option>
+      <option value="ones" selected="@DEFAULT_ONES@">one / ones / Ones</option>
+      <option value="constant">constant / Constant</option>
+      <option value="random_normal">normal / random_normal / RandomNormal</option>
+      <option value="random_uniform" selected="@DEFAULT_RANDOM_UNIFORM@">uniform / random_uniform / RandomUniform</option>
+      <option value="truncated_normal">truncated_normal / TruncatedNormal</option>
+      <option value="orthogonal">orthogonal / Orthogonal</option>
+      <option value="identity">identity / Identity</option>
+      <option value="glorot_normal">glorot_normal</option>
+      <option value="glorot_uniform" selected="@DEFAULT_GLOROT_UNIFORM@">glorot_uniform</option>
+      <option value="he_normal">he_normal</option>
+      <option value="he_uniform">he_uniform</option>
+      <option value="lecun_normal">lecun_normal</option>
+      <option value="lecun_uniform">lecun_uniform</option>
+    </param>
+  </xml>
+
+  <xml name="keras_regularizers" token_argument="kernel_regularizer">
+    <param argument="@ARGUMENT@" type="text" value="(0., 0.)" optional="true" label="@ARGUMENT@"
+            help="(l1, l2). l1/l2: float; L1/l2 regularization factor. (0., 0.) is equivalent to `None`"/>
+  </xml>
+
+  <xml name="keras_constraints_options">
+    <section name="constraint_options" title="Constraint Advanced Options" expanded="false">
+      <yield/>
+      <param argument="axis" type="text" value="0" help="Integer or list of integers. axis along which to calculate weight norms">
+        <sanitizer>
+          <valid initial="default">
+            <add value="["/>
+            <add value="]"/>
+          <
[... middle of diff truncated in source ...]
+ple="true" label="Select metrics">
+        <option value="acc" selected="true">acc / accuracy</option>
+        <option value="binary_accuracy">binary_accuracy</option>
+        <option value="categorical_accuracy">categorical_accuracy</option>
+        <option value="sparse_categorical_accuracy">sparse_categorical_accuracy</option>
+        <option value="mse">mse / MSE / mean_squared_error</option>
+        <option value="mae">mae / MAE / mean_absolute_error</option>
+        <option value="mape">mape / MAPE / mean_absolute_percentage_error</option>
+        <option value="cosine_proximity">cosine_proximity</option>
+        <option value="cosine">cosine</option>
+        <option value="none">none</option>
+      </param>
+    </section>
+  </xml>
+
+  <xml name="keras_fit_params_section">
+    <section name="fit_params" title="Fit Parameters" expanded="true">
+      <param name="epochs" type="integer" value="1" min="1" label="epochs"/>
+      <param name="batch_size" type="integer" value="32" optional="true" label="batch_size" help="Integer or blank for 32"/>
+      <param name="steps_per_epoch" type="integer" value="" optional="true" label="steps_per_epoch" help="The number of steps (batches of samples) before declaring one epoch finished and starting the next epoch. The default None is equal to the number of samples in your dataset divided by the batch size, or 1 if that cannot be determined."/>
+      <param name="validation_steps" type="integer" value="" optional="true" label="validation_steps" help="Default None. Total number of steps (batches of samples) to validate before stopping." />
+      <!--`validation_freq` will be available in next keras version-->
+      <!--param name="validation_freq" type="integer" value="1" optional="true" label="validation_freq" help="Integer only at current moment. If an integer, specifies how many training epochs to run before a new validation run is performed."/-->
+      <expand macro="keras_callbacks"/>
+    </section>
+  </xml>
+
+ <!--Citation-->
+  <xml name="keras_citation">
+    <citation type="bibtex">
+      @misc{chollet2015keras,
+        title={Keras},
+        url={https://keras.io},
+        author={Chollet, Fran\c{c}ois and others},
+        year={2015},
+        howpublished={https://keras.io},
+      }
+    </citation>
+  </xml>
+
+  <xml name="tensorflow_citation">
+    <citation type="bibtex">
+      @misc{tensorflow2015-whitepaper,
+        title={ {TensorFlow}: Large-Scale Machine Learning on Heterogeneous Systems},
+        url={https://www.tensorflow.org/},
+        note={Software available from tensorflow.org},
+        author={
+            Mart\'{\i}n~Abadi and
+            Ashish~Agarwal and
+            Paul~Barham and
+            Eugene~Brevdo and
+            Zhifeng~Chen and
+            Craig~Citro and
+            Greg~S.~Corrado and
+            Andy~Davis and
+            Jeffrey~Dean and
+            Matthieu~Devin and
+            Sanjay~Ghemawat and
+            Ian~Goodfellow and
+            Andrew~Harp and
+            Geoffrey~Irving and
+            Michael~Isard and
+            Yangqing Jia and
+            Rafal~Jozefowicz and
+            Lukasz~Kaiser and
+            Manjunath~Kudlur and
+            Josh~Levenberg and
+            Dandelion~Man\'{e} and
+            Rajat~Monga and
+            Sherry~Moore and
+            Derek~Murray and
+            Chris~Olah and
+            Mike~Schuster and
+            Jonathon~Shlens and
+            Benoit~Steiner and
+            Ilya~Sutskever and
+            Kunal~Talwar and
+            Paul~Tucker and
+            Vincent~Vanhoucke and
+            Vijay~Vasudevan and
+            Fernanda~Vi\'{e}gas and
+            Oriol~Vinyals and
+            Pete~Warden and
+            Martin~Wattenberg and
+            Martin~Wicke and
+            Yuan~Yu and
+            Xiaoqiang~Zheng},
+          year={2015},
+      }
+    </citation>
+  </xml>
+
+</macros>
\ No newline at end of file
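The keras_regularizers macro above exposes an "(l1, l2)" text value; keras_deep_learning.py turns such a literal into a Keras regularizer via literal_eval. A sketch with an example literal:

    # How the "(l1, l2)" text parameter maps to a Keras regularizer
    # (cf. _handle_regularizer in keras_deep_learning.py); the literal
    # shown is an example value, not a default.
    from ast import literal_eval

    import keras

    l1, l2 = literal_eval("(0.01, 0.001)")  # value typed into the text field
    # (0., 0.) is treated as `None`, i.e. no regularization
    regularizer = keras.regularizers.l1_l2(l1=l1, l2=l2) if (l1 or l2) else None
    print(regularizer)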
diff -r 000000000000 -r af2624d5ab32 keras_train_and_eval.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/keras_train_and_eval.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,552 @@
+import argparse
+import json
+import os
+import pickle
+import warnings
+from itertools import chain
+
+import joblib
+import numpy as np
+import pandas as pd
+from galaxy_ml.externals.selene_sdk.utils import compute_score
+from galaxy_ml.keras_galaxy_models import _predict_generator
+from galaxy_ml.model_validations import train_test_split
+from galaxy_ml.utils import (clean_params, get_main_estimator,
+                             get_module, get_scoring, load_model, read_columns,
+                             SafeEval, try_get_attr)
+from scipy.io import mmread
+from sklearn.metrics.scorer import _check_multimetric_scoring
+from sklearn.model_selection import _search, _validation
+from sklearn.model_selection._validation import _score
+from sklearn.pipeline import Pipeline
+from sklearn.utils import indexable, safe_indexing
+
+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
+setattr(_search, "_fit_and_score", _fit_and_score)
+setattr(_validation, "_fit_and_score", _fit_and_score)
+
+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
+CACHE_DIR = os.path.join(os.getcwd(), "cached")
+del os
+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
+ALLOWED_CALLBACKS = (
+    "EarlyStopping",
+    "TerminateOnNaN",
+    "ReduceLROnPlateau",
+    "CSVLogger",
+    "None",
+)
+
+
+def _eval_swap_params(params_builder):
+    swap_params = {}
+
+    for p in params_builder["param_set"]:
+        swap_value = p["sp_value"].strip()
+        if swap_value == "":
+            continue
+
+        param_name = p["sp_name"]
+        if param_name.lower().endswith(NON_SEARCHABLE):
+            warnings.warn(
+                "Warning: `%s` is not eligible for search and was "
+                "omitted!" % param_name
+            )
+            continue
+
+        if not swap_value.startswith(":"):
+            safe_eval = SafeEval(load_scipy=True, load_numpy=True)
+            ev = safe_eval(swap_value)
+        else:
+            # Have `:` before search list, asks for estimator evaluation
+            safe_eval_es = SafeEval(load_estimators=True)
+            swap_value = swap_value[1:].strip()
+            # TODO maybe add regular expression check
+            ev = safe_eval_es(swap_value)
+
+        swap_params[param_name] = ev
+
+    return swap_params
+
+
+def train_test_split_none(*arrays, **kwargs):
+    """extend train_test_split to take None arrays
+    and support split by group names.
+    """
+    nones = []
+    new_arrays = []
+    for idx, arr in enumerate(arrays):
+        if arr is None:
+            nones.append(idx)
+        else:
+            new_arrays.append(arr)
+
+    if kwargs["shuffle"] == "None":
+        kwargs["shuffle"] = None
+
+    group_names = kwargs.pop("group_names", None)
+
+    if group_names is not None and group_names.strip():
+        group_names = [name.strip() for name in group_names.split(",")]
+        new_arrays = indexable(*new_arrays)
+        groups = kwargs["labels"]
+        n_samples = new_arrays[0].shape[0]
+        index_arr = np.arange(n_samples)
+        test = index_arr[np.isin(groups, group_names)]
+        train = index_arr[~np.isin(groups, group_names)]
+        rval = list(
+            chain.from_iterable(
+                (safe_indexing(a, train), safe_indexing(a, test)) for a in new_arrays
+            )
+        )
+    else:
+        rval = train_test_split(*new_arrays, **kwargs)
+
+    for pos in nones:
+        rval[pos * 2: 2] = [None, None]
+
+    return rval
+
+
+def _evaluate(y_true, pred_probas, scorer, is_multimetric=True):
+    """output scores based on input scorer
+
+    Parameters
+    ----------
+    y_true : array
+        True label or target values
+    pred_probas : array
+        Prediction values, probability for classification problem
+    scorer : dict
+        dict of `sklearn.metrics.scorer.SCORER`
+    is_multimetric : bool, default is True
+    """
+    if y_true.ndim == 1 or
[... middle of diff truncated in source ...]
+fit(X_train, y_train, validation_data=(X_val, y_val))
+        else:
+            estimator.fit(X_train, y_train, validation_data=(X_test, y_test))
+    else:
+        estimator.fit(X_train, y_train)
+
+    if hasattr(estimator, "evaluate"):
+        steps = estimator.prediction_steps
+        batch_size = estimator.batch_size
+        generator = estimator.data_generator_.flow(
+            X_test, y=y_test, batch_size=batch_size
+        )
+        predictions, y_true = _predict_generator(
+            estimator.model_, generator, steps=steps
+        )
+        scores = _evaluate(y_true, predictions, scorer, is_multimetric=True)
+
+    else:
+        if hasattr(estimator, "predict_proba"):
+            predictions = estimator.predict_proba(X_test)
+        else:
+            predictions = estimator.predict(X_test)
+
+        y_true = y_test
+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
+    if outfile_y_true:
+        try:
+            pd.DataFrame(y_true).to_csv(outfile_y_true, sep="\t", index=False)
+            pd.DataFrame(predictions).astype(np.float32).to_csv(
+                outfile_y_preds,
+                sep="\t",
+                index=False,
+                float_format="%g",
+                chunksize=10000,
+            )
+        except Exception as e:
+            print("Error in saving predictions: %s" % e)
+
+    # handle output
+    for name, score in scores.items():
+        scores[name] = [score]
+    df = pd.DataFrame(scores)
+    df = df[sorted(df.columns)]
+    df.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
+
+    memory.clear(warn=False)
+
+    if outfile_object:
+        main_est = estimator
+        if isinstance(estimator, Pipeline):
+            main_est = estimator.steps[-1][-1]
+
+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
+            if outfile_weights:
+                main_est.save_weights(outfile_weights)
+            del main_est.model_
+            del main_est.fit_params
+            del main_est.model_class_
+            if getattr(main_est, "validation_data", None):
+                del main_est.validation_data
+            if getattr(main_est, "data_generator_", None):
+                del main_est.data_generator_
+
+        with open(outfile_object, "wb") as output_handler:
+            pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-e", "--estimator", dest="infile_estimator")
+    aparser.add_argument("-X", "--infile1", dest="infile1")
+    aparser.add_argument("-y", "--infile2", dest="infile2")
+    aparser.add_argument("-O", "--outfile_result", dest="outfile_result")
+    aparser.add_argument("-o", "--outfile_object", dest="outfile_object")
+    aparser.add_argument("-w", "--outfile_weights", dest="outfile_weights")
+    aparser.add_argument("-l", "--outfile_y_true", dest="outfile_y_true")
+    aparser.add_argument("-p", "--outfile_y_preds", dest="outfile_y_preds")
+    aparser.add_argument("-g", "--groups", dest="groups")
+    aparser.add_argument("-r", "--ref_seq", dest="ref_seq")
+    aparser.add_argument("-b", "--intervals", dest="intervals")
+    aparser.add_argument("-t", "--targets", dest="targets")
+    aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        outfile_weights=args.outfile_weights,
+        outfile_y_true=args.outfile_y_true,
+        outfile_y_preds=args.outfile_y_preds,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+    )
diff -r 000000000000 -r af2624d5ab32 label_encoder.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/label_encoder.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+import argparse
+import json
+import warnings
+
+import numpy as np
+import pandas as pd
+from sklearn.preprocessing import LabelEncoder
+
+
+def main(inputs, infile, outfile):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile : str
+        File path to input vector
+
+    outfile : str
+        File path to output vector
+
+    """
+    warnings.simplefilter('ignore')
+
+    with open(inputs, 'r') as param_handler:
+        params = json.load(param_handler)
+
+    input_header = params['header0']
+    header = 'infer' if input_header else None
+
+    input_vector = pd.read_csv(infile, sep='\t', header=header)
+
+    le = LabelEncoder()
+
+    output_vector = le.fit_transform(input_vector)
+
+    np.savetxt(outfile, output_vector, fmt="%d", delimiter='\t')
+
+
+if __name__ == '__main__':
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-y", "--infile", dest="infile")
+    aparser.add_argument("-o", "--outfile", dest="outfile")
+    args = aparser.parse_args()
+
+    main(args.inputs, args.infile, args.outfile)
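As a quick illustration of what the encoding step above does (a standalone sketch, not part of the changeset):

    import pandas as pd
    from sklearn.preprocessing import LabelEncoder

    # Toy single-column label vector, as the tool reads from a tabular input.
    labels = pd.Series(["cat", "dog", "cat", "bird"])
    le = LabelEncoder()
    print(le.fit_transform(labels))  # [1 2 1 0]; classes are sorted
    print(le.classes_)               # ['bird' 'cat' 'dog']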
diff -r 000000000000 -r af2624d5ab32 main_macros.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/main_macros.xml Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2015 @@
+<macros>
+    <token name="@VERSION@">1.0.8.3</token>
+
+    <xml name="python_requirements">
+        <requirements>
+            <requirement type="package" version="0.8.3">Galaxy-ML</requirement>
+            <yield />
+        </requirements>
+    </xml>
+
+    <xml name="macro_stdio">
+        <stdio>
+            <exit_code range="1:" level="fatal" description="Error occurred. Please check Tool Standard Error" />
+        </stdio>
+    </xml>
+
+
+    <!--Generic interface-->
+
+    <xml name="sl_Conditional" token_train="tabular" token_data="tabular" token_model="txt">
+        <conditional name="selected_tasks">
+            <param name="selected_task" type="select" label="Select a Classification Task">
+                <option value="train" selected="true">Train a model</option>
+                <option value="load">Load a model and predict</option>
+            </param>
+            <when value="load">
+                <param name="infile_model" type="data" format="@MODEL@" label="Models" help="Select a model file." />
+                <param name="infile_data" type="data" format="@DATA@" label="Data (tabular)" help="Select the dataset you want to classify." />
+                <param name="header" type="boolean" optional="True" truevalue="booltrue" falsevalue="boolfalse" checked="False" label="Does the dataset contain header:" />
+                <conditional name="prediction_options">
+                    <param name="prediction_option" type="select" label="Select the type of prediction">
+                        <option value="predict">Predict class labels</option>
+                        <option value="advanced">Include advanced options</option>
+                    </param>
+                    <when value="predict">
+                    </when>
+                    <when value="advanced">
+                    </when>
+                </conditional>
+            </when>
+            <when value="train">
+                <conditional name="selected_algorithms">
+                    <yield />
+                </conditional>
+            </when>
+        </conditional>
+    </xml>
+
+    <xml name="advanced_section">
+        <section name="options" title="Advanced Options" expanded="False">
+            <yield />
+        </section>
+    </xml>
+
+
+    <!--Generalized Linear Models-->
+    <xml name="loss" token_help=" " token_select="false">
+        <param argument="loss" type="select" label="Loss function" help="@HELP@">
+            <option value="squared_loss" selected="@SELECT@">squared loss</option>
+            <option value="huber">huber</option>
+            <option value="epsilon_insensitive">epsilon insensitive</option>
+            <option value="squared_epsilon_insensitive">squared epsilon insensitive</option>
+            <yield />
+        </param>
+    </xml>
+
+    <xml name="penalty" token_help=" ">
+        <param argument="penalty" type="select" label="Penalty (regularization term)" help="@HELP@">
+            <option value="l2" selected="true">l2</option>
+            <option value="l1">l1</option>
+            <option value="elasticnet">elastic net</option>
+            <option value="none">none</option>
+            <yield />
+        </param>
+    </xml>
+
+    <xml name="l1_ratio" token_default_value="0.15" token_help=" ">
+        <param argument="l1_ratio" type="float" value="@DEFAULT_VALUE@" label="Elastic Net mixing parameter" help="@HELP@" />
+    </xml>
+
+    <xml name="epsilon" token_default_value="0.1" token_help="Used if loss is ‘huber’, ‘epsilon_insensitive’, or ‘squared_epsilon_insensitive’. ">
+        <param argument="epsilon" type="float" value="@DEFAULT_VALUE@" label="Epsilon (epsilon-sensitive loss functions only)" help="@HELP@" />
+    </xml>
+
+    <xml name="learning_rate_s" token_help=" " token_selected1="false" token_selected2="false">
+        <param argument="learning_rate" type="select" optional="true" label="Learning rate schedu
[... diff truncated in the source ...]
+            <citation type="doi">10.5281/zenodo.15094</citation>
+        </citations>
+    </xml>
+
+    <xml name="sklearn_citation">
+        <citations>
+            <citation type="bibtex">
+          @article{scikit-learn, title={Scikit-learn: Machine Learning in {P}ython}, author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V.
+                    and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P.
+                    and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and
+                    Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.}, journal={Journal of Machine Learning Research}, volume={12}, pages={2825--2830}, year={2011}
+          }
+            </citation>
+            <yield />
+        </citations>
+    </xml>
+
+    <xml name="scipy_citation">
+        <citations>
+            <citation type="bibtex">
+          @Misc{,
+          author =    {Eric Jones and Travis Oliphant and Pearu Peterson and others},
+          title =     {{SciPy}: Open source scientific tools for {Python}},
+          year =      {2001--},
+          url = "http://www.scipy.org/",
+          note = {[Online; accessed 2016-04-09]}
+        }
+            </citation>
+        </citations>
+    </xml>
+
+    <xml name="skrebate_citation">
+        <citation type="bibtex">
+      @article{DBLP:journals/corr/abs-1711-08477,
+        author    = {Ryan J. Urbanowicz and
+                    Randal S. Olson and
+                    Peter Schmitt and
+                    Melissa Meeker and
+                    Jason H. Moore},
+        title     = {Benchmarking Relief-Based Feature Selection Methods},
+        journal   = {CoRR},
+        volume    = {abs/1711.08477},
+        year      = {2017},
+        url       = {http://arxiv.org/abs/1711.08477},
+        archivePrefix = {arXiv},
+        eprint    = {1711.08477},
+        timestamp = {Mon, 13 Aug 2018 16:46:04 +0200},
+        biburl    = {https://dblp.org/rec/bib/journals/corr/abs-1711-08477},
+        bibsource = {dblp computer science bibliography, https://dblp.org}
+      }
+        </citation>
+    </xml>
+
+    <xml name="xgboost_citation">
+        <citation type="bibtex">
+      @inproceedings{Chen:2016:XST:2939672.2939785,
+        author = {Chen, Tianqi and Guestrin, Carlos},
+        title = {{XGBoost}: A Scalable Tree Boosting System},
+        booktitle = {Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining},
+        series = {KDD '16},
+        year = {2016},
+        isbn = {978-1-4503-4232-2},
+        location = {San Francisco, California, USA},
+        pages = {785--794},
+        numpages = {10},
+        url = {http://doi.acm.org/10.1145/2939672.2939785},
+        doi = {10.1145/2939672.2939785},
+        acmid = {2939785},
+        publisher = {ACM},
+        address = {New York, NY, USA},
+        keywords = {large-scale machine learning},
+      }
+        </citation>
+    </xml>
+
+    <xml name="imblearn_citation">
+        <citation type="bibtex">
+      @article{JMLR:v18:16-365,
+        author  = {Guillaume Lema{{\^i}}tre and Fernando Nogueira and Christos K. Aridas},
+        title   = {Imbalanced-learn: A Python Toolbox to Tackle the Curse of Imbalanced Datasets in Machine Learning},
+        journal = {Journal of Machine Learning Research},
+        year    = {2017},
+        volume  = {18},
+        number  = {17},
+        pages   = {1-5},
+        url     = {http://jmlr.org/papers/v18/16-365.html}
+      }
+        </citation>
+    </xml>
+
+    <xml name="selene_citation">
+        <citation type="bibtex">
+      @article{chen2019selene, title={Selene: a PyTorch-based deep learning library for sequence data}, author={Chen, Kathleen M and Cofer, Evan M and Zhou, Jian and Troyanskaya, Olga G}, journal={Nature methods}, volume={16}, number={4}, pages={315}, year={2019}, publisher={Nature Publishing Group}
+      }
+        </citation>
+    </xml>
+
+</macros>
diff -r 000000000000 -r af2624d5ab32 ml_visualization_ex.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/ml_visualization_ex.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,645 @@
+import argparse
+import json
+import os
+import warnings
+
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import plotly
+import plotly.graph_objs as go
+from galaxy_ml.utils import load_model, read_columns, SafeEval
+from keras.models import model_from_json
+from keras.utils import plot_model
+from sklearn.feature_selection.base import SelectorMixin
+from sklearn.metrics import (auc, average_precision_score, confusion_matrix,
+                             precision_recall_curve, roc_curve)
+from sklearn.pipeline import Pipeline
+
+safe_eval = SafeEval()
+
+# plotly default colors
+default_colors = [
+    "#1f77b4",  # muted blue
+    "#ff7f0e",  # safety orange
+    "#2ca02c",  # cooked asparagus green
+    "#d62728",  # brick red
+    "#9467bd",  # muted purple
+    "#8c564b",  # chestnut brown
+    "#e377c2",  # raspberry yogurt pink
+    "#7f7f7f",  # middle gray
+    "#bcbd22",  # curry yellow-green
+    "#17becf",  # blue-teal
+]
+
+
+def visualize_pr_curve_plotly(df1, df2, pos_label, title=None):
+    """output pr-curve in html using plotly
+
+    df1 : pandas.DataFrame
+        Containing y_true
+    df2 : pandas.DataFrame
+        Containing y_score
+    pos_label : None
+        The label of the positive class
+    title : str
+        Plot title
+    """
+    data = []
+    for idx in range(df1.shape[1]):
+        y_true = df1.iloc[:, idx].values
+        y_score = df2.iloc[:, idx].values
+
+        precision, recall, _ = precision_recall_curve(
+            y_true, y_score, pos_label=pos_label
+        )
+        ap = average_precision_score(y_true, y_score, pos_label=pos_label or 1)
+
+        trace = go.Scatter(
+            x=recall,
+            y=precision,
+            mode="lines",
+            marker=dict(color=default_colors[idx % len(default_colors)]),
+            name="%s (area = %.3f)" % (idx, ap),
+        )
+        data.append(trace)
+
+    layout = go.Layout(
+        xaxis=dict(title="Recall", linecolor="lightslategray", linewidth=1),
+        yaxis=dict(title="Precision", linecolor="lightslategray", linewidth=1),
+        title=dict(
+            text=title or "Precision-Recall Curve",
+            x=0.5,
+            y=0.92,
+            xanchor="center",
+            yanchor="top",
+        ),
+        font=dict(family="sans-serif", size=11),
+        # control background colors
+        plot_bgcolor="rgba(255,255,255,0)",
+    )
+    """
+    legend=dict(
+        x=0.95,
+        y=0,
+        traceorder="normal",
+        font=dict(
+            family="sans-serif",
+            size=9,
+            color="black"
+        ),
+        bgcolor="LightSteelBlue",
+        bordercolor="Black",
+        borderwidth=2
+    ),"""
+
+    fig = go.Figure(data=data, layout=layout)
+
+    plotly.offline.plot(fig, filename="output.html", auto_open=False)
+    # to be discovered by `from_work_dir`
+    os.rename("output.html", "output")
+
+
+def visualize_pr_curve_matplotlib(df1, df2, pos_label, title=None):
+    """visualize pr-curve using matplotlib and output svg image"""
+    backend = matplotlib.get_backend()
+    if "inline" not in backend:
+        matplotlib.use("SVG")
+    plt.style.use("seaborn-colorblind")
+    plt.figure()
+
+    for idx in range(df1.shape[1]):
+        y_true = df1.iloc[:, idx].values
+        y_score = df2.iloc[:, idx].values
+
+        precision, recall, _ = precision_recall_curve(
+            y_true, y_score, pos_label=pos_label
+        )
+        ap = average_precision_score(y_true, y_score, pos_label=pos_label or 1)
+
+        plt.step(
+            recall,
+            precision,
+            "r-",
+            color="black",
+            alpha=0.3,
+            lw=1,
+            where="post",
+            label="%s (area = %.3f)" % (idx, ap),
+        )
+
+    plt.xlim([0.0, 1.0])
+    plt.ylim([0.0, 1.05])
+    plt.xlabel("Recall")
+    plt.ylabel("Precision")
+    title = title or "Precision-Recall Curve
[... diff truncated in the source ...]
+                #    family="sans-serif",
+                #    size=9,
+                #    color="black"
+                # ),
+                # bgcolor="LightSteelBlue",
+                # bordercolor="Black",
+                # borderwidth=2
+            # ),
+        """
+
+        fig = go.Figure(data=[data1, data2], layout=layout)
+        plotly.offline.plot(fig, filename="output.html", auto_open=False)
+        # to be discovered by `from_work_dir`
+        os.rename("output.html", "output")
+
+        return 0
+
+    elif plot_type == "keras_plot_model":
+        with open(model_config, "r") as f:
+            model_str = f.read()
+        model = model_from_json(model_str)
+        plot_model(model, to_file="output.png")
+        os.rename("output.png", "output")
+
+        return 0
+
+    elif plot_type == "classification_confusion_matrix":
+        plot_selection = params["plotting_selection"]
+        input_true = get_dataframe(
+            true_labels, plot_selection, "header_true", "column_selector_options_true"
+        )
+        header_predicted = "infer" if plot_selection["header_predicted"] else None
+        input_predicted = pd.read_csv(
+            predicted_labels, sep="\t", parse_dates=True, header=header_predicted
+        )
+        true_classes = input_true.iloc[:, -1].copy()
+        predicted_classes = input_predicted.iloc[:, -1].copy()
+        axis_labels = list(set(true_classes))
+        c_matrix = confusion_matrix(true_classes, predicted_classes)
+        fig, ax = plt.subplots(figsize=(7, 7))
+        im = plt.imshow(c_matrix, cmap=plot_color)
+        for i in range(len(c_matrix)):
+            for j in range(len(c_matrix)):
+                ax.text(j, i, c_matrix[i, j], ha="center", va="center", color="k")
+        ax.set_ylabel("True class labels")
+        ax.set_xlabel("Predicted class labels")
+        ax.set_title(title)
+        ax.set_xticks(axis_labels)
+        ax.set_yticks(axis_labels)
+        fig.colorbar(im, ax=ax)
+        fig.tight_layout()
+        plt.savefig("output.png", dpi=125)
+        os.rename("output.png", "output")
+
+        return 0
+
+    # save pdf file to disk
+    # fig.write_image("image.pdf", format='pdf')
+    # fig.write_image("image.pdf", format='pdf', width=340*2, height=226*2)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-e", "--estimator", dest="infile_estimator")
+    aparser.add_argument("-X", "--infile1", dest="infile1")
+    aparser.add_argument("-y", "--infile2", dest="infile2")
+    aparser.add_argument("-O", "--outfile_result", dest="outfile_result")
+    aparser.add_argument("-o", "--outfile_object", dest="outfile_object")
+    aparser.add_argument("-g", "--groups", dest="groups")
+    aparser.add_argument("-r", "--ref_seq", dest="ref_seq")
+    aparser.add_argument("-b", "--intervals", dest="intervals")
+    aparser.add_argument("-t", "--targets", dest="targets")
+    aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
+    aparser.add_argument("-c", "--model_config", dest="model_config")
+    aparser.add_argument("-tl", "--true_labels", dest="true_labels")
+    aparser.add_argument("-pl", "--predicted_labels", dest="predicted_labels")
+    aparser.add_argument("-pc", "--plot_color", dest="plot_color")
+    aparser.add_argument("-pt", "--title", dest="title")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+        model_config=args.model_config,
+        true_labels=args.true_labels,
+        predicted_labels=args.predicted_labels,
+        plot_color=args.plot_color,
+        title=args.title,
+    )
diff -r 000000000000 -r af2624d5ab32 model_prediction.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/model_prediction.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,241 @@
+import argparse
+import json
+import warnings
+
+import numpy as np
+import pandas as pd
+from galaxy_ml.utils import get_module, load_model, read_columns, try_get_attr
+from scipy.io import mmread
+from sklearn.pipeline import Pipeline
+
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
+
+
+def main(
+    inputs,
+    infile_estimator,
+    outfile_predict,
+    infile_weights=None,
+    infile1=None,
+    fasta_path=None,
+    ref_seq=None,
+    vcf_path=None,
+):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile_estimator : str
+        File path to trained estimator input
+
+    outfile_predict : str
+        File path to save the prediction results, tabular
+
+    infile_weights : str
+        File path to weights input
+
+    infile1 : str
+        File path to dataset containing features
+
+    fasta_path : str
+        File path to dataset containing fasta file
+
+    ref_seq : str
+        File path to dataset containing the reference genome sequence.
+
+    vcf_path : str
+        File path to dataset containing variants info.
+    """
+    warnings.filterwarnings("ignore")
+
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    # load model
+    with open(infile_estimator, "rb") as est_handler:
+        estimator = load_model(est_handler)
+
+    main_est = estimator
+    if isinstance(estimator, Pipeline):
+        main_est = estimator.steps[-1][-1]
+    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
+        if not infile_weights or infile_weights == "None":
+            raise ValueError(
+                "The selected model skeleton asks for weights, "
+                "but no dataset for weights was selected!"
+            )
+        main_est.load_weights(infile_weights)
+
+    # handle data input
+    input_type = params["input_options"]["selected_input"]
+    # tabular input
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"][
+            "selected_column_selector_option"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
+        else:
+            c = None
+
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
+
+        X = read_columns(df, c=c, c_option=column_option).astype(float)
+
+        if params["method"] == "predict":
+            preds = estimator.predict(X)
+        else:
+            preds = estimator.predict_proba(X)
+
+    # sparse input
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
+        if params["method"] == "predict":
+            preds = estimator.predict(X)
+        else:
+            preds = estimator.predict_proba(X)
+
+    # fasta input
+    elif input_type == "seq_fasta":
+        if not hasattr(estimator, "data_batch_generator"):
+            raise ValueError(
+                "To do prediction on sequences in fasta input, "
+                "the estimator must be a `KerasGBatchClassifier` "
+                "equipped with data_batch_generator!"
+            )
+        pyfaidx = get_module("pyfaidx")
+        sequences = pyfaidx.Fasta(fasta_path)
+        n_seqs = len(sequences.keys())
+        X = np.arange(n_seqs)[:, np.newaxis]
+        seq_length = estimator.data_batch_generator.seq_length
+        batch_size = getattr(estimator, "batch_size", 32)
+        steps = (n_seqs + batch_size - 1) // batch_size
+
+        seq_type = params["input_options"]["seq_type"]
+        klass = try_get_attr("galaxy_ml.preprocessors", seq_type)
+
+        pred_data_generator = klass(fasta_path, seq_length=seq_length)
+
[... diff truncated in the source ...]
+        )
+
+    # vcf input
+    elif input_type == "variant_effect":
+        klass = try_get_attr("galaxy_ml.preprocessors", "GenomicVariantBatchGenerator")
+
+        options = params["input_options"]
+        options.pop("selected_input")
+        if options["blacklist_regions"] == "none":
+            options["blacklist_regions"] = None
+
+        pred_data_generator = klass(
+            ref_genome_path=ref_seq, vcf_path=vcf_path, **options
+        )
+
+        pred_data_generator.set_processing_attrs()
+
+        variants = pred_data_generator.variants
+
+        # predict 1600 samples at once, then write to file
+        gen_flow = pred_data_generator.flow(batch_size=1600)
+
+        file_writer = open(outfile_predict, "w")
+        header_row = "\t".join(["chrom", "pos", "name", "ref", "alt", "strand"])
+        file_writer.write(header_row)
+        header_done = False
+
+        steps_done = 0
+
+        # TODO: multiple threading
+        try:
+            while steps_done < len(gen_flow):
+                index_array = next(gen_flow.index_generator)
+                batch_X = gen_flow._get_batches_of_transformed_samples(index_array)
+
+                if params["method"] == "predict":
+                    batch_preds = estimator.predict(
+                        batch_X,
+                        # The presence of `pred_data_generator` below is to
+                        # override the data_generator carried by the model,
+                        # if there is any.
+                        data_generator=pred_data_generator,
+                    )
+                else:
+                    batch_preds = estimator.predict_proba(
+                        batch_X,
+                        # The presence of `pred_data_generator` below is to
+                        # override the data_generator carried by the model,
+                        # if there is any.
+                        data_generator=pred_data_generator,
+                    )
+
+                if batch_preds.ndim == 1:
+                    batch_preds = batch_preds[:, np.newaxis]
+
+                batch_meta = variants[index_array]
+                batch_out = np.column_stack([batch_meta, batch_preds])
+
+                if not header_done:
+                    heads = np.arange(batch_preds.shape[-1]).astype(str)
+                    heads_str = "\t".join(heads)
+                    file_writer.write("\t%s\n" % heads_str)
+                    header_done = True
+
+                for row in batch_out:
+                    row_str = "\t".join(row)
+                    file_writer.write("%s\n" % row_str)
+
+                steps_done += 1
+
+        finally:
+            file_writer.close()
+            # TODO: make api `pred_data_generator.close()`
+            pred_data_generator.close()
+        return 0
+    # end input
+
+    # output
+    if len(preds.shape) == 1:
+        rval = pd.DataFrame(preds, columns=["Predicted"])
+    else:
+        rval = pd.DataFrame(preds)
+
+    rval.to_csv(outfile_predict, sep="\t", header=True, index=False)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
+    aparser.add_argument("-w", "--infile_weights", dest="infile_weights")
+    aparser.add_argument("-X", "--infile1", dest="infile1")
+    aparser.add_argument("-O", "--outfile_predict", dest="outfile_predict")
+    aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
+    aparser.add_argument("-r", "--ref_seq", dest="ref_seq")
+    aparser.add_argument("-v", "--vcf_path", dest="vcf_path")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.outfile_predict,
+        infile_weights=args.infile_weights,
+        infile1=args.infile1,
+        fasta_path=args.fasta_path,
+        ref_seq=args.ref_seq,
+        vcf_path=args.vcf_path,
+    )
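For the plain tabular branch, the tool is essentially a load-predict-write loop; a sketch under the assumption of a pickled scikit-learn estimator (file names are hypothetical):

    import pickle

    import pandas as pd

    with open("fitted_model.pkl", "rb") as f:
        estimator = pickle.load(f)

    df = pd.read_csv("features.tabular", sep="\t", header=0)
    X = df.values.astype(float)

    # `predict` vs `predict_proba` mirrors the tool's `method` parameter.
    preds = estimator.predict(X)
    pd.DataFrame(preds, columns=["Predicted"]).to_csv(
        "predictions.tabular", sep="\t", index=False
    )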
diff -r 000000000000 -r af2624d5ab32 pca.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/pca.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,186 @@
+import argparse
+
+import numpy as np
+from galaxy_ml.utils import read_columns
+from sklearn.decomposition import IncrementalPCA, KernelPCA, PCA
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Principal component analysis")
+    parser.add_argument("-i", "--infile", help="Input file")
+    parser.add_argument(
+        "--header", action="store_true", help="Include the header row or skip it"
+    )
+    parser.add_argument(
+        "-c",
+        "--columns",
+        type=str.lower,
+        default="all",
+        choices=[
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+            "all_columns",
+        ],
+        help="Choose to select all columns, or exclude/include some",
+    )
+    parser.add_argument(
+        "-ci",
+        "--column_indices",
+        type=str.lower,
+        help="Choose to select all columns, or exclude/include some",
+    )
+    parser.add_argument(
+        "-n",
+        "--number",
+        nargs="?",
+        type=int,
+        default=None,
+        help="Number of components to keep. If not set, all components are kept",
+    )
+    parser.add_argument("--whiten", action="store_true", help="Whiten the components")
+    parser.add_argument(
+        "-t",
+        "--pca_type",
+        type=str.lower,
+        default="classical",
+        choices=["classical", "incremental", "kernel"],
+        help="Choose which flavour of PCA to use",
+    )
+    parser.add_argument(
+        "-s",
+        "--svd_solver",
+        type=str.lower,
+        default="auto",
+        choices=["auto", "full", "arpack", "randomized"],
+        help="Choose the type of svd solver.",
+    )
+    parser.add_argument(
+        "-b",
+        "--batch_size",
+        nargs="?",
+        type=int,
+        default=None,
+        help="The number of samples to use for each batch",
+    )
+    parser.add_argument(
+        "-k",
+        "--kernel",
+        type=str.lower,
+        default="linear",
+        choices=["linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"],
+        help="Choose the type of kernel.",
+    )
+    parser.add_argument(
+        "-g",
+        "--gamma",
+        nargs="?",
+        type=float,
+        default=None,
+        help="Kernel coefficient for rbf, poly and sigmoid kernels. Ignored by other kernels",
+    )
+    parser.add_argument(
+        "-tol",
+        "--tolerance",
+        type=float,
+        default=0.0,
+        help="Convergence tolerance for arpack. If 0, optimal value will be chosen by arpack",
+    )
+    parser.add_argument(
+        "-mi",
+        "--max_iter",
+        nargs="?",
+        type=int,
+        default=None,
+        help="Maximum number of iterations for arpack",
+    )
+    parser.add_argument(
+        "-d",
+        "--degree",
+        type=int,
+        default=3,
+        help="Degree for poly kernels. Ignored by other kernels",
+    )
+    parser.add_argument(
+        "-cf",
+        "--coef0",
+        type=float,
+        default=1.0,
+        help="Independent term in poly and sigmoid kernels",
+    )
+    parser.add_argument(
+        "-e",
+        "--eigen_solver",
+        type=str.lower,
+        default="auto",
+        choices=["auto", "dense", "arpack"],
+        help="Choose the type of eigen solver.",
+    )
+    parser.add_argument(
+        "-o", "--outfile", help="Base name for output file (no extension)."
+    )
+    args = parser.parse_args()
+
+    usecols = None
+    pca_params = {}
+
+    if args.columns == "by_index_number" or args.columns == "all_but_by_index_number":
+        usecols = [int(i) for i in args.column_indices.split(",")]
+    elif args.columns == "by_header_name" or args.columns == "all_but_by_header_name":
+        usecols = args.column_indices
+
+    header = "infer" if args.header else None
+
+    pca_input = read_columns(
+        f=args.infile,
+        c=usecols,
+        c_option=args.columns,
+        sep="\t",
+        header=header,
+        parse_dates=True,
+        encoding=None,
+        index_col=None,
+    )
+
+    pca_params.update({"n_components": args.number})
+
+    if args.pca_type == "classical":
+        pca_params.update({"svd_solver": args.svd_solver, "whiten": args.whiten})
+        if args.svd_solver == "arpack":
+            pca_params.update({"tol": args.tolerance})
+        pca = PCA()
+
+    elif args.pca_type == "incremental":
+        pca_params.update({"batch_size": args.batch_size, "whiten": args.whiten})
+        pca = IncrementalPCA()
+
+    elif args.pca_type == "kernel":
+        pca_params.update(
+            {
+                "kernel": args.kernel,
+                "eigen_solver": args.eigen_solver,
+                "gamma": args.gamma,
+            }
+        )
+
+        if args.kernel == "poly":
+            pca_params.update({"degree": args.degree, "coef0": args.coef0})
+        elif args.kernel == "sigmoid":
+            pca_params.update({"coef0": args.coef0})
+        elif args.kernel == "precomputed":
+            pca_input = np.dot(pca_input, pca_input.T)
+
+        if args.eigen_solver == "arpack":
+            pca_params.update({"tol": args.tolerance, "max_iter": args.max_iter})
+
+        pca = KernelPCA()
+
+    print(pca_params)
+    pca.set_params(**pca_params)
+    pca_output = pca.fit_transform(pca_input)
+    np.savetxt(fname=args.outfile, X=pca_output, fmt="%.4f", delimiter="\t")
+
+
+if __name__ == "__main__":
+    main()
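A condensed sketch of what a classical run of this script computes (a toy matrix stands in for the parsed tabular input):

    import numpy as np
    from sklearn.decomposition import PCA

    X = np.random.RandomState(0).rand(10, 4)

    # Roughly equivalent to pca.py with --pca_type classical --number 2:
    pca = PCA()
    pca.set_params(n_components=2, svd_solver="auto", whiten=False)
    X_reduced = pca.fit_transform(X)
    np.savetxt("pca_out.tabular", X_reduced, fmt="%.4f", delimiter="\t")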
diff -r 000000000000 -r af2624d5ab32 search_model_validation.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/search_model_validation.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,784 @@
+import argparse
+import collections
+import json
+import os
+import pickle
+import sys
+import warnings
+
+import imblearn
+import joblib
+import numpy as np
+import pandas as pd
+import skrebate
+from galaxy_ml.utils import (clean_params, get_cv,
+                             get_main_estimator, get_module, get_scoring,
+                             load_model, read_columns, SafeEval, try_get_attr)
+from scipy.io import mmread
+from sklearn import (cluster, decomposition, feature_selection,
+                     kernel_approximation, model_selection, preprocessing)
+from sklearn.exceptions import FitFailedWarning
+from sklearn.model_selection import _search, _validation
+from sklearn.model_selection._validation import _score, cross_validate
+
+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
+setattr(_search, "_fit_and_score", _fit_and_score)
+setattr(_validation, "_fit_and_score", _fit_and_score)
+
+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
+# handle disk cache
+CACHE_DIR = os.path.join(os.getcwd(), "cached")
+del os
+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
+
+
+def _eval_search_params(params_builder):
+    search_params = {}
+
+    for p in params_builder["param_set"]:
+        search_list = p["sp_list"].strip()
+        if search_list == "":
+            continue
+
+        param_name = p["sp_name"]
+        if param_name.lower().endswith(NON_SEARCHABLE):
+            print(
+                "Warning: `%s` is not eligible for search and was "
+                "omitted!" % param_name
+            )
+            continue
+
+        if not search_list.startswith(":"):
+            safe_eval = SafeEval(load_scipy=True, load_numpy=True)
+            ev = safe_eval(search_list)
+            search_params[param_name] = ev
+        else:
+            # A leading `:` before the search list asks for estimator evaluation
+            safe_eval_es = SafeEval(load_estimators=True)
+            search_list = search_list[1:].strip()
+            # TODO: maybe add a regular expression check
+            ev = safe_eval_es(search_list)
+            preprocessings = (
+                preprocessing.StandardScaler(),
+                preprocessing.Binarizer(),
+                preprocessing.MaxAbsScaler(),
+                preprocessing.Normalizer(),
+                preprocessing.MinMaxScaler(),
+                preprocessing.PolynomialFeatures(),
+                preprocessing.RobustScaler(),
+                feature_selection.SelectKBest(),
+                feature_selection.GenericUnivariateSelect(),
+                feature_selection.SelectPercentile(),
+                feature_selection.SelectFpr(),
+                feature_selection.SelectFdr(),
+                feature_selection.SelectFwe(),
+                feature_selection.VarianceThreshold(),
+                decomposition.FactorAnalysis(random_state=0),
+                decomposition.FastICA(random_state=0),
+                decomposition.IncrementalPCA(),
+                decomposition.KernelPCA(random_state=0, n_jobs=N_JOBS),
+                decomposition.LatentDirichletAllocation(random_state=0, n_jobs=N_JOBS),
+                decomposition.MiniBatchDictionaryLearning(
+                    random_state=0, n_jobs=N_JOBS
+                ),
+                decomposition.MiniBatchSparsePCA(random_state=0, n_jobs=N_JOBS),
+                decomposition.NMF(random_state=0),
+                decomposition.PCA(random_state=0),
+                decomposition.SparsePCA(random_state=0, n_jobs=N_JOBS),
+                decomposition.TruncatedSVD(random_state=0),
+                kernel_approximation.Nystroem(random_state=0),
+                kernel_approximation.RBFSampler(random_state=0),
+                kernel_approximation.AdditiveChi2Sampler(),
+                kernel_approximation.SkewedChi2Sampler(random_state=0),
+                cluster.FeatureAgglomeration(),
+                skrebate.Re
[... diff truncated in the source ...]
+            if k.startswith("test"):
+                rval["mean_" + k] = np.mean(rval[k])
+                rval["std_" + k] = np.std(rval[k])
+            if k.endswith("time"):
+                rval.pop(k)
+        rval = pd.DataFrame(rval)
+        rval = rval[sorted(rval.columns)]
+        rval.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
+        # deprecate train test split mode
+        """searcher = _do_train_test_split_val(
+            searcher, X, y, params,
+            primary_scoring=primary_scoring,
+            error_score=options['error_score'],
+            groups=groups,
+            outfile=outfile_result)"""
+        return 0
+
+    # no outer split
+    else:
+        searcher.set_params(n_jobs=N_JOBS)
+        if options["error_score"] == "raise":
+            searcher.fit(X, y, groups=groups)
+        else:
+            warnings.simplefilter("always", FitFailedWarning)
+            with warnings.catch_warnings(record=True) as w:
+                try:
+                    searcher.fit(X, y, groups=groups)
+                except ValueError:
+                    pass
+                for warning in w:
+                    print(repr(warning.message))
+
+        cv_results = pd.DataFrame(searcher.cv_results_)
+        cv_results = cv_results[sorted(cv_results.columns)]
+        cv_results.to_csv(
+            path_or_buf=outfile_result, sep="\t", header=True, index=False
+        )
+
+    memory.clear(warn=False)
+
+    # output best estimator, and weights if applicable
+    if outfile_object:
+        best_estimator_ = getattr(searcher, "best_estimator_", None)
+        if not best_estimator_:
+            warnings.warn(
+                "GridSearchCV object has no attribute "
+                "'best_estimator_', because either it's "
+                "a nested gridsearch or `refit` is False!"
+            )
+            return
+
+        # clean params
+        best_estimator_ = clean_params(best_estimator_)
+
+        main_est = get_main_estimator(best_estimator_)
+
+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
+            if outfile_weights:
+                main_est.save_weights(outfile_weights)
+            del main_est.model_
+            del main_est.fit_params
+            del main_est.model_class_
+            del main_est.validation_data
+            if getattr(main_est, "data_generator_", None):
+                del main_est.data_generator_
+
+        with open(outfile_object, "wb") as output_handler:
+            print("Best estimator is saved: %s " % repr(best_estimator_))
+            pickle.dump(best_estimator_, output_handler, pickle.HIGHEST_PROTOCOL)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-e", "--estimator", dest="infile_estimator")
+    aparser.add_argument("-X", "--infile1", dest="infile1")
+    aparser.add_argument("-y", "--infile2", dest="infile2")
+    aparser.add_argument("-O", "--outfile_result", dest="outfile_result")
+    aparser.add_argument("-o", "--outfile_object", dest="outfile_object")
+    aparser.add_argument("-w", "--outfile_weights", dest="outfile_weights")
+    aparser.add_argument("-g", "--groups", dest="groups")
+    aparser.add_argument("-r", "--ref_seq", dest="ref_seq")
+    aparser.add_argument("-b", "--intervals", dest="intervals")
+    aparser.add_argument("-t", "--targets", dest="targets")
+    aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.outfile_result,
+        outfile_object=args.outfile_object,
+        outfile_weights=args.outfile_weights,
+        groups=args.groups,
+        ref_seq=args.ref_seq,
+        intervals=args.intervals,
+        targets=args.targets,
+        fasta_path=args.fasta_path,
+    )
diff -r 000000000000 -r af2624d5ab32 simple_model_fit.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/simple_model_fit.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,194 @@
+import argparse
+import json
+import pickle
+
+import pandas as pd
+from galaxy_ml.utils import load_model, read_columns
+from scipy.io import mmread
+from sklearn.pipeline import Pipeline
+
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
+
+
+# TODO import from galaxy_ml.utils in future versions
+def clean_params(estimator, n_jobs=None):
+    """clean unwanted hyperparameter settings
+
+    If n_jobs is not None, set it into the estimator, if applicable
+
+    Returns
+    -------
+    Cleaned estimator object
+    """
+    ALLOWED_CALLBACKS = (
+        "EarlyStopping",
+        "TerminateOnNaN",
+        "ReduceLROnPlateau",
+        "CSVLogger",
+        "None",
+    )
+
+    estimator_params = estimator.get_params()
+
+    for name, p in estimator_params.items():
+        # all potential unauthorized file write
+        if name == "memory" or name.endswith("__memory") or name.endswith("_path"):
+            new_p = {name: None}
+            estimator.set_params(**new_p)
+        elif n_jobs is not None and (name == "n_jobs" or name.endswith("__n_jobs")):
+            new_p = {name: n_jobs}
+            estimator.set_params(**new_p)
+        elif name.endswith("callbacks"):
+            for cb in p:
+                cb_type = cb["callback_selection"]["callback_type"]
+                if cb_type not in ALLOWED_CALLBACKS:
+                    raise ValueError("Prohibited callback type: %s!" % cb_type)
+
+    return estimator
+
+
+def _get_X_y(params, infile1, infile2):
+    """read from inputs and output X and y
+
+    Parameters
+    ----------
+    params : dict
+        Tool inputs parameter
+    infile1 : str
+        File path to dataset containing features
+    infile2 : str
+        File path to dataset containing target values
+
+    """
+    # store read dataframe object
+    loaded_df = {}
+
+    input_type = params["input_options"]["selected_input"]
+    # tabular input
+    if input_type == "tabular":
+        header = "infer" if params["input_options"]["header1"] else None
+        column_option = params["input_options"]["column_selector_options_1"][
+            "selected_column_selector_option"
+        ]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["input_options"]["column_selector_options_1"]["col1"]
+        else:
+            c = None
+
+        df_key = infile1 + repr(header)
+        df = pd.read_csv(infile1, sep="\t", header=header, parse_dates=True)
+        loaded_df[df_key] = df
+
+        X = read_columns(df, c=c, c_option=column_option).astype(float)
+    # sparse input
+    elif input_type == "sparse":
+        X = mmread(open(infile1, "r"))
+
+    # Get target y
+    header = "infer" if params["input_options"]["header2"] else None
+    column_option = params["input_options"]["column_selector_options_2"][
+        "selected_column_selector_option2"
+    ]
+    if column_option in [
+        "by_index_number",
+        "all_but_by_index_number",
+        "by_header_name",
+        "all_but_by_header_name",
+    ]:
+        c = params["input_options"]["column_selector_options_2"]["col2"]
+    else:
+        c = None
+
+    df_key = infile2 + repr(header)
+    if df_key in loaded_df:
+        infile2 = loaded_df[df_key]
+    else:
+        infile2 = pd.read_csv(infile2, sep="\t", header=header, parse_dates=True)
+        loaded_df[df_key] = infile2
+
+    y = read_columns(
+        infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True
+    )
+    if len(y.shape) == 2 and y.shape[1] == 1:
+        y = y.ravel()
+
+    return X, y
+
+
+def main(inputs, infile_estimator, infile1, infile2, out_object, out_weights=None):
+    """main
+
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile_estimator : str
+        File paths of input estimator
+
+    infile1 : str
+        File path to dataset containing features
+
+    infile2 : str
+        File path to dataset containing target labels
+
+    out_object : str
+        File path for output of fitted model or skeleton
+
+    out_weights : str
+        File path for output of weights
+
+    """
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    # load model
+    with open(infile_estimator, "rb") as est_handler:
+        estimator = load_model(est_handler)
+    estimator = clean_params(estimator, n_jobs=N_JOBS)
+
+    X_train, y_train = _get_X_y(params, infile1, infile2)
+
+    estimator.fit(X_train, y_train)
+
+    main_est = estimator
+    if isinstance(main_est, Pipeline):
+        main_est = main_est.steps[-1][-1]
+    if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
+        if out_weights:
+            main_est.save_weights(out_weights)
+        del main_est.model_
+        del main_est.fit_params
+        del main_est.model_class_
+        if getattr(main_est, "validation_data", None):
+            del main_est.validation_data
+        if getattr(main_est, "data_generator_", None):
+            del main_est.data_generator_
+
+    with open(out_object, "wb") as output_handler:
+        pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-X", "--infile_estimator", dest="infile_estimator")
+    aparser.add_argument("-y", "--infile1", dest="infile1")
+    aparser.add_argument("-g", "--infile2", dest="infile2")
+    aparser.add_argument("-o", "--out_object", dest="out_object")
+    aparser.add_argument("-t", "--out_weights", dest="out_weights")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_estimator,
+        args.infile1,
+        args.infile2,
+        args.out_object,
+        args.out_weights,
+    )
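The `memory`/`*_path` scrubbing in `clean_params` above guards against unauthorized file writes; a minimal sketch of the same idea on a scikit-learn pipeline:

    from sklearn.linear_model import SGDClassifier
    from sklearn.pipeline import Pipeline

    pipe = Pipeline([("clf", SGDClassifier())], memory="/tmp/cache")
    pipe.set_params(memory=None)  # what clean_params does for `memory` params
    print(pipe.get_params()["memory"])  # None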
diff -r 000000000000 -r af2624d5ab32 stacking_ensembles.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/stacking_ensembles.py Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,131 @@
+import argparse
+import ast
+import json
+import pickle
+import sys
+import warnings
+
+import mlxtend.classifier
+import mlxtend.regressor
+import pandas as pd
+from galaxy_ml.utils import (get_cv, get_estimator, get_search_params,
+                             load_model)
+
+warnings.filterwarnings("ignore")
+
+N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
+
+
+def main(inputs_path, output_obj, base_paths=None, meta_path=None, outfile_params=None):
+    """
+    Parameters
+    ----------
+    inputs_path : str
+        File path for Galaxy parameters
+
+    output_obj : str
+        File path for ensemble estimator output
+
+    base_paths : str
+        File path, or multiple file paths joined by commas.
+
+    meta_path : str
+        File path
+
+    outfile_params : str
+        File path for params output
+    """
+    with open(inputs_path, "r") as param_handler:
+        params = json.load(param_handler)
+
+    estimator_type = params["algo_selection"]["estimator_type"]
+    # get base estimators
+    base_estimators = []
+    for idx, base_file in enumerate(base_paths.split(",")):
+        if base_file and base_file != "None":
+            with open(base_file, "rb") as handler:
+                model = load_model(handler)
+        else:
+            estimator_json = params["base_est_builder"][idx]["estimator_selector"]
+            model = get_estimator(estimator_json)
+
+        if estimator_type.startswith("sklearn"):
+            named = model.__class__.__name__.lower()
+            named = "base_%d_%s" % (idx, named)
+            base_estimators.append((named, model))
+        else:
+            base_estimators.append(model)
+
+    # get meta estimator, if applicable
+    if estimator_type.startswith("mlxtend"):
+        if meta_path:
+            with open(meta_path, "rb") as f:
+                meta_estimator = load_model(f)
+        else:
+            estimator_json = params["algo_selection"]["meta_estimator"][
+                "estimator_selector"
+            ]
+            meta_estimator = get_estimator(estimator_json)
+
+    options = params["algo_selection"]["options"]
+
+    cv_selector = options.pop("cv_selector", None)
+    if cv_selector:
+        splitter, _groups = get_cv(cv_selector)
+        options["cv"] = splitter
+        # set n_jobs
+        options["n_jobs"] = N_JOBS
+
+    weights = options.pop("weights", None)
+    if weights:
+        weights = ast.literal_eval(weights)
+        if weights:
+            options["weights"] = weights
+
+    mod_and_name = estimator_type.split("_")
+    mod = sys.modules[mod_and_name[0]]
+    klass = getattr(mod, mod_and_name[1])
+
+    if estimator_type.startswith("sklearn"):
+        options["n_jobs"] = N_JOBS
+        ensemble_estimator = klass(base_estimators, **options)
+
+    elif mod == mlxtend.classifier:
+        ensemble_estimator = klass(
+            classifiers=base_estimators, meta_classifier=meta_estimator, **options
+        )
+
+    else:
+        ensemble_estimator = klass(
+            regressors=base_estimators, meta_regressor=meta_estimator, **options
+        )
+
+    print(ensemble_estimator)
+    for base_est in base_estimators:
+        print(base_est)
+
+    with open(output_obj, "wb") as out_handler:
+        pickle.dump(ensemble_estimator, out_handler, pickle.HIGHEST_PROTOCOL)
+
+    if params["get_params"] and outfile_params:
+        results = get_search_params(ensemble_estimator)
+        df = pd.DataFrame(results, columns=["", "Parameter", "Value"])
+        df.to_csv(outfile_params, sep="\t", index=False)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-b", "--bases", dest="bases")
+    aparser.add_argument("-m", "--meta", dest="meta")
+    aparser.add_argument("-i", "--inputs", dest="inputs")
+    aparser.add_argument("-o", "--outfile", dest="outfile")
+    aparser.add_argument("-p", "--outfile_params", dest="outfile_params")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.outfile,
+        base_paths=args.bases,
+        meta_path=args.meta,
+        outfile_params=args.outfile_params,
+    )
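For reference, a hand-built equivalent of one mlxtend selection this script supports (the base and meta estimator choices are hypothetical):

    from mlxtend.regressor import StackingRegressor
    from sklearn.linear_model import LinearRegression
    from sklearn.svm import SVR

    ensemble = StackingRegressor(
        regressors=[SVR(kernel="linear"), LinearRegression()],
        meta_regressor=LinearRegression(),
    )
    # Like the tool, downstream steps would pickle and later fit this
    # unfitted ensemble.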
diff -r 000000000000 -r af2624d5ab32 test-data/GridSearchCV.zip
Binary file test-data/GridSearchCV.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/LinearRegression01.zip
Binary file test-data/LinearRegression01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/LinearRegression02.zip
Binary file test-data/LinearRegression02.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/RF01704.fasta
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/RF01704.fasta Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+>CP000097.1/1411351-1411410
+CAACGUUCACCUCACAUUUGUGAGGCGCAGACAACCCAGGCCAAGGAACGGGGACCUGGA
+>ACNY01000002.1/278641-278580
+GAUCGUUCACUUCGCAUCGCGCGAAGCGCAGUUCGCCUCAGGCCAUGGAACGGGGACCUGAG
diff -r 000000000000 -r af2624d5ab32 test-data/RFE.zip
Binary file test-data/RFE.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/RandomForestClassifier.zip
Binary file test-data/RandomForestClassifier.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/RandomForestRegressor01.zip
Binary file test-data/RandomForestRegressor01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/StackingCVRegressor01.zip
Binary file test-data/StackingCVRegressor01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/StackingCVRegressor02.zip
Binary file test-data/StackingCVRegressor02.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/StackingRegressor02.zip
Binary file test-data/StackingRegressor02.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/StackingVoting03.zip
Binary file test-data/StackingVoting03.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/XGBRegressor01.zip
Binary file test-data/XGBRegressor01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/abc_model01
Binary file test-data/abc_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/abc_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/abc_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 predicted
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/abr_model01
Binary file test-data/abr_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/abr_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/abr_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 4 predicted
+86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.323842059244
+91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 1.1503117056799999
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.7191695359690001
+61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 1.1503117056799999
+-206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -0.7191695359690001
diff -r 000000000000 -r af2624d5ab32 test-data/accuracy_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/accuracy_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+accuracy_score : 
+0.8461538461538461
diff -r 000000000000 -r af2624d5ab32 test-data/auc.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/auc.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+auc : 
+2.5
diff -r 000000000000 -r af2624d5ab32 test-data/average_precision_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/average_precision_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+average_precision_score : 
+1.0
diff -r 000000000000 -r af2624d5ab32 test-data/best_estimator_.zip
Binary file test-data/best_estimator_.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/best_params_.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/best_params_.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,1 @@
+{'estimator__n_estimators': 100}
\ No newline at end of file
diff -r 000000000000 -r af2624d5ab32 test-data/best_score_.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/best_score_.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+best_score_
+0.7976348550293088
diff -r 000000000000 -r af2624d5ab32 test-data/blobs.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/blobs.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 0
+0.33681845896740 -3.40287961299073 0
+-9.48324265575857 -8.66266051536995 2
+-1.93336328496076 5.70953908146890 1
+-10.03182405989413 -5.57834393458362 2
+0.54186077661701 -4.37693628326930 0
+-8.12962929067378 -7.05554320549807 2
+-0.73082578569427 7.32375551699482 1
+-1.84142532007015 6.20630466830832 1
+0.41007185031668 -3.99744881000119 0
+-8.73509589323240 -5.49090535208751 2
+1.84941962277054 -3.91839345672969 0
+-9.15256068848857 -9.17805648051067 2
+-3.21429939077830 5.75926163957071 1
+0.28450378549664 -3.61576522643830 0
+-0.92907484922306 5.79099955373578 1
+0.36692524194483 6.51861929622910 1
+1.59909917965412 -3.07105617297875 0
+-9.71270568435724 -7.91707651499009 2
+-10.08040443063205 -6.55135324108655 2
+1.10594345774293 -4.41906374949547 0
+2.48708049649457 -2.89100712361067 0
+0.00587148930883 -3.18314255539710 0
+1.61854359735349 -4.88855922559208 0
+-9.15856722108140 -7.13894114847511 2
+-3.07633571459573 7.80049676786476 1
+0.11174653022487 -3.61615828710479 0
+-9.43932350782336 -7.29863034570663 2
+-1.69466229591445 4.40837111117530 1
+1.05261752638325 -3.49553009701512 0
+-10.50560592102942 -5.99245086001851 2
+1.54081964152897 -4.53702344151471 0
+0.32228789680820 6.89854008042929 1
+0.61621969660610 -5.27504803637537 0
+-10.22545392329864 -8.71635918421430 2
+-10.61004107591557 -8.15999270542289 2
+-0.74547966700287 -2.96189843151195 0
+0.78848758990191 -5.32234377938911 0
+-10.42005276754933 -7.78467770434098 2
+-2.90664752997062 5.79835066175825 1
+-10.32143921202120 -8.92712052109752 2
+-0.21338559861828 7.84779827247996 1
+-0.07194732572546 -5.26054466248995 0
+-7.60696893546687 -7.73382713697845 2
+-1.37722038386856 6.91773657443747 1
+-3.21560019075551 7.26468660350508 1
+-10.36154489539457 -6.91944465708303 2
+-9.60457341239248 -9.25351754602290 2
+-2.72690231565835 6.73825747902294 1
+-2.80603999216749 6.99066208996353 1
+-0.81952671479263 7.58241271253648 1
+-2.08847400980833 5.69607144720414 1
+-0.31991876149841 -4.98235849165957 0
+-11.32066579703307 -8.20937750734829 2
+-7.96236061274655 -9.01605369665730 2
+2.16784691057462 -6.16570792177736 0
+1.89502027521910 -5.86480290918300 0
+-8.66871499099032 -7.79890226276482 2
+2.05772110384843 -6.12322912450768 0
+-9.31359960682017 -8.00568199998929 2
+-0.76743056356151 -5.47682217583339 0
+-3.46772941922521 6.76072133440808 1
+1.09049844437461 -5.87582929334941 0
+-0.11521126331032 -4.07510454495671 0
+1.08927850504071 -5.50265562869237 0
+-0.61505047925733 7.65521576624828 1
+0.42996321311489 -5.55093054437951 0
+-0.75919485469050 5.58853030731725 1
+-9.12599657251685 -8.00673850068656 2
+-9.77537442082784 -6.61925671967673 2
+-3.01723334528173 7.00340677720469 1
+-0.97308946436741 -4.06651907195677 0
+-0.48830021304200 -5.66504681203900 0
+-11.92081159330307 -7.64815817127183 2
+-9.38262507165980 -7.58496298709520 2
+0.07652275340590 7.58891330491466 1
+0.97696230365299 -3.92480270763176 0
+-7.83082970823398 -7.91191526652019 2
+-3.00736856610051 5.70163666960614 1
+-1.87511017769397 5.62449960555141 1
+-9.68323206673510 -8.25353931958495 2
+-9.30119933759135 -8.47564800181842 2
+0.32365967414684 -5.10078403493750 0
+-1.74836105433202 5.46645574794978 1
+-0.56064340851208 6.87612506043561 1
+0.67860300499613 -4.17761085385070 0
+-8.20199888805984 -8.29076835439347 2
+-3.05026420956995 8.94223661488021 1
+-8.81193622652183 -7.79813533757767 2
+-9.16862770716234 -7.13275033182281 2
+-4.48296365906822 6.92883992453694 1
+-10.52225224786374 -6.80543393827772 2
+-1.58567165074196 6.89948024038567 1
+-1.75853685207545 6.44534621138642 1
+-9.91452153947266 -8.11181559274489 2
+-1.40077619511942 6.92380628122115 1
+-1.19228020907627 6.14310846867304 1
+0.87541339904821 -5.04555103360224 0
+1.48113771750685 -3.69640708480025 0
+0.52495937648759 6.34480823448348 1
+-0.01369955366371 -4.41397334863602 0
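blobs.txt has the shape of sklearn.datasets.make_blobs output: 100 samples, two features, three cluster labels. A sketch that produces a file of the same layout (the generator parameters and formatting are assumptions; note the label column comes out as a float here while the fixture stores integers):

    import numpy as np
    from sklearn.datasets import make_blobs

    X, y = make_blobs(n_samples=100, centers=3, n_features=2, random_state=0)
    np.savetxt("blobs.txt", np.column_stack([X, y]),
               fmt="%.14f", header="0 1 0", comments="")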
diff -r 000000000000 -r af2624d5ab32 test-data/brier_score_loss.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/brier_score_loss.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+brier_score_loss : 
+0.24051282051282052
diff -r 000000000000 -r af2624d5ab32 test-data/circles.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/circles.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 0
+-0.06279051952931 -0.99802672842827 0
+0.05023241562345 0.79842138274262 1
+-0.99211470131448 -0.12533323356430 0
+0.42577929156507 -0.90482705246602 0
+-0.30901699437495 -0.95105651629515 0
+-1.00000000000000 -0.00000000000000 0
+-0.18738131458572 -0.98228725072869 0
+-0.53582679497900 -0.84432792550202 0
+-0.77486652890290 -0.19895190973188 1
+-0.87630668004386 0.48175367410172 0
+-0.24721359549996 -0.76084521303612 1
+0.80000000000000 0.00000000000000 1
+0.42866143598320 -0.67546234040161 1
+-0.58317490193713 0.54763768474295 1
+0.70104534403509 -0.38540293928137 1
+-0.74382118871060 -0.29449964214774 1
+-0.74382118871060 0.29449964214774 1
+0.80901699437495 0.58778525229247 0
+0.30901699437495 -0.95105651629515 0
+0.18738131458572 0.98228725072869 0
+-0.87630668004386 -0.48175367410172 0
+-0.42866143598320 -0.67546234040161 1
+-0.50993919179895 -0.61641059422063 1
+0.63742398974869 -0.77051324277579 0
+-0.92977648588825 -0.36812455268468 0
+-0.92977648588825 0.36812455268468 0
+-0.96858316112863 0.24868988716485 0
+0.24721359549996 -0.76084521303612 1
+-0.14990505166858 -0.78582980058295 1
+-0.80901699437495 0.58778525229247 0
+-0.63742398974869 -0.77051324277579 0
+0.72896862742141 0.68454710592869 0
+0.92977648588825 0.36812455268468 0
+0.06279051952931 0.99802672842827 0
+0.79369176105158 0.10026658685144 1
+-0.34062343325206 -0.72386164197282 1
+-0.77486652890290 0.19895190973188 1
+-0.14990505166858 0.78582980058295 1
+0.70104534403509 0.38540293928137 1
+-0.50993919179895 0.61641059422063 1
+-0.80000000000000 -0.00000000000000 1
+-0.79369176105158 0.10026658685144 1
+0.50993919179895 0.61641059422063 1
+0.53582679497900 -0.84432792550202 0
+-0.79369176105158 -0.10026658685144 1
+0.79369176105158 -0.10026658685144 1
+-0.53582679497900 0.84432792550201 0
+0.50993919179895 -0.61641059422063 1
+-0.05023241562345 0.79842138274262 1
+1.00000000000000 0.00000000000000 0
+-0.63742398974869 0.77051324277579 0
+0.72896862742141 -0.68454710592869 0
+0.06279051952931 -0.99802672842827 0
+0.80901699437495 -0.58778525229247 0
+0.18738131458573 -0.98228725072869 0
+-0.64721359549996 0.47022820183398 1
+0.58317490193713 -0.54763768474295 1
+-0.80901699437495 -0.58778525229247 0
+-0.70104534403509 0.38540293928137 1
+0.87630668004386 -0.48175367410172 0
+0.58317490193713 0.54763768474295 1
+-0.64721359549996 -0.47022820183398 1
+0.34062343325206 -0.72386164197282 1
+0.05023241562345 -0.79842138274262 1
+-0.72896862742141 0.68454710592869 0
+-0.58317490193713 -0.54763768474295 1
+0.64721359549996 0.47022820183398 1
+0.14990505166858 -0.78582980058295 1
+0.14990505166858 0.78582980058295 1
+-0.24721359549996 0.76084521303612 1
+0.92977648588825 -0.36812455268468 0
+0.99211470131448 -0.12533323356430 0
+0.63742398974869 0.77051324277579 0
+0.74382118871060 -0.29449964214774 1
+0.34062343325206 0.72386164197282 1
+0.64721359549996 -0.47022820183398 1
+-0.06279051952931 0.99802672842827 0
+0.99211470131448 0.12533323356430 0
+-0.72896862742141 -0.68454710592869 0
+0.87630668004386 0.48175367410172 0
+-0.96858316112863 -0.24868988716486 0
+0.96858316112863 0.24868988716485 0
+0.42577929156507 0.90482705246602 0
+-0.42577929156507 0.90482705246602 0
+0.42866143598320 0.67546234040161 1
+0.24721359549996 0.76084521303612 1
+-0.30901699437495 0.95105651629515 0
+0.77486652890290 -0.19895190973188 1
+-0.42577929156507 -0.90482705246602 0
+-0.18738131458572 0.98228725072869 0
+-0.34062343325206 0.72386164197282 1
+0.74382118871060 0.29449964214774 1
+0.77486652890290 0.19895190973188 1
+0.30901699437495 0.95105651629515 0
+0.96858316112863 -0.24868988716485 0
+-0.70104534403509 -0.38540293928137 1
+-0.05023241562345 -0.79842138274262 1
+-0.42866143598320 0.67546234040161 1
+-0.99211470131448 0.12533323356430 0
+0.53582679497900 0.84432792550202 0
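circles.txt matches sklearn.datasets.make_circles with zero noise; the inner ring sitting at radius 0.8 suggests factor=0.8 (an inference from the data, not documented in the changeset):

    from sklearn.datasets import make_circles

    # Two concentric rings of 100 points; inner radius = factor * outer radius.
    X, y = make_circles(n_samples=100, factor=0.8, noise=None, random_state=0)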
diff -r 000000000000 -r af2624d5ab32 test-data/class.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/class.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 0
+1.103931098477063 1.1137324694427062 2.929660893432376 0.8263678474871072 -0.5024439301629023 -0.9778311716440287 -0.6702242261982462 -0.3783418745400049 -1.9100911341304148 0.41080891898717925 1.6359138753477174 -0.3544401787737543 -0.7776883945882607 -0.711126068648103 -1.1632958763488586 2.881523323585383 -0.3331610016599995 1.0249635446624175 -1.204722529676112 0.9686027151980944 1
+-0.5270034201986623 -2.4370266994140035 -0.46398126201591683 0.6724964425333426 0.32128693891873533 -1.4561055975293318 0.9733737109300644 1.2635448363305384 -0.9655190314614323 -0.30440284592936967 0.2928325635717791 -0.642481126749871 -0.17778292517384178 -0.23575096986827987 0.770818433376395 1.002493460919832 0.44402946209787597 0.38673364020325446 -1.0909759530149077 0.4374172416803542 1
+0.6343790937890923 -0.7044557030990274 -1.5479925634100813 -1.1375423986557498 0.7574995244231507 -0.2586895904715146 1.2113185073849615 0.8255591814670258 1.0488550790559334 -0.013557918030451043 -0.36824556412752163 -1.8422341740345995 0.9791413360462421 -0.23658235285975457 0.3758968273279556 -0.7379662029189028 -0.9558490082424093 -0.45167227065102006 -0.13587675227718632 -0.43481791249648283 0
+-0.2749398078895973 -0.602759369823714 -0.34817063773317436 1.2165805903649096 0.08822993442548502 -0.9828118947823061 1.1255554529825982 -0.5951138391567017 1.359567367140958 1.14745743851399 -2.2691569946862655 0.9270532988002531 -1.28390481061431 0.702184505359777 1.1599689740750685 -0.7022781266128805 -1.5820069707072104 -0.1640254026760564 -0.6268539047283007 -0.5343960171949464 0
+-0.8451664655381013 0.9592831641658773 0.29032122469609184 1.4456183940991385 -2.2668849557948265 0.49356800079005453 0.9973927328851383 -1.7077448427289017 -1.525140006218017 -0.2628130337984583 -0.6987088119151889 0.12372879270054708 -0.37829745272534815 -0.0010588423370812654 3.1974829539733727 1.7610392441369824 0.461991697252764 -0.8707192095484595 0.4949902726326138 0.7113500316301005 1
+-0.6479921130452116 -0.9442706004373587 0.20181386383006028 -1.0034745347115275 -0.9369221110721804 -1.003380717730042 -0.7275212508545039 -0.1820208348243829 0.869148773329888 -0.7855214383236936 0.1360612935062583 2.0654861372867295 -1.2399203282859266 -0.44615385943239716 1.7347311831934773 -0.6314619246803259 -0.76518919295205 1.2707549044789055 -0.7323378102483927 -0.3626096934734513 0
+-0.06451309551365764 -0.7249330776348837 0.5963143554325262 -1.379225616134922 1.1667980284973485 -2.274070053731467 0.7411405179848544 -0.6631329812615014 -1.567155162052582 -0.09527290234272089 -0.7316650418582739 -1.0020134142607244 -0.6953396335230776 1.5807860908438993 0.3379535699933314 1.8800551896643136 0.37962716233848903 0.5363444440333102 0.1390867505544731 0.7390508093906831 1
+0.7576049876525334 0.8726631262318649 0.8478637181249223 0.30198299200599726 1.0101338828657191 -1.3836221562341127 1.0376123351490436 1.0937481979752155 1.3929535047023875 0.8760511854123076 -1.2981174812942935 0.3025477016355275 -0.14253519602584672 1.2887025562956078 2.1562199933480133 -1.0111580468681463 -1.2275056029861684 -0.2688763993683175 -2.2719054986176683 -0.5810422898079113 0
+2.5394320331114613 0.46034921066168377 0.8315330299051433 -0.9396024430587621 -0.37614736761593637 -0.17996331764913345 1.455421460737774 1.5223077678776793 1.1770030840483332 0.40359841542535574 -0.03766667059723912 0.2415068878754467 -0.44558826380657596 1.2774520318648948 -1.8848343873195796 -0.23434224565939143 -1.8735210102773319 2.299369468755593 0.11182257854217889 -0.41968753568332984 0
+-1.1654317335035704 -0.23406889069910192 -1.3485118844184532 0.7912949804001552 -0.19206908223922012 -2.752037662677927 -1.6014139415281856 -0.5108631934878929 1.4041570989659866 0.5382460975045578 0.012866884184724063 0.7910261496852212 -0.5285056361126661 -2.874968879865529 -0.8428605517089753 -1.4276668142409976 -0.6865704170544349 0.8660591728218054 1.176952513690 .. 153635800724 0.6364125215344348 0.5362734706812686 1
+1.0759409181533681 0.6338708137850724 1.059455516811933 0.2736075032324234 -1.1004879462237114 0.8983820725024066 -0.9152704846639929 -0.8347039847535137 -1.3994538124984017 0.06937008395653746 -0.4322117530530746 -1.297471755359271 -0.9256383920977915 -1.5287869947378168 0.46665199638203264 1.3984163949968078 0.7172731124783118 -2.1595920504682318 0.2178924553288528 0.627726734926914 1
+1.1631257343736865 0.7161109143496656 1.165181781246556 -0.0970197604214342 1.770668260834617 0.09786380091576943 -0.25203469271235573 -0.07117035012372852 1.2621614052889216 -2.204226920077547 -0.833481645415412 1.668179441254334 0.6299876168291397 -0.4391047192362273 -0.12336287720355432 -2.4752753514344055 0.9905764766530935 0.16824138572933983 -1.108371640458861 -0.7056991628790823 0
+-0.4653767839296524 0.5706552646301977 -1.2510825198094822 -0.38542737502404606 0.5418393251037328 0.8696564647003973 -0.2677426807372017 1.3874400614164746 -1.6989225614176242 -0.8543980754353178 -0.7126300388983264 0.39242735549607893 0.7427861661062981 0.23731164772086588 0.17840259925316965 2.264950231927068 0.10561848543619334 1.7893962060023398 -0.33937719999794 0.8272635120183163 1
+1.0658262297925543 0.2245144207327693 1.9979515177687335 -1.3687162010707115 -1.1274591498928925 0.6453464430821444 0.10571095020938731 -0.04489492214522473 0.4070092579150457 -1.6549967992364703 -0.1861816445428681 -1.0013467840435817 0.13042091725382485 -0.9328609421342365 1.4771353822876396 1.0854915441340736 -2.221251309417225 0.5725567515972323 -1.1577200461261594 -0.011036089287608658 1
+-1.0583794427218747 2.576977679031155 -0.5895820679190702 0.13438281144361666 0.36102541634537905 1.5183620699261768 1.5873212424728582 -0.7273069057149364 0.4522026560345715 -0.02860552628379647 -0.018212347104613166 0.687677616154882 0.5422573331869172 0.10659762229930982 -1.2522775141080984 0.7277335248049872 -1.8227895144219035 -0.7301662802248373 0.9715535632493052 -0.0672408254641321 1
+-0.1099953959208559 1.6635363107373078 0.3272453529764515 -1.4246555886796946 1.2410820871966046 -0.15951736500333072 -0.661937714925914 0.4234572818376501 1.1246881843788494 0.9529594279919252 0.39143861927191975 3.465227148479317 -0.24134874955198468 -1.0945571896156956 -0.9833626436429376 -1.480187693017323 -0.09583127396217472 -0.31134706056867467 -0.6248721853412322 -0.5454408106982881 0
+0.9291001132966914 -0.1708304076874391 0.5364439368681257 0.2630766894332881 -0.1295965590136687 0.9929416493373554 0.7904280904722739 -0.01912275129904966 1.5057113544481104 -1.9314128569290476 -0.40508326392063543 1.0918159072154612 0.1881369570559398 -1.213691539345214 0.02421534060406341 -1.96631401509566 -0.14897841915958698 -2.1313146599852018 -1.2710579854942345 -0.7284633084773273 0
+0.6336131127287113 2.0333233170635046 -0.022356711144941453 -0.22007309599774338 0.9282123550423084 -0.787901129200937 0.5812629099886915 0.377426024051308 0.15067520175237897 -2.340925516401822 0.07371157701560777 1.560723423781778 -0.38910754054643126 1.0173191686261756 -0.4198460795464502 -0.4257545472403689 0.2939445657648525 0.6855820937261274 -2.068890495355913 -0.09921878204870066 0
+2.049778771444076 1.3048378295965286 1.563792608618236 -0.7047392202425459 0.5499305970570395 -0.04884518704139992 0.5223109585785488 -1.4893434370374596 1.3606389947395752 0.3899429971033616 0.055686488142052015 0.8438100462780511 1.6850310129308619 1.2652993760910154 -0.2279594058376745 -1.9365760629271713 0.0807919955941725 -0.6380407350109051 -1.0466273798176675 -0.6766362607223333 0
+0.630742979769623 -0.12660063112597814 -1.1219892377344292 -0.24320231504242704 -0.11846930012185257 0.35618373486097415 -0.35432027228237667 0.6830976831702715 -1.2988376519016114 -0.12917328933680922 0.4878147649765918 1.6226344780340827 0.46020710543895615 -0.9537377215409267 0.8308526010187456 1.1069055404414496 0.9232784698807094 -1.2718116679596179 -0.5666412777157238 0.5606432963172591 1
diff -r 000000000000 -r af2624d5ab32 test-data/classification_report.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/classification_report.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,11 @@
+classification_report : 
+              precision    recall  f1-score   support
+
+           0       1.00      1.00      1.00        14
+           1       1.00      0.62      0.77        16
+           2       0.60      1.00      0.75         9
+
+    accuracy                           0.85        39
+   macro avg       0.87      0.88      0.84        39
+weighted avg       0.91      0.85      0.85        39
+
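The report above can be reproduced from its own numbers: the supports (14/16/9) and the per-class precision/recall imply the label vectors below (a reconstruction for illustration, not the original inputs):

    from sklearn.metrics import classification_report

    y_true = [0] * 14 + [1] * 16 + [2] * 9
    # six class-1 samples predicted as class 2, everything else correct
    y_pred = [0] * 14 + [1] * 10 + [2] * 6 + [2] * 9
    print(classification_report(y_true, y_pred))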
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result01.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result01.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
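In the cluster_result* fixtures the first four columns are the input table (a group id plus three features) and the trailing column is the predicted cluster label; the files differ mainly in which algorithm produced the labels and in the arbitrary label numbering. A generic sketch of appending such a column (KMeans here is illustrative only, not necessarily the tool behind each file):

    import numpy as np
    from sklearn.cluster import KMeans

    data = np.loadtxt("input.tabular")  # placeholder path, 4 columns
    labels = KMeans(n_clusters=4, random_state=0).fit_predict(data)
    np.savetxt("cluster_result.txt", np.column_stack([data, labels]), fmt="%g")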
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result02.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result02.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 3
+0 51 48 -73 3
+0 58 65 -49 3
+0 43 61 -49 3
+0 45 43 -79 3
+0 42 60 -98 3
+0 50 55 -59 3
+0 53 53 -56 3
+0 45 44 -61 3
+0 43 65 -84 3
+0 35 52 -75 3
+0 56 56 -70 3
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 0
+2 -63 9 -106 0
+2 -79 9 -93 0
+2 -99 26 -108 0
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 0
+2 -88 2 -106 0
+2 -88 15 -103 0
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result03.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result03.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 -1
+0 51 48 -73 -1
+0 58 65 -49 -1
+0 43 61 -49 -1
+0 45 43 -79 -1
+0 42 60 -98 -1
+0 50 55 -59 -1
+0 53 53 -56 -1
+0 45 44 -61 -1
+0 43 65 -84 -1
+0 35 52 -75 -1
+0 56 56 -70 -1
+1 -61 86 43 -1
+1 -67 93 15 -1
+1 -59 94 36 -1
+1 -50 92 62 -1
+1 -78 91 70 -1
+1 -35 87 47 -1
+1 -56 91 52 -1
+1 -61 81 46 -1
+1 -83 78 34 -1
+1 -50 87 45 -1
+1 -67 73 50 -1
+1 -50 97 45 -1
+1 -61 111 45 -1
+2 -109 23 -92 -1
+2 -94 20 -96 -1
+2 -85 26 -88 -1
+2 -90 33 -114 -1
+2 -63 9 -106 -1
+2 -79 9 -93 -1
+2 -99 26 -108 -1
+2 -81 19 -110 -1
+2 -108 21 -108 -1
+2 -92 27 -106 -1
+2 -88 2 -106 -1
+2 -88 15 -103 -1
+3 54 -74 4 -1
+3 42 -92 31 -1
+3 39 -99 -7 -1
+3 48 -115 -5 -1
+3 39 -96 2 -1
+3 31 -109 9 -1
+3 33 -96 -8 -1
+3 23 -102 4 -1
+3 38 -90 21 -1
+3 34 -107 1 -1
+3 35 -78 18 -1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result04.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result04.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 1
+0 51 48 -73 1
+0 58 65 -49 1
+0 43 61 -49 1
+0 45 43 -79 1
+0 42 60 -98 1
+0 50 55 -59 1
+0 53 53 -56 1
+0 45 44 -61 1
+0 43 65 -84 1
+0 35 52 -75 1
+0 56 56 -70 1
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 3
+2 -94 20 -96 3
+2 -85 26 -88 3
+2 -90 33 -114 3
+2 -63 9 -106 3
+2 -79 9 -93 3
+2 -99 26 -108 3
+2 -81 19 -110 3
+2 -108 21 -108 3
+2 -92 27 -106 3
+2 -88 2 -106 3
+2 -88 15 -103 3
+3 54 -74 4 0
+3 42 -92 31 0
+3 39 -99 -7 0
+3 48 -115 -5 0
+3 39 -96 2 0
+3 31 -109 9 0
+3 33 -96 -8 0
+3 23 -102 4 0
+3 38 -90 21 0
+3 34 -107 1 0
+3 35 -78 18 0
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result05.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result05.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 0
+2 -63 9 -106 0
+2 -79 9 -93 0
+2 -99 26 -108 0
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 0
+2 -88 2 -106 0
+2 -88 15 -103 0
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result06.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result06.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 1
+1 -67 93 15 1
+1 -59 94 36 1
+1 -50 92 62 1
+1 -78 91 70 1
+1 -35 87 47 1
+1 -56 91 52 1
+1 -61 81 46 1
+1 -83 78 34 1
+1 -50 87 45 1
+1 -67 73 50 1
+1 -50 97 45 1
+1 -61 111 45 1
+2 -109 23 -92 2
+2 -94 20 -96 2
+2 -85 26 -88 2
+2 -90 33 -114 2
+2 -63 9 -106 2
+2 -79 9 -93 2
+2 -99 26 -108 2
+2 -81 19 -110 2
+2 -108 21 -108 2
+2 -92 27 -106 2
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result07.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result07.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 1
+1 -67 93 15 1
+1 -59 94 36 1
+1 -50 92 62 1
+1 -78 91 70 1
+1 -35 87 47 1
+1 -56 91 52 1
+1 -61 81 46 1
+1 -83 78 34 1
+1 -50 87 45 1
+1 -67 73 50 1
+1 -50 97 45 1
+1 -61 111 45 1
+2 -109 23 -92 2
+2 -94 20 -96 2
+2 -85 26 -88 2
+2 -90 33 -114 2
+2 -63 9 -106 2
+2 -79 9 -93 2
+2 -99 26 -108 2
+2 -81 19 -110 2
+2 -108 21 -108 2
+2 -92 27 -106 2
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result08.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result08.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 0
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 0
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 0
+2 -63 9 -106 0
+2 -79 9 -93 0
+2 -99 26 -108 0
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 0
+2 -88 2 -106 0
+2 -88 15 -103 0
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result09.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result09.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 0
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 0
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 0
+2 -63 9 -106 0
+2 -79 9 -93 0
+2 -99 26 -108 0
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 0
+2 -88 2 -106 0
+2 -88 15 -103 0
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result10.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result10.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 3
+0 51 48 -73 3
+0 58 65 -49 3
+0 43 61 -49 3
+0 45 43 -79 3
+0 42 60 -98 3
+0 50 55 -59 3
+0 53 53 -56 3
+0 45 44 -61 3
+0 43 65 -84 3
+0 35 52 -75 3
+0 56 56 -70 3
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 0
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 0
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 2
+2 -94 20 -96 2
+2 -85 26 -88 2
+2 -90 33 -114 2
+2 -63 9 -106 2
+2 -79 9 -93 2
+2 -99 26 -108 2
+2 -81 19 -110 2
+2 -108 21 -108 2
+2 -92 27 -106 2
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result11.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result11.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 2
+0 51 48 -73 2
+0 58 65 -49 2
+0 43 61 -49 2
+0 45 43 -79 2
+0 42 60 -98 2
+0 50 55 -59 2
+0 53 53 -56 2
+0 45 44 -61 2
+0 43 65 -84 2
+0 35 52 -75 2
+0 56 56 -70 2
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 0
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 0
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result12.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result12.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 1
+0 51 48 -73 1
+0 58 65 -49 1
+0 43 61 -49 0
+0 45 43 -79 1
+0 42 60 -98 1
+0 50 55 -59 1
+0 53 53 -56 1
+0 45 44 -61 0
+0 43 65 -84 1
+0 35 52 -75 1
+0 56 56 -70 1
+1 -61 86 43 2
+1 -67 93 15 1
+1 -59 94 36 1
+1 -50 92 62 0
+1 -78 91 70 1
+1 -35 87 47 1
+1 -56 91 52 0
+1 -61 81 46 2
+1 -83 78 34 1
+1 -50 87 45 0
+1 -67 73 50 1
+1 -50 97 45 0
+1 -61 111 45 1
+2 -109 23 -92 0
+2 -94 20 -96 3
+2 -85 26 -88 3
+2 -90 33 -114 3
+2 -63 9 -106 0
+2 -79 9 -93 1
+2 -99 26 -108 3
+2 -81 19 -110 3
+2 -108 21 -108 3
+2 -92 27 -106 3
+2 -88 2 -106 0
+2 -88 15 -103 3
+3 54 -74 4 1
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 1
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result13.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result13.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 4
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 1
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 2
+0 53 53 -56 2
+0 45 44 -61 0
+0 43 65 -84 4
+0 35 52 -75 1
+0 56 56 -70 0
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 1
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 1
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 1
+2 -63 9 -106 0
+2 -79 9 -93 1
+2 -99 26 -108 3
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 3
+2 -88 2 -106 1
+2 -88 15 -103 0
+3 54 -74 4 0
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 0
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result14.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result14.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 2
+0 51 48 -73 2
+0 58 65 -49 5
+0 43 61 -49 5
+0 45 43 -79 2
+0 42 60 -98 2
+0 50 55 -59 5
+0 53 53 -56 5
+0 45 44 -61 2
+0 43 65 -84 2
+0 35 52 -75 2
+0 56 56 -70 2
+1 -61 86 43 1
+1 -67 93 15 1
+1 -59 94 36 1
+1 -50 92 62 1
+1 -78 91 70 7
+1 -35 87 47 1
+1 -56 91 52 1
+1 -61 81 46 7
+1 -83 78 34 7
+1 -50 87 45 1
+1 -67 73 50 7
+1 -50 97 45 1
+1 -61 111 45 1
+2 -109 23 -92 6
+2 -94 20 -96 6
+2 -85 26 -88 6
+2 -90 33 -114 6
+2 -63 9 -106 3
+2 -79 9 -93 3
+2 -99 26 -108 6
+2 -81 19 -110 6
+2 -108 21 -108 6
+2 -92 27 -106 6
+2 -88 2 -106 3
+2 -88 15 -103 6
+3 54 -74 4 4
+3 42 -92 31 4
+3 39 -99 -7 0
+3 48 -115 -5 0
+3 39 -96 2 0
+3 31 -109 9 0
+3 33 -96 -8 0
+3 23 -102 4 0
+3 38 -90 21 4
+3 34 -107 1 0
+3 35 -78 18 4
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result15.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result15.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 1
+0 51 48 -73 1
+0 58 65 -49 1
+0 43 61 -49 1
+0 45 43 -79 1
+0 42 60 -98 1
+0 50 55 -59 1
+0 53 53 -56 1
+0 45 44 -61 1
+0 43 65 -84 1
+0 35 52 -75 1
+0 56 56 -70 1
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 3
+2 -94 20 -96 3
+2 -85 26 -88 3
+2 -90 33 -114 3
+2 -63 9 -106 3
+2 -79 9 -93 3
+2 -99 26 -108 3
+2 -81 19 -110 3
+2 -108 21 -108 3
+2 -92 27 -106 3
+2 -88 2 -106 3
+2 -88 15 -103 3
+3 54 -74 4 0
+3 42 -92 31 0
+3 39 -99 -7 0
+3 48 -115 -5 0
+3 39 -96 2 0
+3 31 -109 9 0
+3 33 -96 -8 0
+3 23 -102 4 0
+3 38 -90 21 0
+3 34 -107 1 0
+3 35 -78 18 0
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result16.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result16.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 3
+2 -94 20 -96 3
+2 -85 26 -88 3
+2 -90 33 -114 3
+2 -63 9 -106 3
+2 -79 9 -93 3
+2 -99 26 -108 3
+2 -81 19 -110 3
+2 -108 21 -108 3
+2 -92 27 -106 3
+2 -88 2 -106 3
+2 -88 15 -103 3
+3 54 -74 4 1
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 1
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result17.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result17.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0
+1
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result18.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result18.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+-1
+-1
+-1
+-1
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result19.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result19.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0
+1
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result20.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result20.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0
+1
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/cluster_result21.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/cluster_result21.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0
+1
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/confusion_matrix.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/confusion_matrix.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+confusion_matrix : 
+[[14  0  0]
+ [ 0 10  6]
+ [ 0  0  9]]
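This matrix is consistent with classification_report.txt and accuracy_score.txt above: the row sums 14/16/9 equal the per-class supports, and the trace (14 + 10 + 9 = 33) over 39 samples gives 33/39 = 0.846. Using the same reconstructed labels as before:

    from sklearn.metrics import confusion_matrix

    y_true = [0] * 14 + [1] * 16 + [2] * 9
    y_pred = [0] * 14 + [1] * 10 + [2] * 15
    print(confusion_matrix(y_true, y_pred))  # [[14 0 0] [0 10 6] [0 0 9]]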
diff -r 000000000000 -r af2624d5ab32 test-data/converter_result01.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/converter_result01.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+{"directed": false, "graph": {"info": "RNAfold", "id": "CP000097.1/1411351-1411410", "structure": "....((((.((((((....)))))).)..)))...(((((((..(...)..)).))))).", "sequence": "CAACGUUCACCUCACAUUUGUGAGGCGCAGACAACCCAGGCCAAGGAACGGGGACCUGGA"}, "nodes": [{"position": 0, "id": 0, "label": "C"}, {"position": 1, "id": 1, "label": "A"}, {"position": 2, "id": 2, "label": "A"}, {"position": 3, "id": 3, "label": "C"}, {"position": 4, "id": 4, "label": "G"}, {"position": 5, "id": 5, "label": "U"}, {"position": 6, "id": 6, "label": "U"}, {"position": 7, "id": 7, "label": "C"}, {"position": 8, "id": 8, "label": "A"}, {"position": 9, "id": 9, "label": "C"}, {"position": 10, "id": 10, "label": "C"}, {"position": 11, "id": 11, "label": "U"}, {"position": 12, "id": 12, "label": "C"}, {"position": 13, "id": 13, "label": "A"}, {"position": 14, "id": 14, "label": "C"}, {"position": 15, "id": 15, "label": "A"}, {"position": 16, "id": 16, "label": "U"}, {"position": 17, "id": 17, "label": "U"}, {"position": 18, "id": 18, "label": "U"}, {"position": 19, "id": 19, "label": "G"}, {"position": 20, "id": 20, "label": "U"}, {"position": 21, "id": 21, "label": "G"}, {"position": 22, "id": 22, "label": "A"}, {"position": 23, "id": 23, "label": "G"}, {"position": 24, "id": 24, "label": "G"}, {"position": 25, "id": 25, "label": "C"}, {"position": 26, "id": 26, "label": "G"}, {"position": 27, "id": 27, "label": "C"}, {"position": 28, "id": 28, "label": "A"}, {"position": 29, "id": 29, "label": "G"}, {"position": 30, "id": 30, "label": "A"}, {"position": 31, "id": 31, "label": "C"}, {"position": 32, "id": 32, "label": "A"}, {"position": 33, "id": 33, "label": "A"}, {"position": 34, "id": 34, "label": "C"}, {"position": 35, "id": 35, "label": "C"}, {"position": 36, "id": 36, "label": "C"}, {"position": 37, "id": 37, "label": "A"}, {"position": 38, "id": 38, "label": "G"}, {"position": 39, "id": 39, "label": "G"}, {"position": 40, "id": 40, "label": "C"}, {"position": 41, "id": 41, "label": "C"}, {"position": 42, "id": 42, "label": "A"}, {"position": 43, "id": 43, "label": "A"}, {"position": 44, "id": 44, "label": "G"}, {"position": 45, "id": 45, "label": "G"}, {"position": 46, "id": 46, "label": "A"}, {"position": 47, "id": 47, "label": "A"}, {"position": 48, "id": 48, "label": "C"}, {"position": 49, "id": 49, "label": "G"}, {"position": 50, "id": 50, "label": "G"}, {"position": 51, "id": 51, "label": "G"}, {"position": 52, "id": 52, "label": "G"}, {"position": 53, "id": 53, "label": "A"}, {"position": 54, "id": 54, "label": "C"}, {"position": 55, "id": 55, "label": "C"}, {"position": 56, "id": 56, "label": "U"}, {"position": 57, "id": 57, "label": "G"}, {"position": 58, "id": 58, "label": "G"}, {"position": 59, "id": 59, "label": "A"}], "links": [{"source": 0, "type": "backbone", "target": 1, "len": 1, "label": "-"}, {"source": 1, "type": "backbone", "target": 2, "len": 1, "label": "-"}, {"source": 2, "type": "backbone", "target": 3, "len": 1, "label": "-"}, {"source": 3, "type": "backbone", "target": 4, "len": 1, "label": "-"}, {"source": 4, "type": "backbone", "target": 5, "len": 1, "label": "-"}, {"source": 4, "type": "basepair", "target": 31, "len": 1, "label": "="}, {"source": 5, "type": "basepair", "target": 30, "len": 1, "label": "="}, {"source": 5, "type": "backbone", "target": 6, "len": 1, "label": "-"}, {"source": 6, "type": "basepair", "target": 29, "len": 1, "label": "="}, {"source": 6, "type": "backbone", "target": 7, "len": 1, "label": "-"}, {"source": 7, "type": "backbone", "target": 8, "len": 1, "label": "-"}, {"source": 7, "type": "basepair", "target": 26, "len": 1, "label": "="}, {"source": 8, "type": "backbone", "target": 9, "len": 1, "label": "-"}, {"source": 9, "type": "basepair", "target": 24, "len": 1, "label": "="}, {"source": 9, "type": "backbone", "target": 10, "len": 1, "label": "-"}, {"source": 10, "type": "backbone", "target": 11, "len": 1, "label": "-"}, {"source": 10, "type": "basepair", "target": 23, "len": 1, "la .. e": "backbone", "target": 16, "len": 1, "label": "-"}, {"source": 16, "type": "backbone", "target": 17, "len": 1, "label": "-"}, {"source": 17, "type": "backbone", "target": 18, "len": 1, "label": "-"}, {"source": 18, "type": "backbone", "target": 19, "len": 1, "label": "-"}, {"source": 19, "type": "backbone", "target": 20, "len": 1, "label": "-"}, {"source": 20, "type": "backbone", "target": 21, "len": 1, "label": "-"}, {"source": 21, "type": "backbone", "target": 22, "len": 1, "label": "-"}, {"source": 22, "type": "backbone", "target": 23, "len": 1, "label": "-"}, {"source": 23, "type": "backbone", "target": 24, "len": 1, "label": "-"}, {"source": 24, "type": "backbone", "target": 25, "len": 1, "label": "-"}, {"source": 25, "type": "backbone", "target": 26, "len": 1, "label": "-"}, {"source": 26, "type": "backbone", "target": 27, "len": 1, "label": "-"}, {"source": 27, "type": "backbone", "target": 28, "len": 1, "label": "-"}, {"source": 28, "type": "backbone", "target": 29, "len": 1, "label": "-"}, {"source": 29, "type": "backbone", "target": 30, "len": 1, "label": "-"}, {"source": 30, "type": "backbone", "target": 31, "len": 1, "label": "-"}, {"source": 31, "type": "backbone", "target": 32, "len": 1, "label": "-"}, {"source": 32, "type": "backbone", "target": 33, "len": 1, "label": "-"}, {"source": 33, "type": "backbone", "target": 34, "len": 1, "label": "-"}, {"source": 34, "type": "backbone", "target": 35, "len": 1, "label": "-"}, {"source": 35, "type": "backbone", "target": 36, "len": 1, "label": "-"}, {"source": 36, "type": "backbone", "target": 37, "len": 1, "label": "-"}, {"source": 36, "type": "basepair", "target": 61, "len": 1, "label": "="}, {"source": 37, "type": "basepair", "target": 60, "len": 1, "label": "="}, {"source": 37, "type": "backbone", "target": 38, "len": 1, "label": "-"}, {"source": 38, "type": "basepair", "target": 59, "len": 1, "label": "="}, {"source": 38, "type": "backbone", "target": 39, "len": 1, "label": "-"}, {"source": 39, "type": "backbone", "target": 40, "len": 1, "label": "-"}, {"source": 39, "type": "basepair", "target": 58, "len": 1, "label": "="}, {"source": 40, "type": "backbone", "target": 41, "len": 1, "label": "-"}, {"source": 40, "type": "basepair", "target": 57, "len": 1, "label": "="}, {"source": 41, "type": "basepair", "target": 56, "len": 1, "label": "="}, {"source": 41, "type": "backbone", "target": 42, "len": 1, "label": "-"}, {"source": 42, "type": "backbone", "target": 43, "len": 1, "label": "-"}, {"source": 42, "type": "basepair", "target": 54, "len": 1, "label": "="}, {"source": 43, "type": "backbone", "target": 44, "len": 1, "label": "-"}, {"source": 43, "type": "basepair", "target": 53, "len": 1, "label": "="}, {"source": 44, "type": "backbone", "target": 45, "len": 1, "label": "-"}, {"source": 45, "type": "backbone", "target": 46, "len": 1, "label": "-"}, {"source": 46, "type": "backbone", "target": 47, "len": 1, "label": "-"}, {"source": 47, "type": "backbone", "target": 48, "len": 1, "label": "-"}, {"source": 48, "type": "backbone", "target": 49, "len": 1, "label": "-"}, {"source": 49, "type": "backbone", "target": 50, "len": 1, "label": "-"}, {"source": 50, "type": "backbone", "target": 51, "len": 1, "label": "-"}, {"source": 51, "type": "backbone", "target": 52, "len": 1, "label": "-"}, {"source": 52, "type": "backbone", "target": 53, "len": 1, "label": "-"}, {"source": 53, "type": "backbone", "target": 54, "len": 1, "label": "-"}, {"source": 54, "type": "backbone", "target": 55, "len": 1, "label": "-"}, {"source": 55, "type": "backbone", "target": 56, "len": 1, "label": "-"}, {"source": 56, "type": "backbone", "target": 57, "len": 1, "label": "-"}, {"source": 57, "type": "backbone", "target": 58, "len": 1, "label": "-"}, {"source": 58, "type": "backbone", "target": 59, "len": 1, "label": "-"}, {"source": 59, "type": "backbone", "target": 60, "len": 1, "label": "-"}, {"source": 60, "type": "backbone", "target": 61, "len": 1, "label": "-"}], "multigraph": false}
diff -r 000000000000 -r af2624d5ab32 test-data/converter_result02.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/converter_result02.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+{"directed": false, "graph": {"info": "RNAshapes shape_type=2 energy_range=4 max_num=3", "id": "CP000097.1/1411351-1411410_[_[]_][[]_]", "structure": "....(((..((((((....))))))....)))...(((((((.........)).))))).", "sequence": "CAACGUUCACCUCACAUUUGUGAGGCGCAGACAACCCAGGCCAAGGAACGGGGACCUGGA"}, "nodes": [{"position": 0, "id": 0, "label": "C"}, {"position": 1, "id": 1, "label": "A"}, {"position": 2, "id": 2, "label": "A"}, {"position": 3, "id": 3, "label": "C"}, {"position": 4, "id": 4, "label": "G"}, {"position": 5, "id": 5, "label": "U"}, {"position": 6, "id": 6, "label": "U"}, {"position": 7, "id": 7, "label": "C"}, {"position": 8, "id": 8, "label": "A"}, {"position": 9, "id": 9, "label": "C"}, {"position": 10, "id": 10, "label": "C"}, {"position": 11, "id": 11, "label": "U"}, {"position": 12, "id": 12, "label": "C"}, {"position": 13, "id": 13, "label": "A"}, {"position": 14, "id": 14, "label": "C"}, {"position": 15, "id": 15, "label": "A"}, {"position": 16, "id": 16, "label": "U"}, {"position": 17, "id": 17, "label": "U"}, {"position": 18, "id": 18, "label": "U"}, {"position": 19, "id": 19, "label": "G"}, {"position": 20, "id": 20, "label": "U"}, {"position": 21, "id": 21, "label": "G"}, {"position": 22, "id": 22, "label": "A"}, {"position": 23, "id": 23, "label": "G"}, {"position": 24, "id": 24, "label": "G"}, {"position": 25, "id": 25, "label": "C"}, {"position": 26, "id": 26, "label": "G"}, {"position": 27, "id": 27, "label": "C"}, {"position": 28, "id": 28, "label": "A"}, {"position": 29, "id": 29, "label": "G"}, {"position": 30, "id": 30, "label": "A"}, {"position": 31, "id": 31, "label": "C"}, {"position": 32, "id": 32, "label": "A"}, {"position": 33, "id": 33, "label": "A"}, {"position": 34, "id": 34, "label": "C"}, {"position": 35, "id": 35, "label": "C"}, {"position": 36, "id": 36, "label": "C"}, {"position": 37, "id": 37, "label": "A"}, {"position": 38, "id": 38, "label": "G"}, {"position": 39, "id": 39, "label": "G"}, {"position": 40, "id": 40, "label": "C"}, {"position": 41, "id": 41, "label": "C"}, {"position": 42, "id": 42, "label": "A"}, {"position": 43, "id": 43, "label": "A"}, {"position": 44, "id": 44, "label": "G"}, {"position": 45, "id": 45, "label": "G"}, {"position": 46, "id": 46, "label": "A"}, {"position": 47, "id": 47, "label": "A"}, {"position": 48, "id": 48, "label": "C"}, {"position": 49, "id": 49, "label": "G"}, {"position": 50, "id": 50, "label": "G"}, {"position": 51, "id": 51, "label": "G"}, {"position": 52, "id": 52, "label": "G"}, {"position": 53, "id": 53, "label": "A"}, {"position": 54, "id": 54, "label": "C"}, {"position": 55, "id": 55, "label": "C"}, {"position": 56, "id": 56, "label": "U"}, {"position": 57, "id": 57, "label": "G"}, {"position": 58, "id": 58, "label": "G"}, {"position": 59, "id": 59, "label": "A"}], "links": [{"source": 0, "type": "backbone", "target": 1, "len": 1, "label": "-"}, {"source": 1, "type": "backbone", "target": 2, "len": 1, "label": "-"}, {"source": 2, "type": "backbone", "target": 3, "len": 1, "label": "-"}, {"source": 3, "type": "backbone", "target": 4, "len": 1, "label": "-"}, {"source": 4, "type": "backbone", "target": 5, "len": 1, "label": "-"}, {"source": 4, "type": "basepair", "target": 31, "len": 1, "label": "="}, {"source": 5, "type": "basepair", "target": 30, "len": 1, "label": "="}, {"source": 5, "type": "backbone", "target": 6, "len": 1, "label": "-"}, {"source": 6, "type": "basepair", "target": 29, "len": 1, "label": "="}, {"source": 6, "type": "backbone", "target": 7, "len": 1, "label": "-"}, {"source": 7, "type": "backbone", "target": 8, "len": 1, "label": "-"}, {"source": 8, "type": "backbone", "target": 9, "len": 1, "label": "-"}, {"source": 9, "type": "basepair", "target": 24, "len": 1, "label": "="}, {"source": 9, "type": "backbone", "target": 10, "len": 1, "label": "-"}, {"source": 10, "type": "backbone", "target": 11, "len": 1, "label": "-"}, {"source": 10, "type": "basepair", "target": 23, "len": 1, "label": "="}, {"source" .. e": "backbone", "target": 16, "len": 1, "label": "-"}, {"source": 16, "type": "backbone", "target": 17, "len": 1, "label": "-"}, {"source": 17, "type": "backbone", "target": 18, "len": 1, "label": "-"}, {"source": 18, "type": "backbone", "target": 19, "len": 1, "label": "-"}, {"source": 19, "type": "backbone", "target": 20, "len": 1, "label": "-"}, {"source": 20, "type": "backbone", "target": 21, "len": 1, "label": "-"}, {"source": 21, "type": "backbone", "target": 22, "len": 1, "label": "-"}, {"source": 22, "type": "backbone", "target": 23, "len": 1, "label": "-"}, {"source": 23, "type": "backbone", "target": 24, "len": 1, "label": "-"}, {"source": 24, "type": "backbone", "target": 25, "len": 1, "label": "-"}, {"source": 25, "type": "backbone", "target": 26, "len": 1, "label": "-"}, {"source": 26, "type": "backbone", "target": 27, "len": 1, "label": "-"}, {"source": 27, "type": "backbone", "target": 28, "len": 1, "label": "-"}, {"source": 28, "type": "backbone", "target": 29, "len": 1, "label": "-"}, {"source": 29, "type": "backbone", "target": 30, "len": 1, "label": "-"}, {"source": 30, "type": "backbone", "target": 31, "len": 1, "label": "-"}, {"source": 31, "type": "backbone", "target": 32, "len": 1, "label": "-"}, {"source": 32, "type": "backbone", "target": 33, "len": 1, "label": "-"}, {"source": 33, "type": "backbone", "target": 34, "len": 1, "label": "-"}, {"source": 34, "type": "backbone", "target": 35, "len": 1, "label": "-"}, {"source": 35, "type": "backbone", "target": 36, "len": 1, "label": "-"}, {"source": 36, "type": "backbone", "target": 37, "len": 1, "label": "-"}, {"source": 36, "type": "basepair", "target": 61, "len": 1, "label": "="}, {"source": 37, "type": "basepair", "target": 60, "len": 1, "label": "="}, {"source": 37, "type": "backbone", "target": 38, "len": 1, "label": "-"}, {"source": 38, "type": "basepair", "target": 59, "len": 1, "label": "="}, {"source": 38, "type": "backbone", "target": 39, "len": 1, "label": "-"}, {"source": 39, "type": "backbone", "target": 40, "len": 1, "label": "-"}, {"source": 39, "type": "basepair", "target": 58, "len": 1, "label": "="}, {"source": 40, "type": "backbone", "target": 41, "len": 1, "label": "-"}, {"source": 40, "type": "basepair", "target": 57, "len": 1, "label": "="}, {"source": 41, "type": "basepair", "target": 56, "len": 1, "label": "="}, {"source": 41, "type": "backbone", "target": 42, "len": 1, "label": "-"}, {"source": 42, "type": "backbone", "target": 43, "len": 1, "label": "-"}, {"source": 42, "type": "basepair", "target": 54, "len": 1, "label": "="}, {"source": 43, "type": "backbone", "target": 44, "len": 1, "label": "-"}, {"source": 43, "type": "basepair", "target": 53, "len": 1, "label": "="}, {"source": 44, "type": "backbone", "target": 45, "len": 1, "label": "-"}, {"source": 45, "type": "backbone", "target": 46, "len": 1, "label": "-"}, {"source": 46, "type": "backbone", "target": 47, "len": 1, "label": "-"}, {"source": 47, "type": "backbone", "target": 48, "len": 1, "label": "-"}, {"source": 48, "type": "backbone", "target": 49, "len": 1, "label": "-"}, {"source": 49, "type": "backbone", "target": 50, "len": 1, "label": "-"}, {"source": 50, "type": "backbone", "target": 51, "len": 1, "label": "-"}, {"source": 51, "type": "backbone", "target": 52, "len": 1, "label": "-"}, {"source": 52, "type": "backbone", "target": 53, "len": 1, "label": "-"}, {"source": 53, "type": "backbone", "target": 54, "len": 1, "label": "-"}, {"source": 54, "type": "backbone", "target": 55, "len": 1, "label": "-"}, {"source": 55, "type": "backbone", "target": 56, "len": 1, "label": "-"}, {"source": 56, "type": "backbone", "target": 57, "len": 1, "label": "-"}, {"source": 57, "type": "backbone", "target": 58, "len": 1, "label": "-"}, {"source": 58, "type": "backbone", "target": 59, "len": 1, "label": "-"}, {"source": 59, "type": "backbone", "target": 60, "len": 1, "label": "-"}, {"source": 60, "type": "backbone", "target": 61, "len": 1, "label": "-"}], "multigraph": false}
diff -r 000000000000 -r af2624d5ab32 test-data/csc_sparse1.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csc_sparse1.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate integer general
+%
+3 3 6
+1 1 1
+3 1 2
+3 2 3
+1 3 4
+2 3 5
+3 3 6
diff -r 000000000000 -r af2624d5ab32 test-data/csc_sparse2.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csc_sparse2.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 6
+1 1 1.5
+3 1 -2
+3 2 0.3
+1 3 41
+2 3 0.1235
+3 3 6
diff -r 000000000000 -r af2624d5ab32 test-data/csc_stack_result01.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csc_stack_result01.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 6 12
+1 1 1.000000000000000e+00
+3 1 2.000000000000000e+00
+3 2 3.000000000000000e+00
+1 3 4.000000000000000e+00
+2 3 5.000000000000000e+00
+3 3 6.000000000000000e+00
+1 4 1.500000000000000e+00
+3 4 -2.000000000000000e+00
+3 5 3.000000000000000e-01
+1 6 4.100000000000000e+01
+2 6 1.235000000000000e-01
+3 6 6.000000000000000e+00
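csc_stack_result01.mtx is the column-wise stack of the two csc files: a 3x6 matrix whose first three columns hold csc_sparse1.mtx and whose columns 4-6 hold csc_sparse2.mtx. A sketch with SciPy (paths as in test-data/):

    from scipy.io import mmread, mmwrite
    from scipy.sparse import hstack

    a = mmread("csc_sparse1.mtx").tocsc()
    b = mmread("csc_sparse2.mtx").tocsc()
    mmwrite("csc_stack_result01.mtx", hstack([a, b]))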
diff -r 000000000000 -r af2624d5ab32 test-data/csr_sparse1.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csr_sparse1.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate integer general
+%
+3 3 6
+1 1 1
+1 3 2
+2 3 3
+3 1 4
+3 2 5
+3 3 6
diff -r 000000000000 -r af2624d5ab32 test-data/csr_sparse2.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csr_sparse2.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 6
+1 1 1
+1 3 -0.2
+2 3 11
+3 1 0.04
+3 2 -5
+3 3 2.6
diff -r 000000000000 -r af2624d5ab32 test-data/csr_stack_result01.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/csr_stack_result01.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+%%MatrixMarket matrix coordinate real general
+%
+6 3 12
+1 1 1.000000000000000e+00
+1 3 2.000000000000000e+00
+2 3 3.000000000000000e+00
+3 1 4.000000000000000e+00
+3 2 5.000000000000000e+00
+3 3 6.000000000000000e+00
+4 1 1.000000000000000e+00
+4 3 -2.000000000000000e-01
+5 3 1.100000000000000e+01
+6 1 4.000000000000000e-02
+6 2 -5.000000000000000e+00
+6 3 2.600000000000000e+00
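Likewise, csr_stack_result01.mtx is the row-wise stack of the two csr files, giving the 6x3 matrix above:

    from scipy.io import mmread, mmwrite
    from scipy.sparse import vstack

    mmwrite("csr_stack_result01.mtx",
            vstack([mmread("csr_sparse1.mtx").tocsr(),
                    mmread("csr_sparse2.mtx").tocsr()]))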
diff -r 000000000000 -r af2624d5ab32 test-data/deepsear_1feature.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/deepsear_1feature.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,1 @@
+{"class_name": "Sequential", "config": {"name": "sequential_1", "layers": [{"class_name": "Conv1D", "config": {"name": "conv1d_1", "trainable": true, "batch_input_shape": [null, 1000, 4], "dtype": "float32", "filters": 320, "kernel_size": [8], "strides": [1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling1D", "config": {"name": "max_pooling1d_1", "trainable": true, "strides": [4], "pool_size": [4], "padding": "valid", "data_format": "channels_last"}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": 999}}, {"class_name": "Conv1D", "config": {"name": "conv1d_2", "trainable": true, "filters": 480, "kernel_size": [8], "strides": [1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling1D", "config": {"name": "max_pooling1d_2", "trainable": true, "strides": [4], "pool_size": [4], "padding": "valid", "data_format": "channels_last"}}, {"class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": 999}}, {"class_name": "Conv1D", "config": {"name": "conv1d_3", "trainable": true, "filters": 960, "kernel_size": [8], "strides": [1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_3", "trainable": true, "rate": 0.5, "noise_shape": null, "seed": 999}}, {"class_name": "Reshape", "config": {"name": "reshape_1", "trainable": true, "target_shape": [50880]}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 1, "activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "keras_version": "2.2.4", "backend": "tensorflow"}
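deepsear_1feature.json is a Keras 2.2.4 Sequential architecture (Conv1D/MaxPooling1D/Dropout stacks feeding two Dense layers) serialized with to_json(); it carries no weights. A sketch of loading it back with the standard Keras API:

    from keras.models import model_from_json

    with open("test-data/deepsear_1feature.json") as f:
        model = model_from_json(f.read())  # architecture only, no weights
    model.summary()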
diff -r 000000000000 -r af2624d5ab32 test-data/empty_file.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/empty_file.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,48 @@
+0 44 64 -76 4
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 1
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 2
+0 53 53 -56 2
+0 45 44 -61 0
+0 43 65 -84 4
+0 35 52 -75 1
+0 56 56 -70 0
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 1
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 1
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 0
+2 -94 20 -96 0
+2 -85 26 -88 0
+2 -90 33 -114 1
+2 -63 9 -106 0
+2 -79 9 -93 1
+2 -99 26 -108 3
+2 -81 19 -110 0
+2 -108 21 -108 0
+2 -92 27 -106 3
+2 -88 2 -106 1
+2 -88 15 -103 0
+3 54 -74 4 0
+3 42 -92 31 1
+3 39 -99 -7 1
+3 48 -115 -5 1
+3 39 -96 2 1
+3 31 -109 9 1
+3 33 -96 -8 1
+3 23 -102 4 0
+3 38 -90 21 1
+3 34 -107 1 1
+3 35 -78 18 1
diff -r 000000000000 -r af2624d5ab32 test-data/f1_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/f1_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+f1_score : 
+0.8461538461538461
diff -r 000000000000 -r af2624d5ab32 test-data/fbeta_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/fbeta_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+fbeta_score : 
+0.8461538461538461
b
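f1_score.txt and fbeta_score.txt record the same value, 0.8461538461538461 (= 11/13). That is exactly what happens when precision equals recall: every F-beta score then collapses to the common value. A minimal sketch of producing such expectation files with sklearn.metrics; the labels below are hypothetical stand-ins constructed to give precision = recall = 11/13, not the tool's actual test data:

from sklearn.metrics import f1_score, fbeta_score

# Hypothetical labels with TP=11, FP=2, FN=2, so precision = recall = 11/13.
y_true = [1] * 13 + [0] * 4
y_pred = [1] * 11 + [0] * 2 + [1] * 2 + [0] * 2

f1 = f1_score(y_true, y_pred)               # 0.8461538461538461
fb = fbeta_score(y_true, y_pred, beta=0.5)  # identical here, since P == R

with open("f1_score.txt", "w") as out:
    out.write("f1_score : \n%s\n" % f1)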
diff -r 000000000000 -r af2624d5ab32 test-data/feature_importances_.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_importances_.tabular Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,11 @@
+feature_importances_
+0.15959252
+0.20373514
+0.22071308
+0.06281833
+0.098471984
+0.06960951
+0.13073005
+0.027164686
+0.022071308
+0.0050933785
b
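The ten values in feature_importances_.tabular sum to 1.0, the normalization scikit-learn's tree ensembles use for feature_importances_. A sketch of dumping that fitted attribute in the same one-column layout; the estimator and training data here are assumptions, not the fixture that generated this file:

import pandas as pd
from sklearn.datasets import make_regression
from sklearn.ensemble import RandomForestRegressor

# Hypothetical 10-feature training set and estimator.
X, y = make_regression(n_samples=100, n_features=10, random_state=0)
est = RandomForestRegressor(n_estimators=100, random_state=0).fit(X, y)

pd.DataFrame({"feature_importances_": est.feature_importances_}).to_csv(
    "feature_importances_.tabular", sep="\t", index=False)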
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result01 Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,11 @@
+0 1
+143.762620712 -1.1796457192799998
+-88.5787166225 -2.5710918402200003
+-82.8452345578 -0.168636324107
+72.4951388149 0.991068834926
+11.805182128 -0.7096855607860001
+-63.9354970901 0.9841122108220001
+126.32584079600001 0.35353444883900004
+23.0341392692 1.03188231893
+67.6714937696 -0.8214378651719999
+47.39275848810001 -0.0942409319417
b
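feature_selection_result01 is a selected feature matrix with positional headers ("0", "1"): once a scikit-learn transformer returns a bare ndarray, the original column names are gone and pandas falls back to integer labels. A sketch under assumed inputs (the scorer, k, and synthetic data are illustrative):

import pandas as pd
from sklearn.datasets import make_regression
from sklearn.feature_selection import SelectKBest, f_regression

# Hypothetical 5-feature matrix reduced to its 2 best-scoring columns.
X, y = make_regression(n_samples=10, n_features=5, random_state=0)
X_new = SelectKBest(f_regression, k=2).fit_transform(X, y)

# Headers come out as 0, 1, ... because X_new carries no names.
pd.DataFrame(X_new).to_csv("selected.tabular", sep="\t", index=False)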
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result02
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result02 Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,262 @@
+temp_2 temp_1 forecast_noaa friend
+68.0 69.0 65.0 88.0
+60.0 59.0 57.0 66.0
+85.0 88.0 75.0 70.0
+82.0 65.0 63.0 58.0
+54.0 50.0 44.0 58.0
+48.0 51.0 45.0 63.0
+49.0 52.0 45.0 41.0
+73.0 78.0 75.0 66.0
+39.0 35.0 43.0 38.0
+42.0 40.0 45.0 36.0
+42.0 47.0 41.0 58.0
+76.0 72.0 76.0 88.0
+69.0 76.0 73.0 72.0
+40.0 39.0 45.0 46.0
+71.0 78.0 70.0 84.0
+64.0 71.0 63.0 85.0
+54.0 48.0 44.0 61.0
+73.0 72.0 77.0 68.0
+56.0 57.0 50.0 70.0
+45.0 40.0 44.0 39.0
+50.0 54.0 47.0 53.0
+65.0 58.0 52.0 71.0
+60.0 68.0 58.0 54.0
+60.0 65.0 55.0 65.0
+48.0 47.0 46.0 51.0
+44.0 44.0 43.0 42.0
+64.0 64.0 64.0 69.0
+59.0 62.0 57.0 67.0
+68.0 66.0 64.0 74.0
+77.0 70.0 67.0 90.0
+59.0 57.0 54.0 70.0
+55.0 50.0 46.0 57.0
+58.0 55.0 49.0 71.0
+57.0 55.0 46.0 67.0
+42.0 42.0 41.0 47.0
+64.0 65.0 57.0 41.0
+64.0 63.0 63.0 73.0
+49.0 48.0 45.0 28.0
+40.0 42.0 44.0 62.0
+44.0 51.0 45.0 38.0
+67.0 64.0 65.0 64.0
+79.0 75.0 74.0 63.0
+50.0 52.0 42.0 39.0
+85.0 67.0 66.0 80.0
+67.0 68.0 65.0 56.0
+53.0 54.0 53.0 42.0
+62.0 62.0 52.0 70.0
+77.0 76.0 76.0 61.0
+74.0 73.0 71.0 93.0
+50.0 52.0 50.0 35.0
+75.0 70.0 71.0 68.0
+73.0 77.0 75.0 62.0
+69.0 60.0 52.0 72.0
+55.0 52.0 50.0 54.0
+81.0 79.0 71.0 85.0
+77.0 76.0 53.0 74.0
+66.0 66.0 64.0 85.0
+68.0 57.0 58.0 62.0
+76.0 66.0 57.0 60.0
+60.0 61.0 58.0 41.0
+56.0 55.0 52.0 65.0
+57.0 48.0 46.0 54.0
+53.0 49.0 46.0 63.0
+66.0 65.0 64.0 73.0
+74.0 60.0 58.0 56.0
+55.0 56.0 53.0 36.0
+62.0 59.0 56.0 44.0
+36.0 44.0 41.0 35.0
+77.0 82.0 62.0 83.0
+64.0 64.0 65.0 76.0
+44.0 43.0 41.0 46.0
+56.0 64.0 51.0 57.0
+61.0 63.0 49.0 49.0
+65.0 70.0 67.0 79.0
+63.0 71.0 48.0 42.0
+76.0 76.0 69.0 85.0
+64.0 68.0 58.0 55.0
+39.0 39.0 44.0 39.0
+79.0 71.0 70.0 52.0
+68.0 69.0 68.0 89.0
+70.0 74.0 71.0 82.0
+75.0 81.0 62.0 81.0
+49.0 51.0 49.0 34.0
+52.0 45.0 44.0 61.0
+80.0 87.0 73.0 73.0
+76.0 71.0 71.0 86.0
+65.0 55.0 56.0 77.0
+76.0 80.0 72.0 81.0
+71.0 67.0 65.0 76.0
+64.0 61.0 60.0 78.0
+49.0 46.0 43.0 65.0
+35.0 39.0 42.0 51.0
+68.0 67.0 67.0 61.0
+48.0 52.0 43.0 50.0
+60.0 67.0 68.0 87.0
+74.0 75.0 67.0 77.0
+68.0 68.0 73.0 79.0
+81.0 92.0 65.0 71.0
+68.0 67.0 69.0 56.0
+45.0 44.0 43.0 56.0
+60.0 61.0 56.0 73.0
+65.0 65.0 49.0 41.0
+68.0 68.0 72.0 70.0
+77.0 87.0 62.0 69.0
+65.0 117.0 51.0 62.0
+72.0 80.0 75.0 66.0
+55.0 57.0 47.0 46.0
+63.0 67.0 61.0 68.0
+53.0 58.0 51.0 56.0
+61.0 65.0 53.0 41.0
+56.0 52.0 45.0 47.0
+57.0 59.0 52.0 39.0
+57.0 57.0 53.0 35.0
+89.0 81.0 56.0 66.0
+71.0 75.0 76.0 75.0
+88.0 76.0 76.0 95.0
+65.0 57.0 61.0 53.0
+68.0 69.0 72.0 86.0
+76.0 77.0 66.0 64.0
+58.0 55.0 47.0 55.0
+50.0 49.0 45.0 53.0
+53.0 54.0 48.0 57.0
+59.0 55.0 49.0 42.0
+51.0 56.0 53.0 45.0
+76.0 68.0 72.0 77.0
+52.0 54.0 49.0 44.0
+65.0 67.0 69.0 87.0
+45.0 49.0 45.0 33.0
+49.0 49.0 47.0 45.0
+57.0 56.0 48.0 49.0
+76.0 73.0 66.0 78.0
+65.0 66.0 65.0 60.0
+77.0 69.0 66.0 62.0
+77.0 82.0 64.0 65.0
+87.0 90.0 75.0 65.0
+51.0 51.0 49.0 43.0
+68.0 77.0 57.0 41.0
+57.0 60.0 58.0 58.0
+79.0 74.0 71.0 87.0
+80.0 85.0 73.0 74.0
+60.0 68.0 61.0 64.0
+62.0 56.0 46.0 37.0
+73.0 71.0 55.0 45.0
+60.0 62.0 57.0 40.0
+79.0 83.0 76.0 76.0
+71.0 64.0 62.0 56.0
+54.0 56.0 45.0 54.0
+40.0 41.0 42.0 31.0
+66.0 65.0 66.0 67.0
+57.0 65.0 49.0 38.0
+41.0 40.0 46.0 41.0
+45.0 45.0 43.0 29.0
+52.0 52.0 48.0 58.0
+64.0 63.0 50.0 63.0
+52.0 52.0 47.0 44.0
+58.0 60.0 55.0 77.0
+84.0 81.0 73.0 89.0
+77.0 75.0 74.0 77.0
+63.0 59.0 48.0 64.0
+72.0 73.0 77.0 94.0
+73.0 75.0 73.0 66.0
+59.0 60.0 56.0 59.0
+73.0 75.0 68.0 56.0
+66.0 59.0 56.0 40.0
+49.0 53.0 47.0 56.0
+80.0 79.0 76.0 60.0
+59.0 57.0 49.0 46.0
+79.0 75.0 64.0 77.0
+69.0 71.0 67.0 81.0
+57.0 53.0 50.0 42.0
+47.0 46.0 48.0 56.0
+82.0 81.0 72.0 70.0
+54.0 49.0 47.0 29.0
+56.0 57.0 44.0 34.0
+60.0 60.0 54.0 53.0
+70.0 67.0 72.0 64.0
+65.0 61.0 62.0 60.0
+70.0 66.0 66.0 85.0
+65.0 64.0 50.0 55.0
+63.0 66.0 62.0 49.0
+57.0 64.0 52.0 49.0
+60.0 71.0 61.0 56.0
+67.0 75.0 62.0 60.0
+45.0 48.0 46.0 47.0
+60.0 53.0 48.0 70.0
+55.0 49.0 46.0 65.0
+86.0 85.0 67.0 81.0
+57.0 62.0 48.0 30.0
+46.0 50.0 42.0 58.0
+65.0 58.0 51.0 39.0
+79.0 72.0 74.0 95.0
+57.0 55.0 50.0 34.0
+72.0 74.0 70.0 91.0
+83.0 85.0 77.0 77.0
+77.0 73.0 77.0 93.0
+52.0 52.0 44.0 39.0
+64.0 67.0 64.0 62.0
+49.0 45.0 45.0 35.0
+52.0 46.0 46.0 41.0
+62.0 66.0 60.0 57.0
+81.0 71.0 75.0 86.0
+65.0 70.0 66.0 79.0
+55.0 58.0 46.0 53.0
+72.0 72.0 76.0 65.0
+74.0 74.0 74.0 71.0
+63.0 65.0 63.0 49.0
+68.0 77.0 55.0 39.0
+60.0 59.0 49.0 35.0
+44.0 45.0 41.0 61.0
+51.0 53.0 49.0 46.0
+57.0 53.0 54.0 72.0
+85.0 79.0 73.0 79.0
+51.0 49.0 44.0 44.0
+66.0 63.0 62.0 78.0
+63.0 69.0 54.0 45.0
+51.0 60.0 47.0 46.0
+63.0 64.0 60.0 73.0
+75.0 79.0 66.0 64.0
+49.0 55.0 43.0 58.0
+68.0 73.0 54.0 41.0
+62.0 60.0 57.0 62.0
+71.0 67.0 67.0 77.0
+41.0 42.0 45.0 58.0
+57.0 60.0 62.0 55.0
+55.0 57.0 47.0 30.0
+35.0 35.0 44.0 36.0
+71.0 75.0 66.0 84.0
+59.0 61.0 48.0 65.0
+53.0 51.0 46.0 59.0
+69.0 71.0 67.0 70.0
+71.0 74.0 74.0 71.0
+48.0 48.0 44.0 42.0
+68.0 74.0 70.0 60.0
+70.0 76.0 68.0 57.0
+54.0 58.0 47.0 37.0
+53.0 51.0 48.0 43.0
+67.0 72.0 68.0 78.0
+67.0 76.0 64.0 74.0
+52.0 52.0 47.0 60.0
+52.0 53.0 48.0 53.0
+67.0 65.0 65.0 83.0
+61.0 58.0 58.0 43.0
+74.0 77.0 74.0 56.0
+58.0 61.0 51.0 35.0
+66.0 67.0 64.0 54.0
+55.0 54.0 46.0 58.0
+71.0 79.0 65.0 58.0
+81.0 77.0 63.0 67.0
+75.0 71.0 64.0 55.0
+59.0 58.0 54.0 61.0
+64.0 68.0 55.0 56.0
+43.0 40.0 45.0 49.0
+75.0 80.0 75.0 71.0
+87.0 74.0 59.0 61.0
+48.0 57.0 42.0 57.0
+48.0 52.0 43.0 57.0
+74.0 71.0 71.0 95.0
+54.0 49.0 49.0 70.0
+77.0 89.0 59.0 61.0
+66.0 60.0 56.0 78.0
+59.0 59.0 58.0 40.0
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result03
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result03 Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,262 @@
+temp_1 friend
+69.0 88.0
+59.0 66.0
+88.0 70.0
+65.0 58.0
+50.0 58.0
+51.0 63.0
+52.0 41.0
+78.0 66.0
+35.0 38.0
+40.0 36.0
+47.0 58.0
+72.0 88.0
+76.0 72.0
+39.0 46.0
+78.0 84.0
+71.0 85.0
+48.0 61.0
+72.0 68.0
+57.0 70.0
+40.0 39.0
+54.0 53.0
+58.0 71.0
+68.0 54.0
+65.0 65.0
+47.0 51.0
+44.0 42.0
+64.0 69.0
+62.0 67.0
+66.0 74.0
+70.0 90.0
+57.0 70.0
+50.0 57.0
+55.0 71.0
+55.0 67.0
+42.0 47.0
+65.0 41.0
+63.0 73.0
+48.0 28.0
+42.0 62.0
+51.0 38.0
+64.0 64.0
+75.0 63.0
+52.0 39.0
+67.0 80.0
+68.0 56.0
+54.0 42.0
+62.0 70.0
+76.0 61.0
+73.0 93.0
+52.0 35.0
+70.0 68.0
+77.0 62.0
+60.0 72.0
+52.0 54.0
+79.0 85.0
+76.0 74.0
+66.0 85.0
+57.0 62.0
+66.0 60.0
+61.0 41.0
+55.0 65.0
+48.0 54.0
+49.0 63.0
+65.0 73.0
+60.0 56.0
+56.0 36.0
+59.0 44.0
+44.0 35.0
+82.0 83.0
+64.0 76.0
+43.0 46.0
+64.0 57.0
+63.0 49.0
+70.0 79.0
+71.0 42.0
+76.0 85.0
+68.0 55.0
+39.0 39.0
+71.0 52.0
+69.0 89.0
+74.0 82.0
+81.0 81.0
+51.0 34.0
+45.0 61.0
+87.0 73.0
+71.0 86.0
+55.0 77.0
+80.0 81.0
+67.0 76.0
+61.0 78.0
+46.0 65.0
+39.0 51.0
+67.0 61.0
+52.0 50.0
+67.0 87.0
+75.0 77.0
+68.0 79.0
+92.0 71.0
+67.0 56.0
+44.0 56.0
+61.0 73.0
+65.0 41.0
+68.0 70.0
+87.0 69.0
+117.0 62.0
+80.0 66.0
+57.0 46.0
+67.0 68.0
+58.0 56.0
+65.0 41.0
+52.0 47.0
+59.0 39.0
+57.0 35.0
+81.0 66.0
+75.0 75.0
+76.0 95.0
+57.0 53.0
+69.0 86.0
+77.0 64.0
+55.0 55.0
+49.0 53.0
+54.0 57.0
+55.0 42.0
+56.0 45.0
+68.0 77.0
+54.0 44.0
+67.0 87.0
+49.0 33.0
+49.0 45.0
+56.0 49.0
+73.0 78.0
+66.0 60.0
+69.0 62.0
+82.0 65.0
+90.0 65.0
+51.0 43.0
+77.0 41.0
+60.0 58.0
+74.0 87.0
+85.0 74.0
+68.0 64.0
+56.0 37.0
+71.0 45.0
+62.0 40.0
+83.0 76.0
+64.0 56.0
+56.0 54.0
+41.0 31.0
+65.0 67.0
+65.0 38.0
+40.0 41.0
+45.0 29.0
+52.0 58.0
+63.0 63.0
+52.0 44.0
+60.0 77.0
+81.0 89.0
+75.0 77.0
+59.0 64.0
+73.0 94.0
+75.0 66.0
+60.0 59.0
+75.0 56.0
+59.0 40.0
+53.0 56.0
+79.0 60.0
+57.0 46.0
+75.0 77.0
+71.0 81.0
+53.0 42.0
+46.0 56.0
+81.0 70.0
+49.0 29.0
+57.0 34.0
+60.0 53.0
+67.0 64.0
+61.0 60.0
+66.0 85.0
+64.0 55.0
+66.0 49.0
+64.0 49.0
+71.0 56.0
+75.0 60.0
+48.0 47.0
+53.0 70.0
+49.0 65.0
+85.0 81.0
+62.0 30.0
+50.0 58.0
+58.0 39.0
+72.0 95.0
+55.0 34.0
+74.0 91.0
+85.0 77.0
+73.0 93.0
+52.0 39.0
+67.0 62.0
+45.0 35.0
+46.0 41.0
+66.0 57.0
+71.0 86.0
+70.0 79.0
+58.0 53.0
+72.0 65.0
+74.0 71.0
+65.0 49.0
+77.0 39.0
+59.0 35.0
+45.0 61.0
+53.0 46.0
+53.0 72.0
+79.0 79.0
+49.0 44.0
+63.0 78.0
+69.0 45.0
+60.0 46.0
+64.0 73.0
+79.0 64.0
+55.0 58.0
+73.0 41.0
+60.0 62.0
+67.0 77.0
+42.0 58.0
+60.0 55.0
+57.0 30.0
+35.0 36.0
+75.0 84.0
+61.0 65.0
+51.0 59.0
+71.0 70.0
+74.0 71.0
+48.0 42.0
+74.0 60.0
+76.0 57.0
+58.0 37.0
+51.0 43.0
+72.0 78.0
+76.0 74.0
+52.0 60.0
+53.0 53.0
+65.0 83.0
+58.0 43.0
+77.0 56.0
+61.0 35.0
+67.0 54.0
+54.0 58.0
+79.0 58.0
+77.0 67.0
+71.0 55.0
+58.0 61.0
+68.0 56.0
+40.0 49.0
+80.0 71.0
+74.0 61.0
+57.0 57.0
+52.0 57.0
+71.0 95.0
+49.0 70.0
+89.0 61.0
+60.0 78.0
+59.0 40.0
b
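feature_selection_result02 and feature_selection_result03 keep 4 and 2 named columns of the same weather table (temp_2, temp_1, forecast_noaa, friend vs. temp_1, friend), so these fixtures were written with the original column names preserved. A sketch of name-preserving selection via get_support(); the input path, target column, scorer, and k are assumptions:

import pandas as pd
from sklearn.feature_selection import SelectKBest, f_regression

df = pd.read_csv("weather.tabular", sep="\t")      # hypothetical input file
X, y = df.drop(columns=["actual"]), df["actual"]   # hypothetical target

sel = SelectKBest(f_regression, k=4).fit(X, y)
kept = X.columns[sel.get_support()]                # names of kept features
X[kept].to_csv("selected.tabular", sep="\t", index=False)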
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result04
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result04 Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,262 @@\n+month\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\tweek_Mon\n+9.0\t19.0\t68.0\t69.0\t69.7\t65.0\t74.0\t71.0\t88.0\t1.0\n+4.0\t14.0\t60.0\t59.0\t58.1\t57.0\t63.0\t58.0\t66.0\t0.0\n+7.0\t30.0\t85.0\t88.0\t77.3\t75.0\t79.0\t77.0\t70.0\t0.0\n+5.0\t15.0\t82.0\t65.0\t64.7\t63.0\t69.0\t64.0\t58.0\t0.0\n+1.0\t18.0\t54.0\t50.0\t47.5\t44.0\t48.0\t49.0\t58.0\t1.0\n+1.0\t25.0\t48.0\t51.0\t48.2\t45.0\t51.0\t49.0\t63.0\t1.0\n+11.0\t25.0\t49.0\t52.0\t48.6\t45.0\t52.0\t47.0\t41.0\t0.0\n+7.0\t20.0\t73.0\t78.0\t76.7\t75.0\t78.0\t77.0\t66.0\t0.0\n+12.0\t17.0\t39.0\t35.0\t45.2\t43.0\t47.0\t46.0\t38.0\t0.0\n+12.0\t8.0\t42.0\t40.0\t46.1\t45.0\t51.0\t47.0\t36.0\t0.0\n+12.0\t28.0\t42.0\t47.0\t45.3\t41.0\t49.0\t44.0\t58.0\t0.0\n+7.0\t17.0\t76.0\t72.0\t76.3\t76.0\t78.0\t77.0\t88.0\t0.0\n+7.0\t7.0\t69.0\t76.0\t74.4\t73.0\t77.0\t74.0\t72.0\t0.0\n+12.0\t15.0\t40.0\t39.0\t45.3\t45.0\t49.0\t47.0\t46.0\t0.0\n+6.0\t27.0\t71.0\t78.0\t72.2\t70.0\t74.0\t72.0\t84.0\t1.0\n+5.0\t31.0\t64.0\t71.0\t67.3\t63.0\t72.0\t68.0\t85.0\t0.0\n+1.0\t20.0\t54.0\t48.0\t47.7\t44.0\t52.0\t49.0\t61.0\t0.0\n+8.0\t10.0\t73.0\t72.0\t77.0\t77.0\t78.0\t77.0\t68.0\t0.0\n+3.0\t23.0\t56.0\t57.0\t54.7\t50.0\t58.0\t55.0\t70.0\t0.0\n+12.0\t24.0\t45.0\t40.0\t45.1\t44.0\t47.0\t46.0\t39.0\t0.0\n+1.0\t19.0\t50.0\t54.0\t47.6\t47.0\t49.0\t48.0\t53.0\t0.0\n+11.0\t6.0\t65.0\t58.0\t53.2\t52.0\t57.0\t55.0\t71.0\t0.0\n+4.0\t17.0\t60.0\t68.0\t58.6\t58.0\t62.0\t59.0\t54.0\t0.0\n+10.0\t29.0\t60.0\t65.0\t55.3\t55.0\t59.0\t55.0\t65.0\t0.0\n+2.0\t1.0\t48.0\t47.0\t48.8\t46.0\t49.0\t49.0\t51.0\t1.0\n+12.0\t12.0\t44.0\t44.0\t45.6\t43.0\t50.0\t45.0\t42.0\t1.0\n+5.0\t30.0\t64.0\t64.0\t67.1\t64.0\t70.0\t66.0\t69.0\t1.0\n+10.0\t23.0\t59.0\t62.0\t57.1\t57.0\t58.0\t59.0\t67.0\t0.0\n+9.0\t30.0\t68.0\t66.0\t65.7\t64.0\t67.0\t65.0\t74.0\t0.0\n+9.0\t12.0\t77.0\t70.0\t71.8\t67.0\t73.0\t73.0\t90.0\t1.0\n+11.0\t2.0\t59.0\t57.0\t54.2\t54.0\t58.0\t55.0\t70.0\t0.0\n+11.0\t17.0\t55.0\t50.0\t50.5\t46.0\t51.0\t50.0\t57.0\t0.0\n+3.0\t3.0\t58.0\t55.0\t51.8\t49.0\t54.0\t50.0\t71.0\t0.0\n+11.0\t21.0\t57.0\t55.0\t49.5\t46.0\t51.0\t49.0\t67.0\t1.0\n+12.0\t27.0\t42.0\t42.0\t45.2\t41.0\t50.0\t47.0\t47.0\t0.0\n+4.0\t24.0\t64.0\t65.0\t60.1\t57.0\t61.0\t60.0\t41.0\t0.0\n+5.0\t20.0\t64.0\t63.0\t65.6\t63.0\t70.0\t64.0\t73.0\t0.0\n+1.0\t16.0\t49.0\t48.0\t47.3\t45.0\t52.0\t46.0\t28.0\t0.0\n+12.0\t7.0\t40.0\t42.0\t46.3\t44.0\t51.0\t46.0\t62.0\t0.0\n+1.0\t7.0\t44.0\t51.0\t46.2\t45.0\t49.0\t46.0\t38.0\t0.0\n+9.0\t24.0\t67.0\t64.0\t68.0\t65.0\t71.0\t66.0\t64.0\t0.0\n+8.0\t30.0\t79.0\t75.0\t74.6\t74.0\t76.0\t75.0\t63.0\t0.0\n+1.0\t11.0\t50.0\t52.0\t46.7\t42.0\t48.0\t48.0\t39.0\t1.0\n+6.0\t9.0\t85.0\t67.0\t68.6\t66.0\t73.0\t69.0\t80.0\t0.0\n+9.0\t22.0\t67.0\t68.0\t68.7\t65.0\t70.0\t69.0\t56.0\t0.0\n+3.0\t25.0\t53.0\t54.0\t55.0\t53.0\t57.0\t57.0\t42.0\t0.0\n+10.0\t24.0\t62.0\t62.0\t56.8\t52.0\t61.0\t57.0\t70.0\t1.0\n+7.0\t16.0\t77.0\t76.0\t76.1\t76.0\t78.0\t75.0\t61.0\t0.0\n+7.0\t1.0\t74.0\t73.0\t73.1\t71.0\t75.0\t72.0\t93.0\t0.0\n+11.0\t18.0\t50.0\t52.0\t50.3\t50.0\t53.0\t50.0\t35.0\t0.0\n+9.0\t3.0\t75.0\t70.0\t73.9\t71.0\t75.0\t73.0\t68.0\t0.0\n+8.0\t2.0\t73.0\t77.0\t77.4\t75.0\t80.0\t79.0\t62.0\t0.0\n+4.0\t5.0\t69.0\t60.0\t56.6\t52.0\t58.0\t56.0\t72.0\t0.0\n+3.0\t13.0\t55.0\t52.0\t53.3\t50.0\t55.0\t53.0\t54.0\t0.0\n+8.0\t28.0\t81.0\t79.0\t75.0\t71.0\t77.0\t76.0\t85.0\t0.0\n+4.0\t9.0\t77.0\t76.0\t57.2\t53.0\t61.0\t57.0\t74.0\t0.0\n+5.0\t26.0\t66.0\t66.0\t66.5\t64.0\t70.0\t65.0\t85.0\t0.0\n+10.0\t10.0\t68.0\t57.0\t61.8\t58.0\t64.0\t61.0\t62.0\t1.0\n+4.0\t1
0.0\t76.0\t66.0\t57.4\t57.0\t60.0\t57.0\t60.0\t0.0\n+10.0\t19.0\t60.0\t61.0\t58.4\t58.0\t60.0\t57.0\t41.0\t0.0\n+3.0\t12.0\t56.0\t55.0\t53.1\t52.0\t58.0\t53.0\t65.0\t0.0\n+1.0\t24.0\t57.0\t48.0\t48.1\t46.0\t50.0\t48.0\t54.0\t0.0\n+2.0\t7.0\t53.0\t49.0\t49.2\t46.0\t51.0\t48.0\t63.0\t0.0\n+5.0\t27.0\t66.0\t65.0\t66.7\t64.0\t67.0\t68.0\t73.0\t0.0\n+5.0\t5.0\t74.0\t60.0\t62.5\t58.0\t66.0\t62.0\t56.0\t0.0\n+3.0\t11.0\t55.0\t56.0\t53.0\t53.0\t53.0\t51.0\t36.0\t0.0\n+10.0\t22.0\t62.0\t59.0\t57.4\t56.0\t59.0\t58.0\t44.0\t0.0\n+12.0\t11.0\t36.0\t44.0\t45.7\t41.0\t46.0\t47.0\t35.0\t0.0\n+5.0\t8.0\t77.0\t82.0\t63.2\t62.0\t65.0\t63.0\t83.0\t0.0\n+5.0\t29.0\t64.0\t64.0\t67.0\t65.0\t71.0\t65.0\t76.0\t0.0\n+12.0\t13.0\t44.0\t43.0\t45.5\t41.0\t47.0\t46.0\t46.0\t0.0\n+3.0\t30.0\t56.0\t64.0\t55.7\t51.0\t57.0\t56.0\t57.0\t0.0\n+11.0\t8.0\t61.0\t63.0\t52.7\t49.0\t57.0\t52.0\t49.0\t0.0\n+6.0\t20.0\t65.0\t70.0\t70.6\t67.0\t71.0\t70.0\t79.0\t1.0\n+11.0\t9.0\t63.0\t71.0\t52.4\t48.0\t56.0\t52.0\t42.0\t0.0\n+7.0\t3.0\t76.0\t76.0\t73.5\t69.0\t76.0\t75.0\t85.0\t0.0\n+10.0\t9.0\t64.0\t68.0\t62.1\t58.0\t65.0\t63.0\t55.0\t0.0\n+12.0\t16.0\t39.0\t39.0\t45.3\t44.0\t49.0\t44.0\t39.0\t0.0\n+9.0\t16.0\t79.0\t71.0\t70.7\t70.0\t74.0\t71.0\t52.0\t0.0\n+6.0\t25.0\t68.0'..b'6.0\t65.7\t62.0\t67.0\t65.0\t49.0\t0.0\n+3.0\t6.0\t57.0\t64.0\t52.2\t52.0\t53.0\t51.0\t49.0\t0.0\n+5.0\t18.0\t60.0\t71.0\t65.2\t61.0\t68.0\t65.0\t56.0\t0.0\n+5.0\t11.0\t67.0\t75.0\t63.8\t62.0\t68.0\t63.0\t60.0\t0.0\n+1.0\t9.0\t45.0\t48.0\t46.4\t46.0\t50.0\t45.0\t47.0\t0.0\n+3.0\t8.0\t60.0\t53.0\t52.5\t48.0\t56.0\t51.0\t70.0\t0.0\n+1.0\t15.0\t55.0\t49.0\t47.1\t46.0\t51.0\t46.0\t65.0\t0.0\n+6.0\t8.0\t86.0\t85.0\t68.5\t67.0\t70.0\t69.0\t81.0\t0.0\n+2.0\t10.0\t57.0\t62.0\t49.4\t48.0\t50.0\t49.0\t30.0\t0.0\n+12.0\t3.0\t46.0\t50.0\t47.0\t42.0\t52.0\t47.0\t58.0\t0.0\n+10.0\t27.0\t65.0\t58.0\t55.9\t51.0\t60.0\t55.0\t39.0\t0.0\n+8.0\t7.0\t79.0\t72.0\t77.2\t74.0\t78.0\t77.0\t95.0\t0.0\n+11.0\t16.0\t57.0\t55.0\t50.7\t50.0\t51.0\t49.0\t34.0\t0.0\n+9.0\t10.0\t72.0\t74.0\t72.3\t70.0\t77.0\t74.0\t91.0\t0.0\n+7.0\t29.0\t83.0\t85.0\t77.3\t77.0\t80.0\t79.0\t77.0\t0.0\n+8.0\t3.0\t77.0\t73.0\t77.3\t77.0\t81.0\t77.0\t93.0\t0.0\n+12.0\t1.0\t52.0\t52.0\t47.4\t44.0\t48.0\t49.0\t39.0\t0.0\n+9.0\t25.0\t64.0\t67.0\t67.6\t64.0\t72.0\t67.0\t62.0\t0.0\n+12.0\t23.0\t49.0\t45.0\t45.1\t45.0\t49.0\t44.0\t35.0\t0.0\n+12.0\t2.0\t52.0\t46.0\t47.2\t46.0\t51.0\t49.0\t41.0\t0.0\n+10.0\t13.0\t62.0\t66.0\t60.6\t60.0\t62.0\t60.0\t57.0\t0.0\n+7.0\t23.0\t81.0\t71.0\t77.0\t75.0\t81.0\t76.0\t86.0\t0.0\n+6.0\t13.0\t65.0\t70.0\t69.3\t66.0\t72.0\t69.0\t79.0\t1.0\n+2.0\t15.0\t55.0\t58.0\t49.9\t46.0\t52.0\t49.0\t53.0\t1.0\n+8.0\t8.0\t72.0\t72.0\t77.1\t76.0\t78.0\t77.0\t65.0\t1.0\n+7.0\t12.0\t74.0\t74.0\t75.4\t74.0\t77.0\t77.0\t71.0\t0.0\n+10.0\t3.0\t63.0\t65.0\t64.5\t63.0\t68.0\t65.0\t49.0\t1.0\n+4.0\t18.0\t68.0\t77.0\t58.8\t55.0\t59.0\t57.0\t39.0\t1.0\n+2.0\t25.0\t60.0\t59.0\t50.9\t49.0\t51.0\t49.0\t35.0\t0.0\n+1.0\t2.0\t44.0\t45.0\t45.7\t41.0\t50.0\t44.0\t61.0\t0.0\n+2.0\t21.0\t51.0\t53.0\t50.5\t49.0\t54.0\t52.0\t46.0\t0.0\n+3.0\t24.0\t57.0\t53.0\t54.9\t54.0\t56.0\t56.0\t72.0\t0.0\n+7.0\t27.0\t85.0\t79.0\t77.3\t73.0\t78.0\t79.0\t79.0\t0.0\n+2.0\t4.0\t51.0\t49.0\t49.0\t44.0\t54.0\t51.0\t44.0\t0.0\n+10.0\t7.0\t66.0\t63.0\t62.9\t62.0\t67.0\t64.0\t78.0\t0.0\n+4.0\t4.0\t63.0\t69.0\t56.5\t54.0\t59.0\t56.0\t45.0\t1.0\n+2.0\t24.0\t51.0\t60.0\t50.8\t47.0\t53.0\t50.0\t46.0\t0.0\n+10.0\t8.0\t63.0\t64.0\t62.5\t60.0\t65.0\t61.0\t73.0\t0.0\n+9.0\t15.0\t75.0\t79.0\t71.0\t66.0\t76.0\t69.0\t64.0\t0.0\n+1.0\t14.0\t49.0\t55.0\t4
7.0\t43.0\t47.0\t46.0\t58.0\t0.0\n+4.0\t1.0\t68.0\t73.0\t56.0\t54.0\t59.0\t55.0\t41.0\t0.0\n+10.0\t17.0\t62.0\t60.0\t59.1\t57.0\t63.0\t59.0\t62.0\t1.0\n+6.0\t18.0\t71.0\t67.0\t70.2\t67.0\t75.0\t69.0\t77.0\t0.0\n+12.0\t26.0\t41.0\t42.0\t45.2\t45.0\t48.0\t46.0\t58.0\t1.0\n+5.0\t17.0\t57.0\t60.0\t65.0\t62.0\t65.0\t65.0\t55.0\t0.0\n+11.0\t20.0\t55.0\t57.0\t49.8\t47.0\t54.0\t48.0\t30.0\t0.0\n+12.0\t18.0\t35.0\t35.0\t45.2\t44.0\t46.0\t46.0\t36.0\t0.0\n+9.0\t17.0\t71.0\t75.0\t70.3\t66.0\t73.0\t70.0\t84.0\t0.0\n+2.0\t26.0\t59.0\t61.0\t51.1\t48.0\t56.0\t53.0\t65.0\t0.0\n+2.0\t22.0\t53.0\t51.0\t50.6\t46.0\t51.0\t50.0\t59.0\t1.0\n+6.0\t26.0\t69.0\t71.0\t71.9\t67.0\t74.0\t72.0\t70.0\t0.0\n+7.0\t11.0\t71.0\t74.0\t75.3\t74.0\t79.0\t75.0\t71.0\t1.0\n+12.0\t30.0\t48.0\t48.0\t45.4\t44.0\t46.0\t44.0\t42.0\t0.0\n+7.0\t9.0\t68.0\t74.0\t74.9\t70.0\t79.0\t76.0\t60.0\t0.0\n+6.0\t21.0\t70.0\t76.0\t70.8\t68.0\t75.0\t71.0\t57.0\t0.0\n+3.0\t2.0\t54.0\t58.0\t51.6\t47.0\t54.0\t52.0\t37.0\t0.0\n+2.0\t20.0\t53.0\t51.0\t50.4\t48.0\t55.0\t51.0\t43.0\t0.0\n+9.0\t9.0\t67.0\t72.0\t72.6\t68.0\t77.0\t71.0\t78.0\t0.0\n+9.0\t26.0\t67.0\t76.0\t67.2\t64.0\t69.0\t69.0\t74.0\t1.0\n+1.0\t22.0\t52.0\t52.0\t47.9\t47.0\t48.0\t48.0\t60.0\t0.0\n+11.0\t27.0\t52.0\t53.0\t48.2\t48.0\t49.0\t49.0\t53.0\t0.0\n+6.0\t12.0\t67.0\t65.0\t69.1\t65.0\t73.0\t70.0\t83.0\t0.0\n+10.0\t20.0\t61.0\t58.0\t58.1\t58.0\t59.0\t58.0\t43.0\t0.0\n+7.0\t13.0\t74.0\t77.0\t75.6\t74.0\t78.0\t76.0\t56.0\t0.0\n+11.0\t7.0\t58.0\t61.0\t52.9\t51.0\t56.0\t51.0\t35.0\t1.0\n+10.0\t1.0\t66.0\t67.0\t65.3\t64.0\t70.0\t64.0\t54.0\t0.0\n+11.0\t22.0\t55.0\t54.0\t49.3\t46.0\t54.0\t49.0\t58.0\t0.0\n+6.0\t1.0\t71.0\t79.0\t67.4\t65.0\t69.0\t66.0\t58.0\t0.0\n+5.0\t13.0\t81.0\t77.0\t64.3\t63.0\t67.0\t66.0\t67.0\t0.0\n+6.0\t3.0\t75.0\t71.0\t67.7\t64.0\t71.0\t66.0\t55.0\t0.0\n+4.0\t12.0\t59.0\t58.0\t57.7\t54.0\t59.0\t57.0\t61.0\t0.0\n+3.0\t31.0\t64.0\t68.0\t55.9\t55.0\t59.0\t56.0\t56.0\t0.0\n+12.0\t14.0\t43.0\t40.0\t45.4\t45.0\t48.0\t45.0\t49.0\t0.0\n+8.0\t5.0\t75.0\t80.0\t77.3\t75.0\t81.0\t78.0\t71.0\t0.0\n+5.0\t4.0\t87.0\t74.0\t62.3\t59.0\t65.0\t64.0\t61.0\t0.0\n+12.0\t31.0\t48.0\t57.0\t45.5\t42.0\t48.0\t47.0\t57.0\t0.0\n+1.0\t21.0\t48.0\t52.0\t47.8\t43.0\t51.0\t46.0\t57.0\t0.0\n+7.0\t10.0\t74.0\t71.0\t75.1\t71.0\t77.0\t76.0\t95.0\t0.0\n+3.0\t15.0\t54.0\t49.0\t53.6\t49.0\t58.0\t52.0\t70.0\t0.0\n+4.0\t19.0\t77.0\t89.0\t59.0\t59.0\t63.0\t59.0\t61.0\t0.0\n+10.0\t14.0\t66.0\t60.0\t60.2\t56.0\t64.0\t60.0\t78.0\t0.0\n+4.0\t15.0\t59.0\t59.0\t58.3\t58.0\t61.0\t60.0\t40.0\t0.0\n'
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result05
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result05 Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,262 @@\n+month\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\n+9.0\t19.0\t68.0\t69.0\t69.7\t65.0\t74.0\t71.0\t88.0\n+4.0\t14.0\t60.0\t59.0\t58.1\t57.0\t63.0\t58.0\t66.0\n+7.0\t30.0\t85.0\t88.0\t77.3\t75.0\t79.0\t77.0\t70.0\n+5.0\t15.0\t82.0\t65.0\t64.7\t63.0\t69.0\t64.0\t58.0\n+1.0\t18.0\t54.0\t50.0\t47.5\t44.0\t48.0\t49.0\t58.0\n+1.0\t25.0\t48.0\t51.0\t48.2\t45.0\t51.0\t49.0\t63.0\n+11.0\t25.0\t49.0\t52.0\t48.6\t45.0\t52.0\t47.0\t41.0\n+7.0\t20.0\t73.0\t78.0\t76.7\t75.0\t78.0\t77.0\t66.0\n+12.0\t17.0\t39.0\t35.0\t45.2\t43.0\t47.0\t46.0\t38.0\n+12.0\t8.0\t42.0\t40.0\t46.1\t45.0\t51.0\t47.0\t36.0\n+12.0\t28.0\t42.0\t47.0\t45.3\t41.0\t49.0\t44.0\t58.0\n+7.0\t17.0\t76.0\t72.0\t76.3\t76.0\t78.0\t77.0\t88.0\n+7.0\t7.0\t69.0\t76.0\t74.4\t73.0\t77.0\t74.0\t72.0\n+12.0\t15.0\t40.0\t39.0\t45.3\t45.0\t49.0\t47.0\t46.0\n+6.0\t27.0\t71.0\t78.0\t72.2\t70.0\t74.0\t72.0\t84.0\n+5.0\t31.0\t64.0\t71.0\t67.3\t63.0\t72.0\t68.0\t85.0\n+1.0\t20.0\t54.0\t48.0\t47.7\t44.0\t52.0\t49.0\t61.0\n+8.0\t10.0\t73.0\t72.0\t77.0\t77.0\t78.0\t77.0\t68.0\n+3.0\t23.0\t56.0\t57.0\t54.7\t50.0\t58.0\t55.0\t70.0\n+12.0\t24.0\t45.0\t40.0\t45.1\t44.0\t47.0\t46.0\t39.0\n+1.0\t19.0\t50.0\t54.0\t47.6\t47.0\t49.0\t48.0\t53.0\n+11.0\t6.0\t65.0\t58.0\t53.2\t52.0\t57.0\t55.0\t71.0\n+4.0\t17.0\t60.0\t68.0\t58.6\t58.0\t62.0\t59.0\t54.0\n+10.0\t29.0\t60.0\t65.0\t55.3\t55.0\t59.0\t55.0\t65.0\n+2.0\t1.0\t48.0\t47.0\t48.8\t46.0\t49.0\t49.0\t51.0\n+12.0\t12.0\t44.0\t44.0\t45.6\t43.0\t50.0\t45.0\t42.0\n+5.0\t30.0\t64.0\t64.0\t67.1\t64.0\t70.0\t66.0\t69.0\n+10.0\t23.0\t59.0\t62.0\t57.1\t57.0\t58.0\t59.0\t67.0\n+9.0\t30.0\t68.0\t66.0\t65.7\t64.0\t67.0\t65.0\t74.0\n+9.0\t12.0\t77.0\t70.0\t71.8\t67.0\t73.0\t73.0\t90.0\n+11.0\t2.0\t59.0\t57.0\t54.2\t54.0\t58.0\t55.0\t70.0\n+11.0\t17.0\t55.0\t50.0\t50.5\t46.0\t51.0\t50.0\t57.0\n+3.0\t3.0\t58.0\t55.0\t51.8\t49.0\t54.0\t50.0\t71.0\n+11.0\t21.0\t57.0\t55.0\t49.5\t46.0\t51.0\t49.0\t67.0\n+12.0\t27.0\t42.0\t42.0\t45.2\t41.0\t50.0\t47.0\t47.0\n+4.0\t24.0\t64.0\t65.0\t60.1\t57.0\t61.0\t60.0\t41.0\n+5.0\t20.0\t64.0\t63.0\t65.6\t63.0\t70.0\t64.0\t73.0\n+1.0\t16.0\t49.0\t48.0\t47.3\t45.0\t52.0\t46.0\t28.0\n+12.0\t7.0\t40.0\t42.0\t46.3\t44.0\t51.0\t46.0\t62.0\n+1.0\t7.0\t44.0\t51.0\t46.2\t45.0\t49.0\t46.0\t38.0\n+9.0\t24.0\t67.0\t64.0\t68.0\t65.0\t71.0\t66.0\t64.0\n+8.0\t30.0\t79.0\t75.0\t74.6\t74.0\t76.0\t75.0\t63.0\n+1.0\t11.0\t50.0\t52.0\t46.7\t42.0\t48.0\t48.0\t39.0\n+6.0\t9.0\t85.0\t67.0\t68.6\t66.0\t73.0\t69.0\t80.0\n+9.0\t22.0\t67.0\t68.0\t68.7\t65.0\t70.0\t69.0\t56.0\n+3.0\t25.0\t53.0\t54.0\t55.0\t53.0\t57.0\t57.0\t42.0\n+10.0\t24.0\t62.0\t62.0\t56.8\t52.0\t61.0\t57.0\t70.0\n+7.0\t16.0\t77.0\t76.0\t76.1\t76.0\t78.0\t75.0\t61.0\n+7.0\t1.0\t74.0\t73.0\t73.1\t71.0\t75.0\t72.0\t93.0\n+11.0\t18.0\t50.0\t52.0\t50.3\t50.0\t53.0\t50.0\t35.0\n+9.0\t3.0\t75.0\t70.0\t73.9\t71.0\t75.0\t73.0\t68.0\n+8.0\t2.0\t73.0\t77.0\t77.4\t75.0\t80.0\t79.0\t62.0\n+4.0\t5.0\t69.0\t60.0\t56.6\t52.0\t58.0\t56.0\t72.0\n+3.0\t13.0\t55.0\t52.0\t53.3\t50.0\t55.0\t53.0\t54.0\n+8.0\t28.0\t81.0\t79.0\t75.0\t71.0\t77.0\t76.0\t85.0\n+4.0\t9.0\t77.0\t76.0\t57.2\t53.0\t61.0\t57.0\t74.0\n+5.0\t26.0\t66.0\t66.0\t66.5\t64.0\t70.0\t65.0\t85.0\n+10.0\t10.0\t68.0\t57.0\t61.8\t58.0\t64.0\t61.0\t62.0\n+4.0\t10.0\t76.0\t66.0\t57.4\t57.0\t60.0\t57.0\t60.0\n+10.0\t19.0\t60.0\t61.0\t58.4\t58.0\t60.0\t57.0\t41.0\n+3.0\t12.0\t56.0\t55.0\t53.1\t52.0\t58.0\t53.0\t65.0\n+1.0\t24.0\t57.0\t48.0\t48.1\t46.0\t50.0\t48.0\t54.0\n+2.0\t7.0\t53.0\t49.0\t49.2\t46.0\t51.0\t48.0\t63.0\n+5.0\t27.0\t66.0\t65.0\t66.7\t64.0\t6
7.0\t68.0\t73.0\n+5.0\t5.0\t74.0\t60.0\t62.5\t58.0\t66.0\t62.0\t56.0\n+3.0\t11.0\t55.0\t56.0\t53.0\t53.0\t53.0\t51.0\t36.0\n+10.0\t22.0\t62.0\t59.0\t57.4\t56.0\t59.0\t58.0\t44.0\n+12.0\t11.0\t36.0\t44.0\t45.7\t41.0\t46.0\t47.0\t35.0\n+5.0\t8.0\t77.0\t82.0\t63.2\t62.0\t65.0\t63.0\t83.0\n+5.0\t29.0\t64.0\t64.0\t67.0\t65.0\t71.0\t65.0\t76.0\n+12.0\t13.0\t44.0\t43.0\t45.5\t41.0\t47.0\t46.0\t46.0\n+3.0\t30.0\t56.0\t64.0\t55.7\t51.0\t57.0\t56.0\t57.0\n+11.0\t8.0\t61.0\t63.0\t52.7\t49.0\t57.0\t52.0\t49.0\n+6.0\t20.0\t65.0\t70.0\t70.6\t67.0\t71.0\t70.0\t79.0\n+11.0\t9.0\t63.0\t71.0\t52.4\t48.0\t56.0\t52.0\t42.0\n+7.0\t3.0\t76.0\t76.0\t73.5\t69.0\t76.0\t75.0\t85.0\n+10.0\t9.0\t64.0\t68.0\t62.1\t58.0\t65.0\t63.0\t55.0\n+12.0\t16.0\t39.0\t39.0\t45.3\t44.0\t49.0\t44.0\t39.0\n+9.0\t16.0\t79.0\t71.0\t70.7\t70.0\t74.0\t71.0\t52.0\n+6.0\t25.0\t68.0\t69.0\t71.7\t68.0\t73.0\t73.0\t89.0\n+9.0\t13.0\t70.0\t74.0\t71.5\t71.0\t75.0\t70.0\t82.0\n+5.0\t12.0\t75.0\t81.0\t64.1\t62.0\t67.0\t63.0\t81.0\n+2.0\t8.0\t49.0\t51.0\t49.3\t49.0\t52.0\t50.0\t34.0\n+1.0\t12.0\t52.0\t45.0\t46.8\t44.0\t50.0\t45.0\t61.0\n+8.0\t13.0\t80.0\t87.0\t76.8\t73.0\t79.0\t78.0\t73.0\n+7.0\t4.0\t76.0\t71.0\t73.8\t71.0\t76.0\t73.0\t86.0\n+4.0\t25.0\t65.0\t55.0\t60.3\t5'..b'\t24.0\t54.0\t49.0\t48.9\t47.0\t53.0\t48.0\t29.0\n+1.0\t28.0\t56.0\t57.0\t48.4\t44.0\t52.0\t48.0\t34.0\n+10.0\t18.0\t60.0\t60.0\t58.8\t54.0\t60.0\t57.0\t53.0\n+9.0\t4.0\t70.0\t67.0\t73.7\t72.0\t77.0\t75.0\t64.0\n+10.0\t4.0\t65.0\t61.0\t64.1\t62.0\t69.0\t65.0\t60.0\n+6.0\t14.0\t70.0\t66.0\t69.5\t66.0\t71.0\t69.0\t85.0\n+11.0\t11.0\t65.0\t64.0\t51.9\t50.0\t53.0\t52.0\t55.0\n+5.0\t21.0\t63.0\t66.0\t65.7\t62.0\t67.0\t65.0\t49.0\n+3.0\t6.0\t57.0\t64.0\t52.2\t52.0\t53.0\t51.0\t49.0\n+5.0\t18.0\t60.0\t71.0\t65.2\t61.0\t68.0\t65.0\t56.0\n+5.0\t11.0\t67.0\t75.0\t63.8\t62.0\t68.0\t63.0\t60.0\n+1.0\t9.0\t45.0\t48.0\t46.4\t46.0\t50.0\t45.0\t47.0\n+3.0\t8.0\t60.0\t53.0\t52.5\t48.0\t56.0\t51.0\t70.0\n+1.0\t15.0\t55.0\t49.0\t47.1\t46.0\t51.0\t46.0\t65.0\n+6.0\t8.0\t86.0\t85.0\t68.5\t67.0\t70.0\t69.0\t81.0\n+2.0\t10.0\t57.0\t62.0\t49.4\t48.0\t50.0\t49.0\t30.0\n+12.0\t3.0\t46.0\t50.0\t47.0\t42.0\t52.0\t47.0\t58.0\n+10.0\t27.0\t65.0\t58.0\t55.9\t51.0\t60.0\t55.0\t39.0\n+8.0\t7.0\t79.0\t72.0\t77.2\t74.0\t78.0\t77.0\t95.0\n+11.0\t16.0\t57.0\t55.0\t50.7\t50.0\t51.0\t49.0\t34.0\n+9.0\t10.0\t72.0\t74.0\t72.3\t70.0\t77.0\t74.0\t91.0\n+7.0\t29.0\t83.0\t85.0\t77.3\t77.0\t80.0\t79.0\t77.0\n+8.0\t3.0\t77.0\t73.0\t77.3\t77.0\t81.0\t77.0\t93.0\n+12.0\t1.0\t52.0\t52.0\t47.4\t44.0\t48.0\t49.0\t39.0\n+9.0\t25.0\t64.0\t67.0\t67.6\t64.0\t72.0\t67.0\t62.0\n+12.0\t23.0\t49.0\t45.0\t45.1\t45.0\t49.0\t44.0\t35.0\n+12.0\t2.0\t52.0\t46.0\t47.2\t46.0\t51.0\t49.0\t41.0\n+10.0\t13.0\t62.0\t66.0\t60.6\t60.0\t62.0\t60.0\t57.0\n+7.0\t23.0\t81.0\t71.0\t77.0\t75.0\t81.0\t76.0\t86.0\n+6.0\t13.0\t65.0\t70.0\t69.3\t66.0\t72.0\t69.0\t79.0\n+2.0\t15.0\t55.0\t58.0\t49.9\t46.0\t52.0\t49.0\t53.0\n+8.0\t8.0\t72.0\t72.0\t77.1\t76.0\t78.0\t77.0\t65.0\n+7.0\t12.0\t74.0\t74.0\t75.4\t74.0\t77.0\t77.0\t71.0\n+10.0\t3.0\t63.0\t65.0\t64.5\t63.0\t68.0\t65.0\t49.0\n+4.0\t18.0\t68.0\t77.0\t58.8\t55.0\t59.0\t57.0\t39.0\n+2.0\t25.0\t60.0\t59.0\t50.9\t49.0\t51.0\t49.0\t35.0\n+1.0\t2.0\t44.0\t45.0\t45.7\t41.0\t50.0\t44.0\t61.0\n+2.0\t21.0\t51.0\t53.0\t50.5\t49.0\t54.0\t52.0\t46.0\n+3.0\t24.0\t57.0\t53.0\t54.9\t54.0\t56.0\t56.0\t72.0\n+7.0\t27.0\t85.0\t79.0\t77.3\t73.0\t78.0\t79.0\t79.0\n+2.0\t4.0\t51.0\t49.0\t49.0\t44.0\t54.0\t51.0\t44.0\n+10.0\t7.0\t66.0\t63.0\t62.9\t62.0\t67.0\t64.0\t78.0\n+4.0\t4.0\t63.0\t69.0\t56.5\t54.0\t59.0\t56.0\t45.0\n+2
.0\t24.0\t51.0\t60.0\t50.8\t47.0\t53.0\t50.0\t46.0\n+10.0\t8.0\t63.0\t64.0\t62.5\t60.0\t65.0\t61.0\t73.0\n+9.0\t15.0\t75.0\t79.0\t71.0\t66.0\t76.0\t69.0\t64.0\n+1.0\t14.0\t49.0\t55.0\t47.0\t43.0\t47.0\t46.0\t58.0\n+4.0\t1.0\t68.0\t73.0\t56.0\t54.0\t59.0\t55.0\t41.0\n+10.0\t17.0\t62.0\t60.0\t59.1\t57.0\t63.0\t59.0\t62.0\n+6.0\t18.0\t71.0\t67.0\t70.2\t67.0\t75.0\t69.0\t77.0\n+12.0\t26.0\t41.0\t42.0\t45.2\t45.0\t48.0\t46.0\t58.0\n+5.0\t17.0\t57.0\t60.0\t65.0\t62.0\t65.0\t65.0\t55.0\n+11.0\t20.0\t55.0\t57.0\t49.8\t47.0\t54.0\t48.0\t30.0\n+12.0\t18.0\t35.0\t35.0\t45.2\t44.0\t46.0\t46.0\t36.0\n+9.0\t17.0\t71.0\t75.0\t70.3\t66.0\t73.0\t70.0\t84.0\n+2.0\t26.0\t59.0\t61.0\t51.1\t48.0\t56.0\t53.0\t65.0\n+2.0\t22.0\t53.0\t51.0\t50.6\t46.0\t51.0\t50.0\t59.0\n+6.0\t26.0\t69.0\t71.0\t71.9\t67.0\t74.0\t72.0\t70.0\n+7.0\t11.0\t71.0\t74.0\t75.3\t74.0\t79.0\t75.0\t71.0\n+12.0\t30.0\t48.0\t48.0\t45.4\t44.0\t46.0\t44.0\t42.0\n+7.0\t9.0\t68.0\t74.0\t74.9\t70.0\t79.0\t76.0\t60.0\n+6.0\t21.0\t70.0\t76.0\t70.8\t68.0\t75.0\t71.0\t57.0\n+3.0\t2.0\t54.0\t58.0\t51.6\t47.0\t54.0\t52.0\t37.0\n+2.0\t20.0\t53.0\t51.0\t50.4\t48.0\t55.0\t51.0\t43.0\n+9.0\t9.0\t67.0\t72.0\t72.6\t68.0\t77.0\t71.0\t78.0\n+9.0\t26.0\t67.0\t76.0\t67.2\t64.0\t69.0\t69.0\t74.0\n+1.0\t22.0\t52.0\t52.0\t47.9\t47.0\t48.0\t48.0\t60.0\n+11.0\t27.0\t52.0\t53.0\t48.2\t48.0\t49.0\t49.0\t53.0\n+6.0\t12.0\t67.0\t65.0\t69.1\t65.0\t73.0\t70.0\t83.0\n+10.0\t20.0\t61.0\t58.0\t58.1\t58.0\t59.0\t58.0\t43.0\n+7.0\t13.0\t74.0\t77.0\t75.6\t74.0\t78.0\t76.0\t56.0\n+11.0\t7.0\t58.0\t61.0\t52.9\t51.0\t56.0\t51.0\t35.0\n+10.0\t1.0\t66.0\t67.0\t65.3\t64.0\t70.0\t64.0\t54.0\n+11.0\t22.0\t55.0\t54.0\t49.3\t46.0\t54.0\t49.0\t58.0\n+6.0\t1.0\t71.0\t79.0\t67.4\t65.0\t69.0\t66.0\t58.0\n+5.0\t13.0\t81.0\t77.0\t64.3\t63.0\t67.0\t66.0\t67.0\n+6.0\t3.0\t75.0\t71.0\t67.7\t64.0\t71.0\t66.0\t55.0\n+4.0\t12.0\t59.0\t58.0\t57.7\t54.0\t59.0\t57.0\t61.0\n+3.0\t31.0\t64.0\t68.0\t55.9\t55.0\t59.0\t56.0\t56.0\n+12.0\t14.0\t43.0\t40.0\t45.4\t45.0\t48.0\t45.0\t49.0\n+8.0\t5.0\t75.0\t80.0\t77.3\t75.0\t81.0\t78.0\t71.0\n+5.0\t4.0\t87.0\t74.0\t62.3\t59.0\t65.0\t64.0\t61.0\n+12.0\t31.0\t48.0\t57.0\t45.5\t42.0\t48.0\t47.0\t57.0\n+1.0\t21.0\t48.0\t52.0\t47.8\t43.0\t51.0\t46.0\t57.0\n+7.0\t10.0\t74.0\t71.0\t75.1\t71.0\t77.0\t76.0\t95.0\n+3.0\t15.0\t54.0\t49.0\t53.6\t49.0\t58.0\t52.0\t70.0\n+4.0\t19.0\t77.0\t89.0\t59.0\t59.0\t63.0\t59.0\t61.0\n+10.0\t14.0\t66.0\t60.0\t60.2\t56.0\t64.0\t60.0\t78.0\n+4.0\t15.0\t59.0\t59.0\t58.3\t58.0\t61.0\t60.0\t40.0\n'
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result06
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result06 Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,262 @@\n+month\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\n+9.0\t19.0\t68.0\t69.0\t69.7\t65.0\t74.0\t71.0\t88.0\n+4.0\t14.0\t60.0\t59.0\t58.1\t57.0\t63.0\t58.0\t66.0\n+7.0\t30.0\t85.0\t88.0\t77.3\t75.0\t79.0\t77.0\t70.0\n+5.0\t15.0\t82.0\t65.0\t64.7\t63.0\t69.0\t64.0\t58.0\n+1.0\t18.0\t54.0\t50.0\t47.5\t44.0\t48.0\t49.0\t58.0\n+1.0\t25.0\t48.0\t51.0\t48.2\t45.0\t51.0\t49.0\t63.0\n+11.0\t25.0\t49.0\t52.0\t48.6\t45.0\t52.0\t47.0\t41.0\n+7.0\t20.0\t73.0\t78.0\t76.7\t75.0\t78.0\t77.0\t66.0\n+12.0\t17.0\t39.0\t35.0\t45.2\t43.0\t47.0\t46.0\t38.0\n+12.0\t8.0\t42.0\t40.0\t46.1\t45.0\t51.0\t47.0\t36.0\n+12.0\t28.0\t42.0\t47.0\t45.3\t41.0\t49.0\t44.0\t58.0\n+7.0\t17.0\t76.0\t72.0\t76.3\t76.0\t78.0\t77.0\t88.0\n+7.0\t7.0\t69.0\t76.0\t74.4\t73.0\t77.0\t74.0\t72.0\n+12.0\t15.0\t40.0\t39.0\t45.3\t45.0\t49.0\t47.0\t46.0\n+6.0\t27.0\t71.0\t78.0\t72.2\t70.0\t74.0\t72.0\t84.0\n+5.0\t31.0\t64.0\t71.0\t67.3\t63.0\t72.0\t68.0\t85.0\n+1.0\t20.0\t54.0\t48.0\t47.7\t44.0\t52.0\t49.0\t61.0\n+8.0\t10.0\t73.0\t72.0\t77.0\t77.0\t78.0\t77.0\t68.0\n+3.0\t23.0\t56.0\t57.0\t54.7\t50.0\t58.0\t55.0\t70.0\n+12.0\t24.0\t45.0\t40.0\t45.1\t44.0\t47.0\t46.0\t39.0\n+1.0\t19.0\t50.0\t54.0\t47.6\t47.0\t49.0\t48.0\t53.0\n+11.0\t6.0\t65.0\t58.0\t53.2\t52.0\t57.0\t55.0\t71.0\n+4.0\t17.0\t60.0\t68.0\t58.6\t58.0\t62.0\t59.0\t54.0\n+10.0\t29.0\t60.0\t65.0\t55.3\t55.0\t59.0\t55.0\t65.0\n+2.0\t1.0\t48.0\t47.0\t48.8\t46.0\t49.0\t49.0\t51.0\n+12.0\t12.0\t44.0\t44.0\t45.6\t43.0\t50.0\t45.0\t42.0\n+5.0\t30.0\t64.0\t64.0\t67.1\t64.0\t70.0\t66.0\t69.0\n+10.0\t23.0\t59.0\t62.0\t57.1\t57.0\t58.0\t59.0\t67.0\n+9.0\t30.0\t68.0\t66.0\t65.7\t64.0\t67.0\t65.0\t74.0\n+9.0\t12.0\t77.0\t70.0\t71.8\t67.0\t73.0\t73.0\t90.0\n+11.0\t2.0\t59.0\t57.0\t54.2\t54.0\t58.0\t55.0\t70.0\n+11.0\t17.0\t55.0\t50.0\t50.5\t46.0\t51.0\t50.0\t57.0\n+3.0\t3.0\t58.0\t55.0\t51.8\t49.0\t54.0\t50.0\t71.0\n+11.0\t21.0\t57.0\t55.0\t49.5\t46.0\t51.0\t49.0\t67.0\n+12.0\t27.0\t42.0\t42.0\t45.2\t41.0\t50.0\t47.0\t47.0\n+4.0\t24.0\t64.0\t65.0\t60.1\t57.0\t61.0\t60.0\t41.0\n+5.0\t20.0\t64.0\t63.0\t65.6\t63.0\t70.0\t64.0\t73.0\n+1.0\t16.0\t49.0\t48.0\t47.3\t45.0\t52.0\t46.0\t28.0\n+12.0\t7.0\t40.0\t42.0\t46.3\t44.0\t51.0\t46.0\t62.0\n+1.0\t7.0\t44.0\t51.0\t46.2\t45.0\t49.0\t46.0\t38.0\n+9.0\t24.0\t67.0\t64.0\t68.0\t65.0\t71.0\t66.0\t64.0\n+8.0\t30.0\t79.0\t75.0\t74.6\t74.0\t76.0\t75.0\t63.0\n+1.0\t11.0\t50.0\t52.0\t46.7\t42.0\t48.0\t48.0\t39.0\n+6.0\t9.0\t85.0\t67.0\t68.6\t66.0\t73.0\t69.0\t80.0\n+9.0\t22.0\t67.0\t68.0\t68.7\t65.0\t70.0\t69.0\t56.0\n+3.0\t25.0\t53.0\t54.0\t55.0\t53.0\t57.0\t57.0\t42.0\n+10.0\t24.0\t62.0\t62.0\t56.8\t52.0\t61.0\t57.0\t70.0\n+7.0\t16.0\t77.0\t76.0\t76.1\t76.0\t78.0\t75.0\t61.0\n+7.0\t1.0\t74.0\t73.0\t73.1\t71.0\t75.0\t72.0\t93.0\n+11.0\t18.0\t50.0\t52.0\t50.3\t50.0\t53.0\t50.0\t35.0\n+9.0\t3.0\t75.0\t70.0\t73.9\t71.0\t75.0\t73.0\t68.0\n+8.0\t2.0\t73.0\t77.0\t77.4\t75.0\t80.0\t79.0\t62.0\n+4.0\t5.0\t69.0\t60.0\t56.6\t52.0\t58.0\t56.0\t72.0\n+3.0\t13.0\t55.0\t52.0\t53.3\t50.0\t55.0\t53.0\t54.0\n+8.0\t28.0\t81.0\t79.0\t75.0\t71.0\t77.0\t76.0\t85.0\n+4.0\t9.0\t77.0\t76.0\t57.2\t53.0\t61.0\t57.0\t74.0\n+5.0\t26.0\t66.0\t66.0\t66.5\t64.0\t70.0\t65.0\t85.0\n+10.0\t10.0\t68.0\t57.0\t61.8\t58.0\t64.0\t61.0\t62.0\n+4.0\t10.0\t76.0\t66.0\t57.4\t57.0\t60.0\t57.0\t60.0\n+10.0\t19.0\t60.0\t61.0\t58.4\t58.0\t60.0\t57.0\t41.0\n+3.0\t12.0\t56.0\t55.0\t53.1\t52.0\t58.0\t53.0\t65.0\n+1.0\t24.0\t57.0\t48.0\t48.1\t46.0\t50.0\t48.0\t54.0\n+2.0\t7.0\t53.0\t49.0\t49.2\t46.0\t51.0\t48.0\t63.0\n+5.0\t27.0\t66.0\t65.0\t66.7\t64.0\t6
7.0\t68.0\t73.0\n+5.0\t5.0\t74.0\t60.0\t62.5\t58.0\t66.0\t62.0\t56.0\n+3.0\t11.0\t55.0\t56.0\t53.0\t53.0\t53.0\t51.0\t36.0\n+10.0\t22.0\t62.0\t59.0\t57.4\t56.0\t59.0\t58.0\t44.0\n+12.0\t11.0\t36.0\t44.0\t45.7\t41.0\t46.0\t47.0\t35.0\n+5.0\t8.0\t77.0\t82.0\t63.2\t62.0\t65.0\t63.0\t83.0\n+5.0\t29.0\t64.0\t64.0\t67.0\t65.0\t71.0\t65.0\t76.0\n+12.0\t13.0\t44.0\t43.0\t45.5\t41.0\t47.0\t46.0\t46.0\n+3.0\t30.0\t56.0\t64.0\t55.7\t51.0\t57.0\t56.0\t57.0\n+11.0\t8.0\t61.0\t63.0\t52.7\t49.0\t57.0\t52.0\t49.0\n+6.0\t20.0\t65.0\t70.0\t70.6\t67.0\t71.0\t70.0\t79.0\n+11.0\t9.0\t63.0\t71.0\t52.4\t48.0\t56.0\t52.0\t42.0\n+7.0\t3.0\t76.0\t76.0\t73.5\t69.0\t76.0\t75.0\t85.0\n+10.0\t9.0\t64.0\t68.0\t62.1\t58.0\t65.0\t63.0\t55.0\n+12.0\t16.0\t39.0\t39.0\t45.3\t44.0\t49.0\t44.0\t39.0\n+9.0\t16.0\t79.0\t71.0\t70.7\t70.0\t74.0\t71.0\t52.0\n+6.0\t25.0\t68.0\t69.0\t71.7\t68.0\t73.0\t73.0\t89.0\n+9.0\t13.0\t70.0\t74.0\t71.5\t71.0\t75.0\t70.0\t82.0\n+5.0\t12.0\t75.0\t81.0\t64.1\t62.0\t67.0\t63.0\t81.0\n+2.0\t8.0\t49.0\t51.0\t49.3\t49.0\t52.0\t50.0\t34.0\n+1.0\t12.0\t52.0\t45.0\t46.8\t44.0\t50.0\t45.0\t61.0\n+8.0\t13.0\t80.0\t87.0\t76.8\t73.0\t79.0\t78.0\t73.0\n+7.0\t4.0\t76.0\t71.0\t73.8\t71.0\t76.0\t73.0\t86.0\n+4.0\t25.0\t65.0\t55.0\t60.3\t5'..b'\t24.0\t54.0\t49.0\t48.9\t47.0\t53.0\t48.0\t29.0\n+1.0\t28.0\t56.0\t57.0\t48.4\t44.0\t52.0\t48.0\t34.0\n+10.0\t18.0\t60.0\t60.0\t58.8\t54.0\t60.0\t57.0\t53.0\n+9.0\t4.0\t70.0\t67.0\t73.7\t72.0\t77.0\t75.0\t64.0\n+10.0\t4.0\t65.0\t61.0\t64.1\t62.0\t69.0\t65.0\t60.0\n+6.0\t14.0\t70.0\t66.0\t69.5\t66.0\t71.0\t69.0\t85.0\n+11.0\t11.0\t65.0\t64.0\t51.9\t50.0\t53.0\t52.0\t55.0\n+5.0\t21.0\t63.0\t66.0\t65.7\t62.0\t67.0\t65.0\t49.0\n+3.0\t6.0\t57.0\t64.0\t52.2\t52.0\t53.0\t51.0\t49.0\n+5.0\t18.0\t60.0\t71.0\t65.2\t61.0\t68.0\t65.0\t56.0\n+5.0\t11.0\t67.0\t75.0\t63.8\t62.0\t68.0\t63.0\t60.0\n+1.0\t9.0\t45.0\t48.0\t46.4\t46.0\t50.0\t45.0\t47.0\n+3.0\t8.0\t60.0\t53.0\t52.5\t48.0\t56.0\t51.0\t70.0\n+1.0\t15.0\t55.0\t49.0\t47.1\t46.0\t51.0\t46.0\t65.0\n+6.0\t8.0\t86.0\t85.0\t68.5\t67.0\t70.0\t69.0\t81.0\n+2.0\t10.0\t57.0\t62.0\t49.4\t48.0\t50.0\t49.0\t30.0\n+12.0\t3.0\t46.0\t50.0\t47.0\t42.0\t52.0\t47.0\t58.0\n+10.0\t27.0\t65.0\t58.0\t55.9\t51.0\t60.0\t55.0\t39.0\n+8.0\t7.0\t79.0\t72.0\t77.2\t74.0\t78.0\t77.0\t95.0\n+11.0\t16.0\t57.0\t55.0\t50.7\t50.0\t51.0\t49.0\t34.0\n+9.0\t10.0\t72.0\t74.0\t72.3\t70.0\t77.0\t74.0\t91.0\n+7.0\t29.0\t83.0\t85.0\t77.3\t77.0\t80.0\t79.0\t77.0\n+8.0\t3.0\t77.0\t73.0\t77.3\t77.0\t81.0\t77.0\t93.0\n+12.0\t1.0\t52.0\t52.0\t47.4\t44.0\t48.0\t49.0\t39.0\n+9.0\t25.0\t64.0\t67.0\t67.6\t64.0\t72.0\t67.0\t62.0\n+12.0\t23.0\t49.0\t45.0\t45.1\t45.0\t49.0\t44.0\t35.0\n+12.0\t2.0\t52.0\t46.0\t47.2\t46.0\t51.0\t49.0\t41.0\n+10.0\t13.0\t62.0\t66.0\t60.6\t60.0\t62.0\t60.0\t57.0\n+7.0\t23.0\t81.0\t71.0\t77.0\t75.0\t81.0\t76.0\t86.0\n+6.0\t13.0\t65.0\t70.0\t69.3\t66.0\t72.0\t69.0\t79.0\n+2.0\t15.0\t55.0\t58.0\t49.9\t46.0\t52.0\t49.0\t53.0\n+8.0\t8.0\t72.0\t72.0\t77.1\t76.0\t78.0\t77.0\t65.0\n+7.0\t12.0\t74.0\t74.0\t75.4\t74.0\t77.0\t77.0\t71.0\n+10.0\t3.0\t63.0\t65.0\t64.5\t63.0\t68.0\t65.0\t49.0\n+4.0\t18.0\t68.0\t77.0\t58.8\t55.0\t59.0\t57.0\t39.0\n+2.0\t25.0\t60.0\t59.0\t50.9\t49.0\t51.0\t49.0\t35.0\n+1.0\t2.0\t44.0\t45.0\t45.7\t41.0\t50.0\t44.0\t61.0\n+2.0\t21.0\t51.0\t53.0\t50.5\t49.0\t54.0\t52.0\t46.0\n+3.0\t24.0\t57.0\t53.0\t54.9\t54.0\t56.0\t56.0\t72.0\n+7.0\t27.0\t85.0\t79.0\t77.3\t73.0\t78.0\t79.0\t79.0\n+2.0\t4.0\t51.0\t49.0\t49.0\t44.0\t54.0\t51.0\t44.0\n+10.0\t7.0\t66.0\t63.0\t62.9\t62.0\t67.0\t64.0\t78.0\n+4.0\t4.0\t63.0\t69.0\t56.5\t54.0\t59.0\t56.0\t45.0\n+2
.0\t24.0\t51.0\t60.0\t50.8\t47.0\t53.0\t50.0\t46.0\n+10.0\t8.0\t63.0\t64.0\t62.5\t60.0\t65.0\t61.0\t73.0\n+9.0\t15.0\t75.0\t79.0\t71.0\t66.0\t76.0\t69.0\t64.0\n+1.0\t14.0\t49.0\t55.0\t47.0\t43.0\t47.0\t46.0\t58.0\n+4.0\t1.0\t68.0\t73.0\t56.0\t54.0\t59.0\t55.0\t41.0\n+10.0\t17.0\t62.0\t60.0\t59.1\t57.0\t63.0\t59.0\t62.0\n+6.0\t18.0\t71.0\t67.0\t70.2\t67.0\t75.0\t69.0\t77.0\n+12.0\t26.0\t41.0\t42.0\t45.2\t45.0\t48.0\t46.0\t58.0\n+5.0\t17.0\t57.0\t60.0\t65.0\t62.0\t65.0\t65.0\t55.0\n+11.0\t20.0\t55.0\t57.0\t49.8\t47.0\t54.0\t48.0\t30.0\n+12.0\t18.0\t35.0\t35.0\t45.2\t44.0\t46.0\t46.0\t36.0\n+9.0\t17.0\t71.0\t75.0\t70.3\t66.0\t73.0\t70.0\t84.0\n+2.0\t26.0\t59.0\t61.0\t51.1\t48.0\t56.0\t53.0\t65.0\n+2.0\t22.0\t53.0\t51.0\t50.6\t46.0\t51.0\t50.0\t59.0\n+6.0\t26.0\t69.0\t71.0\t71.9\t67.0\t74.0\t72.0\t70.0\n+7.0\t11.0\t71.0\t74.0\t75.3\t74.0\t79.0\t75.0\t71.0\n+12.0\t30.0\t48.0\t48.0\t45.4\t44.0\t46.0\t44.0\t42.0\n+7.0\t9.0\t68.0\t74.0\t74.9\t70.0\t79.0\t76.0\t60.0\n+6.0\t21.0\t70.0\t76.0\t70.8\t68.0\t75.0\t71.0\t57.0\n+3.0\t2.0\t54.0\t58.0\t51.6\t47.0\t54.0\t52.0\t37.0\n+2.0\t20.0\t53.0\t51.0\t50.4\t48.0\t55.0\t51.0\t43.0\n+9.0\t9.0\t67.0\t72.0\t72.6\t68.0\t77.0\t71.0\t78.0\n+9.0\t26.0\t67.0\t76.0\t67.2\t64.0\t69.0\t69.0\t74.0\n+1.0\t22.0\t52.0\t52.0\t47.9\t47.0\t48.0\t48.0\t60.0\n+11.0\t27.0\t52.0\t53.0\t48.2\t48.0\t49.0\t49.0\t53.0\n+6.0\t12.0\t67.0\t65.0\t69.1\t65.0\t73.0\t70.0\t83.0\n+10.0\t20.0\t61.0\t58.0\t58.1\t58.0\t59.0\t58.0\t43.0\n+7.0\t13.0\t74.0\t77.0\t75.6\t74.0\t78.0\t76.0\t56.0\n+11.0\t7.0\t58.0\t61.0\t52.9\t51.0\t56.0\t51.0\t35.0\n+10.0\t1.0\t66.0\t67.0\t65.3\t64.0\t70.0\t64.0\t54.0\n+11.0\t22.0\t55.0\t54.0\t49.3\t46.0\t54.0\t49.0\t58.0\n+6.0\t1.0\t71.0\t79.0\t67.4\t65.0\t69.0\t66.0\t58.0\n+5.0\t13.0\t81.0\t77.0\t64.3\t63.0\t67.0\t66.0\t67.0\n+6.0\t3.0\t75.0\t71.0\t67.7\t64.0\t71.0\t66.0\t55.0\n+4.0\t12.0\t59.0\t58.0\t57.7\t54.0\t59.0\t57.0\t61.0\n+3.0\t31.0\t64.0\t68.0\t55.9\t55.0\t59.0\t56.0\t56.0\n+12.0\t14.0\t43.0\t40.0\t45.4\t45.0\t48.0\t45.0\t49.0\n+8.0\t5.0\t75.0\t80.0\t77.3\t75.0\t81.0\t78.0\t71.0\n+5.0\t4.0\t87.0\t74.0\t62.3\t59.0\t65.0\t64.0\t61.0\n+12.0\t31.0\t48.0\t57.0\t45.5\t42.0\t48.0\t47.0\t57.0\n+1.0\t21.0\t48.0\t52.0\t47.8\t43.0\t51.0\t46.0\t57.0\n+7.0\t10.0\t74.0\t71.0\t75.1\t71.0\t77.0\t76.0\t95.0\n+3.0\t15.0\t54.0\t49.0\t53.6\t49.0\t58.0\t52.0\t70.0\n+4.0\t19.0\t77.0\t89.0\t59.0\t59.0\t63.0\t59.0\t61.0\n+10.0\t14.0\t66.0\t60.0\t60.2\t56.0\t64.0\t60.0\t78.0\n+4.0\t15.0\t59.0\t59.0\t58.3\t58.0\t61.0\t60.0\t40.0\n'
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result07
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result07 Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,262 @@\n+month\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\n+9.0\t19.0\t68.0\t69.0\t69.7\t65.0\t74.0\t71.0\t88.0\n+4.0\t14.0\t60.0\t59.0\t58.1\t57.0\t63.0\t58.0\t66.0\n+7.0\t30.0\t85.0\t88.0\t77.3\t75.0\t79.0\t77.0\t70.0\n+5.0\t15.0\t82.0\t65.0\t64.7\t63.0\t69.0\t64.0\t58.0\n+1.0\t18.0\t54.0\t50.0\t47.5\t44.0\t48.0\t49.0\t58.0\n+1.0\t25.0\t48.0\t51.0\t48.2\t45.0\t51.0\t49.0\t63.0\n+11.0\t25.0\t49.0\t52.0\t48.6\t45.0\t52.0\t47.0\t41.0\n+7.0\t20.0\t73.0\t78.0\t76.7\t75.0\t78.0\t77.0\t66.0\n+12.0\t17.0\t39.0\t35.0\t45.2\t43.0\t47.0\t46.0\t38.0\n+12.0\t8.0\t42.0\t40.0\t46.1\t45.0\t51.0\t47.0\t36.0\n+12.0\t28.0\t42.0\t47.0\t45.3\t41.0\t49.0\t44.0\t58.0\n+7.0\t17.0\t76.0\t72.0\t76.3\t76.0\t78.0\t77.0\t88.0\n+7.0\t7.0\t69.0\t76.0\t74.4\t73.0\t77.0\t74.0\t72.0\n+12.0\t15.0\t40.0\t39.0\t45.3\t45.0\t49.0\t47.0\t46.0\n+6.0\t27.0\t71.0\t78.0\t72.2\t70.0\t74.0\t72.0\t84.0\n+5.0\t31.0\t64.0\t71.0\t67.3\t63.0\t72.0\t68.0\t85.0\n+1.0\t20.0\t54.0\t48.0\t47.7\t44.0\t52.0\t49.0\t61.0\n+8.0\t10.0\t73.0\t72.0\t77.0\t77.0\t78.0\t77.0\t68.0\n+3.0\t23.0\t56.0\t57.0\t54.7\t50.0\t58.0\t55.0\t70.0\n+12.0\t24.0\t45.0\t40.0\t45.1\t44.0\t47.0\t46.0\t39.0\n+1.0\t19.0\t50.0\t54.0\t47.6\t47.0\t49.0\t48.0\t53.0\n+11.0\t6.0\t65.0\t58.0\t53.2\t52.0\t57.0\t55.0\t71.0\n+4.0\t17.0\t60.0\t68.0\t58.6\t58.0\t62.0\t59.0\t54.0\n+10.0\t29.0\t60.0\t65.0\t55.3\t55.0\t59.0\t55.0\t65.0\n+2.0\t1.0\t48.0\t47.0\t48.8\t46.0\t49.0\t49.0\t51.0\n+12.0\t12.0\t44.0\t44.0\t45.6\t43.0\t50.0\t45.0\t42.0\n+5.0\t30.0\t64.0\t64.0\t67.1\t64.0\t70.0\t66.0\t69.0\n+10.0\t23.0\t59.0\t62.0\t57.1\t57.0\t58.0\t59.0\t67.0\n+9.0\t30.0\t68.0\t66.0\t65.7\t64.0\t67.0\t65.0\t74.0\n+9.0\t12.0\t77.0\t70.0\t71.8\t67.0\t73.0\t73.0\t90.0\n+11.0\t2.0\t59.0\t57.0\t54.2\t54.0\t58.0\t55.0\t70.0\n+11.0\t17.0\t55.0\t50.0\t50.5\t46.0\t51.0\t50.0\t57.0\n+3.0\t3.0\t58.0\t55.0\t51.8\t49.0\t54.0\t50.0\t71.0\n+11.0\t21.0\t57.0\t55.0\t49.5\t46.0\t51.0\t49.0\t67.0\n+12.0\t27.0\t42.0\t42.0\t45.2\t41.0\t50.0\t47.0\t47.0\n+4.0\t24.0\t64.0\t65.0\t60.1\t57.0\t61.0\t60.0\t41.0\n+5.0\t20.0\t64.0\t63.0\t65.6\t63.0\t70.0\t64.0\t73.0\n+1.0\t16.0\t49.0\t48.0\t47.3\t45.0\t52.0\t46.0\t28.0\n+12.0\t7.0\t40.0\t42.0\t46.3\t44.0\t51.0\t46.0\t62.0\n+1.0\t7.0\t44.0\t51.0\t46.2\t45.0\t49.0\t46.0\t38.0\n+9.0\t24.0\t67.0\t64.0\t68.0\t65.0\t71.0\t66.0\t64.0\n+8.0\t30.0\t79.0\t75.0\t74.6\t74.0\t76.0\t75.0\t63.0\n+1.0\t11.0\t50.0\t52.0\t46.7\t42.0\t48.0\t48.0\t39.0\n+6.0\t9.0\t85.0\t67.0\t68.6\t66.0\t73.0\t69.0\t80.0\n+9.0\t22.0\t67.0\t68.0\t68.7\t65.0\t70.0\t69.0\t56.0\n+3.0\t25.0\t53.0\t54.0\t55.0\t53.0\t57.0\t57.0\t42.0\n+10.0\t24.0\t62.0\t62.0\t56.8\t52.0\t61.0\t57.0\t70.0\n+7.0\t16.0\t77.0\t76.0\t76.1\t76.0\t78.0\t75.0\t61.0\n+7.0\t1.0\t74.0\t73.0\t73.1\t71.0\t75.0\t72.0\t93.0\n+11.0\t18.0\t50.0\t52.0\t50.3\t50.0\t53.0\t50.0\t35.0\n+9.0\t3.0\t75.0\t70.0\t73.9\t71.0\t75.0\t73.0\t68.0\n+8.0\t2.0\t73.0\t77.0\t77.4\t75.0\t80.0\t79.0\t62.0\n+4.0\t5.0\t69.0\t60.0\t56.6\t52.0\t58.0\t56.0\t72.0\n+3.0\t13.0\t55.0\t52.0\t53.3\t50.0\t55.0\t53.0\t54.0\n+8.0\t28.0\t81.0\t79.0\t75.0\t71.0\t77.0\t76.0\t85.0\n+4.0\t9.0\t77.0\t76.0\t57.2\t53.0\t61.0\t57.0\t74.0\n+5.0\t26.0\t66.0\t66.0\t66.5\t64.0\t70.0\t65.0\t85.0\n+10.0\t10.0\t68.0\t57.0\t61.8\t58.0\t64.0\t61.0\t62.0\n+4.0\t10.0\t76.0\t66.0\t57.4\t57.0\t60.0\t57.0\t60.0\n+10.0\t19.0\t60.0\t61.0\t58.4\t58.0\t60.0\t57.0\t41.0\n+3.0\t12.0\t56.0\t55.0\t53.1\t52.0\t58.0\t53.0\t65.0\n+1.0\t24.0\t57.0\t48.0\t48.1\t46.0\t50.0\t48.0\t54.0\n+2.0\t7.0\t53.0\t49.0\t49.2\t46.0\t51.0\t48.0\t63.0\n+5.0\t27.0\t66.0\t65.0\t66.7\t64.0\t6
7.0\t68.0\t73.0\n+5.0\t5.0\t74.0\t60.0\t62.5\t58.0\t66.0\t62.0\t56.0\n+3.0\t11.0\t55.0\t56.0\t53.0\t53.0\t53.0\t51.0\t36.0\n+10.0\t22.0\t62.0\t59.0\t57.4\t56.0\t59.0\t58.0\t44.0\n+12.0\t11.0\t36.0\t44.0\t45.7\t41.0\t46.0\t47.0\t35.0\n+5.0\t8.0\t77.0\t82.0\t63.2\t62.0\t65.0\t63.0\t83.0\n+5.0\t29.0\t64.0\t64.0\t67.0\t65.0\t71.0\t65.0\t76.0\n+12.0\t13.0\t44.0\t43.0\t45.5\t41.0\t47.0\t46.0\t46.0\n+3.0\t30.0\t56.0\t64.0\t55.7\t51.0\t57.0\t56.0\t57.0\n+11.0\t8.0\t61.0\t63.0\t52.7\t49.0\t57.0\t52.0\t49.0\n+6.0\t20.0\t65.0\t70.0\t70.6\t67.0\t71.0\t70.0\t79.0\n+11.0\t9.0\t63.0\t71.0\t52.4\t48.0\t56.0\t52.0\t42.0\n+7.0\t3.0\t76.0\t76.0\t73.5\t69.0\t76.0\t75.0\t85.0\n+10.0\t9.0\t64.0\t68.0\t62.1\t58.0\t65.0\t63.0\t55.0\n+12.0\t16.0\t39.0\t39.0\t45.3\t44.0\t49.0\t44.0\t39.0\n+9.0\t16.0\t79.0\t71.0\t70.7\t70.0\t74.0\t71.0\t52.0\n+6.0\t25.0\t68.0\t69.0\t71.7\t68.0\t73.0\t73.0\t89.0\n+9.0\t13.0\t70.0\t74.0\t71.5\t71.0\t75.0\t70.0\t82.0\n+5.0\t12.0\t75.0\t81.0\t64.1\t62.0\t67.0\t63.0\t81.0\n+2.0\t8.0\t49.0\t51.0\t49.3\t49.0\t52.0\t50.0\t34.0\n+1.0\t12.0\t52.0\t45.0\t46.8\t44.0\t50.0\t45.0\t61.0\n+8.0\t13.0\t80.0\t87.0\t76.8\t73.0\t79.0\t78.0\t73.0\n+7.0\t4.0\t76.0\t71.0\t73.8\t71.0\t76.0\t73.0\t86.0\n+4.0\t25.0\t65.0\t55.0\t60.3\t5'..b'\t24.0\t54.0\t49.0\t48.9\t47.0\t53.0\t48.0\t29.0\n+1.0\t28.0\t56.0\t57.0\t48.4\t44.0\t52.0\t48.0\t34.0\n+10.0\t18.0\t60.0\t60.0\t58.8\t54.0\t60.0\t57.0\t53.0\n+9.0\t4.0\t70.0\t67.0\t73.7\t72.0\t77.0\t75.0\t64.0\n+10.0\t4.0\t65.0\t61.0\t64.1\t62.0\t69.0\t65.0\t60.0\n+6.0\t14.0\t70.0\t66.0\t69.5\t66.0\t71.0\t69.0\t85.0\n+11.0\t11.0\t65.0\t64.0\t51.9\t50.0\t53.0\t52.0\t55.0\n+5.0\t21.0\t63.0\t66.0\t65.7\t62.0\t67.0\t65.0\t49.0\n+3.0\t6.0\t57.0\t64.0\t52.2\t52.0\t53.0\t51.0\t49.0\n+5.0\t18.0\t60.0\t71.0\t65.2\t61.0\t68.0\t65.0\t56.0\n+5.0\t11.0\t67.0\t75.0\t63.8\t62.0\t68.0\t63.0\t60.0\n+1.0\t9.0\t45.0\t48.0\t46.4\t46.0\t50.0\t45.0\t47.0\n+3.0\t8.0\t60.0\t53.0\t52.5\t48.0\t56.0\t51.0\t70.0\n+1.0\t15.0\t55.0\t49.0\t47.1\t46.0\t51.0\t46.0\t65.0\n+6.0\t8.0\t86.0\t85.0\t68.5\t67.0\t70.0\t69.0\t81.0\n+2.0\t10.0\t57.0\t62.0\t49.4\t48.0\t50.0\t49.0\t30.0\n+12.0\t3.0\t46.0\t50.0\t47.0\t42.0\t52.0\t47.0\t58.0\n+10.0\t27.0\t65.0\t58.0\t55.9\t51.0\t60.0\t55.0\t39.0\n+8.0\t7.0\t79.0\t72.0\t77.2\t74.0\t78.0\t77.0\t95.0\n+11.0\t16.0\t57.0\t55.0\t50.7\t50.0\t51.0\t49.0\t34.0\n+9.0\t10.0\t72.0\t74.0\t72.3\t70.0\t77.0\t74.0\t91.0\n+7.0\t29.0\t83.0\t85.0\t77.3\t77.0\t80.0\t79.0\t77.0\n+8.0\t3.0\t77.0\t73.0\t77.3\t77.0\t81.0\t77.0\t93.0\n+12.0\t1.0\t52.0\t52.0\t47.4\t44.0\t48.0\t49.0\t39.0\n+9.0\t25.0\t64.0\t67.0\t67.6\t64.0\t72.0\t67.0\t62.0\n+12.0\t23.0\t49.0\t45.0\t45.1\t45.0\t49.0\t44.0\t35.0\n+12.0\t2.0\t52.0\t46.0\t47.2\t46.0\t51.0\t49.0\t41.0\n+10.0\t13.0\t62.0\t66.0\t60.6\t60.0\t62.0\t60.0\t57.0\n+7.0\t23.0\t81.0\t71.0\t77.0\t75.0\t81.0\t76.0\t86.0\n+6.0\t13.0\t65.0\t70.0\t69.3\t66.0\t72.0\t69.0\t79.0\n+2.0\t15.0\t55.0\t58.0\t49.9\t46.0\t52.0\t49.0\t53.0\n+8.0\t8.0\t72.0\t72.0\t77.1\t76.0\t78.0\t77.0\t65.0\n+7.0\t12.0\t74.0\t74.0\t75.4\t74.0\t77.0\t77.0\t71.0\n+10.0\t3.0\t63.0\t65.0\t64.5\t63.0\t68.0\t65.0\t49.0\n+4.0\t18.0\t68.0\t77.0\t58.8\t55.0\t59.0\t57.0\t39.0\n+2.0\t25.0\t60.0\t59.0\t50.9\t49.0\t51.0\t49.0\t35.0\n+1.0\t2.0\t44.0\t45.0\t45.7\t41.0\t50.0\t44.0\t61.0\n+2.0\t21.0\t51.0\t53.0\t50.5\t49.0\t54.0\t52.0\t46.0\n+3.0\t24.0\t57.0\t53.0\t54.9\t54.0\t56.0\t56.0\t72.0\n+7.0\t27.0\t85.0\t79.0\t77.3\t73.0\t78.0\t79.0\t79.0\n+2.0\t4.0\t51.0\t49.0\t49.0\t44.0\t54.0\t51.0\t44.0\n+10.0\t7.0\t66.0\t63.0\t62.9\t62.0\t67.0\t64.0\t78.0\n+4.0\t4.0\t63.0\t69.0\t56.5\t54.0\t59.0\t56.0\t45.0\n+2
.0\t24.0\t51.0\t60.0\t50.8\t47.0\t53.0\t50.0\t46.0\n+10.0\t8.0\t63.0\t64.0\t62.5\t60.0\t65.0\t61.0\t73.0\n+9.0\t15.0\t75.0\t79.0\t71.0\t66.0\t76.0\t69.0\t64.0\n+1.0\t14.0\t49.0\t55.0\t47.0\t43.0\t47.0\t46.0\t58.0\n+4.0\t1.0\t68.0\t73.0\t56.0\t54.0\t59.0\t55.0\t41.0\n+10.0\t17.0\t62.0\t60.0\t59.1\t57.0\t63.0\t59.0\t62.0\n+6.0\t18.0\t71.0\t67.0\t70.2\t67.0\t75.0\t69.0\t77.0\n+12.0\t26.0\t41.0\t42.0\t45.2\t45.0\t48.0\t46.0\t58.0\n+5.0\t17.0\t57.0\t60.0\t65.0\t62.0\t65.0\t65.0\t55.0\n+11.0\t20.0\t55.0\t57.0\t49.8\t47.0\t54.0\t48.0\t30.0\n+12.0\t18.0\t35.0\t35.0\t45.2\t44.0\t46.0\t46.0\t36.0\n+9.0\t17.0\t71.0\t75.0\t70.3\t66.0\t73.0\t70.0\t84.0\n+2.0\t26.0\t59.0\t61.0\t51.1\t48.0\t56.0\t53.0\t65.0\n+2.0\t22.0\t53.0\t51.0\t50.6\t46.0\t51.0\t50.0\t59.0\n+6.0\t26.0\t69.0\t71.0\t71.9\t67.0\t74.0\t72.0\t70.0\n+7.0\t11.0\t71.0\t74.0\t75.3\t74.0\t79.0\t75.0\t71.0\n+12.0\t30.0\t48.0\t48.0\t45.4\t44.0\t46.0\t44.0\t42.0\n+7.0\t9.0\t68.0\t74.0\t74.9\t70.0\t79.0\t76.0\t60.0\n+6.0\t21.0\t70.0\t76.0\t70.8\t68.0\t75.0\t71.0\t57.0\n+3.0\t2.0\t54.0\t58.0\t51.6\t47.0\t54.0\t52.0\t37.0\n+2.0\t20.0\t53.0\t51.0\t50.4\t48.0\t55.0\t51.0\t43.0\n+9.0\t9.0\t67.0\t72.0\t72.6\t68.0\t77.0\t71.0\t78.0\n+9.0\t26.0\t67.0\t76.0\t67.2\t64.0\t69.0\t69.0\t74.0\n+1.0\t22.0\t52.0\t52.0\t47.9\t47.0\t48.0\t48.0\t60.0\n+11.0\t27.0\t52.0\t53.0\t48.2\t48.0\t49.0\t49.0\t53.0\n+6.0\t12.0\t67.0\t65.0\t69.1\t65.0\t73.0\t70.0\t83.0\n+10.0\t20.0\t61.0\t58.0\t58.1\t58.0\t59.0\t58.0\t43.0\n+7.0\t13.0\t74.0\t77.0\t75.6\t74.0\t78.0\t76.0\t56.0\n+11.0\t7.0\t58.0\t61.0\t52.9\t51.0\t56.0\t51.0\t35.0\n+10.0\t1.0\t66.0\t67.0\t65.3\t64.0\t70.0\t64.0\t54.0\n+11.0\t22.0\t55.0\t54.0\t49.3\t46.0\t54.0\t49.0\t58.0\n+6.0\t1.0\t71.0\t79.0\t67.4\t65.0\t69.0\t66.0\t58.0\n+5.0\t13.0\t81.0\t77.0\t64.3\t63.0\t67.0\t66.0\t67.0\n+6.0\t3.0\t75.0\t71.0\t67.7\t64.0\t71.0\t66.0\t55.0\n+4.0\t12.0\t59.0\t58.0\t57.7\t54.0\t59.0\t57.0\t61.0\n+3.0\t31.0\t64.0\t68.0\t55.9\t55.0\t59.0\t56.0\t56.0\n+12.0\t14.0\t43.0\t40.0\t45.4\t45.0\t48.0\t45.0\t49.0\n+8.0\t5.0\t75.0\t80.0\t77.3\t75.0\t81.0\t78.0\t71.0\n+5.0\t4.0\t87.0\t74.0\t62.3\t59.0\t65.0\t64.0\t61.0\n+12.0\t31.0\t48.0\t57.0\t45.5\t42.0\t48.0\t47.0\t57.0\n+1.0\t21.0\t48.0\t52.0\t47.8\t43.0\t51.0\t46.0\t57.0\n+7.0\t10.0\t74.0\t71.0\t75.1\t71.0\t77.0\t76.0\t95.0\n+3.0\t15.0\t54.0\t49.0\t53.6\t49.0\t58.0\t52.0\t70.0\n+4.0\t19.0\t77.0\t89.0\t59.0\t59.0\t63.0\t59.0\t61.0\n+10.0\t14.0\t66.0\t60.0\t60.2\t56.0\t64.0\t60.0\t78.0\n+4.0\t15.0\t59.0\t59.0\t58.3\t58.0\t61.0\t60.0\t40.0\n'
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result08
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result08 Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,11 @@
+0 1
+143.762620712 -0.330941870584
+-88.5787166225 1.08055532812
+-82.8452345578 0.272541389247
+72.4951388149 -0.26868660527800003
+11.805182128 1.0360467096600001
+-63.9354970901 -0.101485840571
+126.32584079600001 -0.35999834017899995
+23.0341392692 0.5185404651359999
+67.6714937696 -0.115688051547
+47.39275848810001 -0.7850965413680001
b
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result09
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result09 Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,11 @@
+0
+143.762620712
+-88.5787166225
+-82.8452345578
+72.4951388149
+11.805182128
+-63.9354970901
+126.32584079600001
+23.0341392692
+67.6714937696
+47.39275848810001
b
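The first column of feature_selection_result08 and feature_selection_result09 matches feature_selection_result01 exactly, consistent with one input matrix being reduced to 2 and then to a single column by different selector settings. A sketch with RFE; the estimator and the choice of selector are hypothetical (any selector asked for one feature yields this shape):

import pandas as pd
from sklearn.datasets import make_regression
from sklearn.feature_selection import RFE
from sklearn.linear_model import LinearRegression

# Hypothetical matrix recursively reduced to its single strongest feature.
X, y = make_regression(n_samples=10, n_features=5, random_state=0)
X_new = RFE(LinearRegression(), n_features_to_select=1).fit_transform(X, y)

pd.DataFrame(X_new).to_csv("selected.tabular", sep="\t", index=False)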
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result10
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result10 Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,262 @@\n+month\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\tweek_Fri\tweek_Mon\tweek_Sat\tweek_Sun\tweek_Thurs\tweek_Tues\tweek_Wed\n+9.0\t19.0\t68.0\t69.0\t69.7\t65.0\t74.0\t71.0\t88.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+4.0\t14.0\t60.0\t59.0\t58.1\t57.0\t63.0\t58.0\t66.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+7.0\t30.0\t85.0\t88.0\t77.3\t75.0\t79.0\t77.0\t70.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+5.0\t15.0\t82.0\t65.0\t64.7\t63.0\t69.0\t64.0\t58.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+1.0\t18.0\t54.0\t50.0\t47.5\t44.0\t48.0\t49.0\t58.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+1.0\t25.0\t48.0\t51.0\t48.2\t45.0\t51.0\t49.0\t63.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+11.0\t25.0\t49.0\t52.0\t48.6\t45.0\t52.0\t47.0\t41.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+7.0\t20.0\t73.0\t78.0\t76.7\t75.0\t78.0\t77.0\t66.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+12.0\t17.0\t39.0\t35.0\t45.2\t43.0\t47.0\t46.0\t38.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+12.0\t8.0\t42.0\t40.0\t46.1\t45.0\t51.0\t47.0\t36.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+12.0\t28.0\t42.0\t47.0\t45.3\t41.0\t49.0\t44.0\t58.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+7.0\t17.0\t76.0\t72.0\t76.3\t76.0\t78.0\t77.0\t88.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+7.0\t7.0\t69.0\t76.0\t74.4\t73.0\t77.0\t74.0\t72.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+12.0\t15.0\t40.0\t39.0\t45.3\t45.0\t49.0\t47.0\t46.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+6.0\t27.0\t71.0\t78.0\t72.2\t70.0\t74.0\t72.0\t84.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+5.0\t31.0\t64.0\t71.0\t67.3\t63.0\t72.0\t68.0\t85.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+1.0\t20.0\t54.0\t48.0\t47.7\t44.0\t52.0\t49.0\t61.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+8.0\t10.0\t73.0\t72.0\t77.0\t77.0\t78.0\t77.0\t68.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+3.0\t23.0\t56.0\t57.0\t54.7\t50.0\t58.0\t55.0\t70.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+12.0\t24.0\t45.0\t40.0\t45.1\t44.0\t47.0\t46.0\t39.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+1.0\t19.0\t50.0\t54.0\t47.6\t47.0\t49.0\t48.0\t53.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+11.0\t6.0\t65.0\t58.0\t53.2\t52.0\t57.0\t55.0\t71.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+4.0\t17.0\t60.0\t68.0\t58.6\t58.0\t62.0\t59.0\t54.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+10.0\t29.0\t60.0\t65.0\t55.3\t55.0\t59.0\t55.0\t65.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t1.0\t48.0\t47.0\t48.8\t46.0\t49.0\t49.0\t51.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+12.0\t12.0\t44.0\t44.0\t45.6\t43.0\t50.0\t45.0\t42.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+5.0\t30.0\t64.0\t64.0\t67.1\t64.0\t70.0\t66.0\t69.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+10.0\t23.0\t59.0\t62.0\t57.1\t57.0\t58.0\t59.0\t67.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+9.0\t30.0\t68.0\t66.0\t65.7\t64.0\t67.0\t65.0\t74.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+9.0\t12.0\t77.0\t70.0\t71.8\t67.0\t73.0\t73.0\t90.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+11.0\t2.0\t59.0\t57.0\t54.2\t54.0\t58.0\t55.0\t70.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+11.0\t17.0\t55.0\t50.0\t50.5\t46.0\t51.0\t50.0\t57.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+3.0\t3.0\t58.0\t55.0\t51.8\t49.0\t54.0\t50.0\t71.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+11.0\t21.0\t57.0\t55.0\t49.5\t46.0\t51.0\t49.0\t67.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+12.0\t27.0\t42.0\t42.0\t45.2\t41.0\t50.0\t47.0\t47.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+4.0\t24.0\t64.0\t65.0\t60.1\t57.0\t61.0\t60.0\t41.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+5.0\t20.0\t64.0\t63.0\t65.6\t63.0\t70.0\t64.0\t73.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+1.0\t16.0\t49.0\t48.0\t47.3\t45.0\t52.0\t46.0\t28.0\t0.0\t0.0\t1.0\t
0.0\t0.0\t0.0\t0.0\n+12.0\t7.0\t40.0\t42.0\t46.3\t44.0\t51.0\t46.0\t62.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+1.0\t7.0\t44.0\t51.0\t46.2\t45.0\t49.0\t46.0\t38.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+9.0\t24.0\t67.0\t64.0\t68.0\t65.0\t71.0\t66.0\t64.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+8.0\t30.0\t79.0\t75.0\t74.6\t74.0\t76.0\t75.0\t63.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+1.0\t11.0\t50.0\t52.0\t46.7\t42.0\t48.0\t48.0\t39.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+6.0\t9.0\t85.0\t67.0\t68.6\t66.0\t73.0\t69.0\t80.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+9.0\t22.0\t67.0\t68.0\t68.7\t65.0\t70.0\t69.0\t56.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+3.0\t25.0\t53.0\t54.0\t55.0\t53.0\t57.0\t57.0\t42.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+10.0\t24.0\t62.0\t62.0\t56.8\t52.0\t61.0\t57.0\t70.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+7.0\t16.0\t77.0\t76.0\t76.1\t76.0\t78.0\t75.0\t61.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+7.0\t1.0\t74.0\t73.0\t73.1\t71.0\t75.0\t72.0\t93.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+11.0\t18.0\t50.0\t52.0\t50.3\t50.0\t53.0\t50.0\t35.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+9.0\t3.0\t75.0\t70.0\t73.9\t71.0\t75.0\t73.0\t68.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+8.0\t2.0\t73.0\t77.0\t77.4\t75.0\t80.0\t79.0\t62.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+4.0\t5.0\t69.0\t60.0\t56.6\t52.0\t58.'..b'7.0\t58.8\t55.0\t59.0\t57.0\t39.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t25.0\t60.0\t59.0\t50.9\t49.0\t51.0\t49.0\t35.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+1.0\t2.0\t44.0\t45.0\t45.7\t41.0\t50.0\t44.0\t61.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t21.0\t51.0\t53.0\t50.5\t49.0\t54.0\t52.0\t46.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+3.0\t24.0\t57.0\t53.0\t54.9\t54.0\t56.0\t56.0\t72.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+7.0\t27.0\t85.0\t79.0\t77.3\t73.0\t78.0\t79.0\t79.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+2.0\t4.0\t51.0\t49.0\t49.0\t44.0\t54.0\t51.0\t44.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+10.0\t7.0\t66.0\t63.0\t62.9\t62.0\t67.0\t64.0\t78.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+4.0\t4.0\t63.0\t69.0\t56.5\t54.0\t59.0\t56.0\t45.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t24.0\t51.0\t60.0\t50.8\t47.0\t53.0\t50.0\t46.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+10.0\t8.0\t63.0\t64.0\t62.5\t60.0\t65.0\t61.0\t73.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+9.0\t15.0\t75.0\t79.0\t71.0\t66.0\t76.0\t69.0\t64.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+1.0\t14.0\t49.0\t55.0\t47.0\t43.0\t47.0\t46.0\t58.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+4.0\t1.0\t68.0\t73.0\t56.0\t54.0\t59.0\t55.0\t41.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+10.0\t17.0\t62.0\t60.0\t59.1\t57.0\t63.0\t59.0\t62.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+6.0\t18.0\t71.0\t67.0\t70.2\t67.0\t75.0\t69.0\t77.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+12.0\t26.0\t41.0\t42.0\t45.2\t45.0\t48.0\t46.0\t58.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+5.0\t17.0\t57.0\t60.0\t65.0\t62.0\t65.0\t65.0\t55.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+11.0\t20.0\t55.0\t57.0\t49.8\t47.0\t54.0\t48.0\t30.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+12.0\t18.0\t35.0\t35.0\t45.2\t44.0\t46.0\t46.0\t36.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+9.0\t17.0\t71.0\t75.0\t70.3\t66.0\t73.0\t70.0\t84.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t26.0\t59.0\t61.0\t51.1\t48.0\t56.0\t53.0\t65.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+2.0\t22.0\t53.0\t51.0\t50.6\t46.0\t51.0\t50.0\t59.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+6.0\t26.0\t69.0\t71.0\t71.9\t67.0\t74.0\t72.0\t70.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+7.0\t11.0\t71.0\t74.0\t75.3\t74.0\t79.0\t75.0\t71.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+12.0\t30.0\t48.0\t48.0\t45.4\t44.0\t46.0\t
44.0\t42.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+7.0\t9.0\t68.0\t74.0\t74.9\t70.0\t79.0\t76.0\t60.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+6.0\t21.0\t70.0\t76.0\t70.8\t68.0\t75.0\t71.0\t57.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+3.0\t2.0\t54.0\t58.0\t51.6\t47.0\t54.0\t52.0\t37.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+2.0\t20.0\t53.0\t51.0\t50.4\t48.0\t55.0\t51.0\t43.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+9.0\t9.0\t67.0\t72.0\t72.6\t68.0\t77.0\t71.0\t78.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+9.0\t26.0\t67.0\t76.0\t67.2\t64.0\t69.0\t69.0\t74.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+1.0\t22.0\t52.0\t52.0\t47.9\t47.0\t48.0\t48.0\t60.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+11.0\t27.0\t52.0\t53.0\t48.2\t48.0\t49.0\t49.0\t53.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+6.0\t12.0\t67.0\t65.0\t69.1\t65.0\t73.0\t70.0\t83.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+10.0\t20.0\t61.0\t58.0\t58.1\t58.0\t59.0\t58.0\t43.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+7.0\t13.0\t74.0\t77.0\t75.6\t74.0\t78.0\t76.0\t56.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+11.0\t7.0\t58.0\t61.0\t52.9\t51.0\t56.0\t51.0\t35.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+10.0\t1.0\t66.0\t67.0\t65.3\t64.0\t70.0\t64.0\t54.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+11.0\t22.0\t55.0\t54.0\t49.3\t46.0\t54.0\t49.0\t58.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+6.0\t1.0\t71.0\t79.0\t67.4\t65.0\t69.0\t66.0\t58.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+5.0\t13.0\t81.0\t77.0\t64.3\t63.0\t67.0\t66.0\t67.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+6.0\t3.0\t75.0\t71.0\t67.7\t64.0\t71.0\t66.0\t55.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+4.0\t12.0\t59.0\t58.0\t57.7\t54.0\t59.0\t57.0\t61.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+3.0\t31.0\t64.0\t68.0\t55.9\t55.0\t59.0\t56.0\t56.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+12.0\t14.0\t43.0\t40.0\t45.4\t45.0\t48.0\t45.0\t49.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+8.0\t5.0\t75.0\t80.0\t77.3\t75.0\t81.0\t78.0\t71.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+5.0\t4.0\t87.0\t74.0\t62.3\t59.0\t65.0\t64.0\t61.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\n+12.0\t31.0\t48.0\t57.0\t45.5\t42.0\t48.0\t47.0\t57.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\t0.0\n+1.0\t21.0\t48.0\t52.0\t47.8\t43.0\t51.0\t46.0\t57.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\n+7.0\t10.0\t74.0\t71.0\t75.1\t71.0\t77.0\t76.0\t95.0\t0.0\t0.0\t0.0\t1.0\t0.0\t0.0\t0.0\n+3.0\t15.0\t54.0\t49.0\t53.6\t49.0\t58.0\t52.0\t70.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+4.0\t19.0\t77.0\t89.0\t59.0\t59.0\t63.0\t59.0\t61.0\t0.0\t0.0\t0.0\t0.0\t0.0\t1.0\t0.0\n+10.0\t14.0\t66.0\t60.0\t60.2\t56.0\t64.0\t60.0\t78.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n+4.0\t15.0\t59.0\t59.0\t58.3\t58.0\t61.0\t60.0\t40.0\t1.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n'
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result11
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result11 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,51 @@
+Race AIDS Total
+4.0 2555.0 14443382.0
+4.0 55300.0 14704293.0
+4.0 82334.0 16641977.0
+4.0 38006.0 13888285.0
+4.0 16068.0 21845911.0
+2.0 2489.0 2367256.0
+2.0 34204.0 2410019.0
+2.0 51776.0 2727604.0
+2.0 23896.0 2276276.0
+2.0 10169.0 3580523.0
+3.0 1363.0 1542563.0
+3.0 20712.0 1570428.0
+3.0 27200.0 1777374.0
+3.0 11251.0 1483278.0
+3.0 4674.0 2333158.0
+1.0 38.0 699627.0
+1.0 731.0 712265.0
+1.0 1162.0 806125.0
+1.0 560.0 672738.0
+1.0 258.0 1058200.0
+0.0 26.0 169115.0
+0.0 390.0 172170.0
+0.0 417.0 194858.0
+0.0 140.0 162616.0
+0.0 48.0 255790.0
+4.0 490.0 14999423.0
+4.0 4788.0 15270378.0
+4.0 5377.0 17282659.0
+4.0 2152.0 14422956.0
+4.0 1790.0 22686934.0
+2.0 1490.0 2458391.0
+2.0 12280.0 2502800.0
+2.0 15713.0 2832611.0
+2.0 5788.0 2363908.0
+2.0 2534.0 3718366.0
+3.0 493.0 1601948.0
+3.0 4660.0 1630887.0
+3.0 5153.0 1845800.0
+3.0 1944.0 1540381.0
+3.0 910.0 2422980.0
+1.0 6.0 726561.0
+1.0 83.0 739686.0
+1.0 106.0 837159.0
+1.0 69.0 698637.0
+1.0 55.0 1098938.0
+0.0 3.0 175626.0
+0.0 78.0 178798.0
+0.0 77.0 202360.0
+0.0 31.0 168876.0
+0.0 14.0 265637.0
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result12
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result12 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,11 @@
+0 1
+143.762620712 -0.330941870584
+-88.5787166225 1.08055532812
+-82.8452345578 0.272541389247
+72.4951388149 -0.26868660527800003
+11.805182128 1.0360467096600001
+-63.9354970901 -0.101485840571
+126.32584079600001 -0.35999834017899995
+23.0341392692 0.5185404651359999
+67.6714937696 -0.115688051547
+47.39275848810001 -0.7850965413680001
diff -r 000000000000 -r af2624d5ab32 test-data/feature_selection_result13
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/feature_selection_result13 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+temp_1 average forecast_noaa friend
+69.0 69.7 65.0 88.0
+59.0 58.1 57.0 66.0
+88.0 77.3 75.0 70.0
+65.0 64.7 63.0 58.0
+50.0 47.5 44.0 58.0
+51.0 48.2 45.0 63.0
+52.0 48.6 45.0 41.0
+78.0 76.7 75.0 66.0
+35.0 45.2 43.0 38.0
+40.0 46.1 45.0 36.0
+47.0 45.3 41.0 58.0
+72.0 76.3 76.0 88.0
+76.0 74.4 73.0 72.0
+39.0 45.3 45.0 46.0
+78.0 72.2 70.0 84.0
+71.0 67.3 63.0 85.0
+48.0 47.7 44.0 61.0
+72.0 77.0 77.0 68.0
+57.0 54.7 50.0 70.0
+40.0 45.1 44.0 39.0
+54.0 47.6 47.0 53.0
+58.0 53.2 52.0 71.0
+68.0 58.6 58.0 54.0
+65.0 55.3 55.0 65.0
+47.0 48.8 46.0 51.0
+44.0 45.6 43.0 42.0
+64.0 67.1 64.0 69.0
+62.0 57.1 57.0 67.0
+66.0 65.7 64.0 74.0
+70.0 71.8 67.0 90.0
+57.0 54.2 54.0 70.0
+50.0 50.5 46.0 57.0
+55.0 51.8 49.0 71.0
+55.0 49.5 46.0 67.0
+42.0 45.2 41.0 47.0
+65.0 60.1 57.0 41.0
+63.0 65.6 63.0 73.0
+48.0 47.3 45.0 28.0
+42.0 46.3 44.0 62.0
+51.0 46.2 45.0 38.0
+64.0 68.0 65.0 64.0
+75.0 74.6 74.0 63.0
+52.0 46.7 42.0 39.0
+67.0 68.6 66.0 80.0
+68.0 68.7 65.0 56.0
+54.0 55.0 53.0 42.0
+62.0 56.8 52.0 70.0
+76.0 76.1 76.0 61.0
+73.0 73.1 71.0 93.0
+52.0 50.3 50.0 35.0
+70.0 73.9 71.0 68.0
+77.0 77.4 75.0 62.0
+60.0 56.6 52.0 72.0
+52.0 53.3 50.0 54.0
+79.0 75.0 71.0 85.0
+76.0 57.2 53.0 74.0
+66.0 66.5 64.0 85.0
+57.0 61.8 58.0 62.0
+66.0 57.4 57.0 60.0
+61.0 58.4 58.0 41.0
+55.0 53.1 52.0 65.0
+48.0 48.1 46.0 54.0
+49.0 49.2 46.0 63.0
+65.0 66.7 64.0 73.0
+60.0 62.5 58.0 56.0
+56.0 53.0 53.0 36.0
+59.0 57.4 56.0 44.0
+44.0 45.7 41.0 35.0
+82.0 63.2 62.0 83.0
+64.0 67.0 65.0 76.0
+43.0 45.5 41.0 46.0
+64.0 55.7 51.0 57.0
+63.0 52.7 49.0 49.0
+70.0 70.6 67.0 79.0
+71.0 52.4 48.0 42.0
+76.0 73.5 69.0 85.0
+68.0 62.1 58.0 55.0
+39.0 45.3 44.0 39.0
+71.0 70.7 70.0 52.0
+69.0 71.7 68.0 89.0
+74.0 71.5 71.0 82.0
+81.0 64.1 62.0 81.0
+51.0 49.3 49.0 34.0
+45.0 46.8 44.0 61.0
+87.0 76.8 73.0 73.0
+71.0 73.8 71.0 86.0
+55.0 60.3 56.0 77.0
+80.0 76.9 72.0 81.0
+67.0 69.0 65.0 76.0
+61.0 61.4 60.0 78.0
+46.0 46.6 43.0 65.0
+39.0 45.1 42.0 51.0
+67.0 68.3 67.0 61.0
+52.0 47.8 43.0 50.0
+67.0 69.8 68.0 87.0
+75.0 71.2 67.0 77.0
+68.0 73.3 73.0 79.0
+92.0 68.2 65.0 71.0
+67.0 72.8 69.0 56.0
+44.0 45.8 43.0 56.0
+61.0 61.0 56.0 73.0
+65.0 53.4 49.0 41.0
+68.0 73.0 72.0 70.0
+87.0 62.1 62.0 69.0
+117.0 54.8 51.0 62.0
+80.0 76.4 75.0 66.0
+57.0 51.0 47.0 46.0
+67.0 63.6 61.0 68.0
+58.0 54.0 51.0 56.0
+65.0 56.2 53.0 41.0
+52.0 48.6 45.0 47.0
+59.0 55.3 52.0 39.0
+57.0 53.9 53.0 35.0
+81.0 59.2 56.0 66.0
+75.0 77.1 76.0 75.0
+76.0 77.4 76.0 95.0
+57.0 64.8 61.0 53.0
+69.0 74.2 72.0 86.0
+77.0 66.8 66.0 64.0
+55.0 49.9 47.0 55.0
+49.0 46.8 45.0 53.0
+54.0 52.7 48.0 57.0
+55.0 51.2 49.0 42.0
+56.0 55.6 53.0 45.0
+68.0 74.6 72.0 77.0
+54.0 53.4 49.0 44.0
+67.0 69.0 69.0 87.0
+49.0 46.9 45.0 33.0
+49.0 49.1 47.0 45.0
+56.0 48.5 48.0 49.0
+73.0 71.0 66.0 78.0
+66.0 66.4 65.0 60.0
+69.0 66.5 66.0 62.0
+82.0 64.5 64.0 65.0
+90.0 76.7 75.0 65.0
+51.0 50.7 49.0 43.0
+77.0 57.1 57.0 41.0
+60.0 61.4 58.0 58.0
+74.0 72.8 71.0 87.0
+85.0 77.2 73.0 74.0
+68.0 62.8 61.0 64.0
+56.0 49.5 46.0 37.0
+71.0 56.2 55.0 45.0
+62.0 59.5 57.0 40.0
+83.0 77.3 76.0 76.0
+64.0 65.4 62.0 56.0
+56.0 48.4 45.0 54.0
+41.0 45.1 42.0 31.0
+65.0 66.2 66.0 67.0
+65.0 53.7 49.0 38.0
+40.0 46.0 46.0 41.0
+45.0 45.6 43.0 29.0
+52.0 48.4 48.0 58.0
+63.0 51.7 50.0 63.0
+52.0 47.6 47.0 44.0
+60.0 57.9 55.0 77.0
+81.0 75.7 73.0 89.0
+75.0 75.8 74.0 77.0
+59.0 51.4 48.0 64.0
+73.0 77.1 77.0 94.0
+75.0 77.3 73.0 66.0
+60.0 58.5 56.0 59.0
+75.0 71.3 68.0 56.0
+59.0 57.6 56.0 40.0
+53.0 49.1 47.0 56.0
+79.0 77.2 76.0 60.0
+57.0 52.1 49.0 46.0
+75.0 67.6 64.0 77.0
+71.0 69.4 67.0 81.0
+53.0 50.2 50.0 42.0
+46.0 48.8 48.0 56.0
+81.0 76.9 72.0 70.0
+49.0 48.9 47.0 29.0
+57.0 48.4 44.0 34.0
+60.0 58.8 54.0 53.0
+67.0 73.7 72.0 64.0
+61.0 64.1 62.0 60.0
+66.0 69.5 66.0 85.0
+64.0 51.9 50.0 55.0
+66.0 65.7 62.0 49.0
+64.0 52.2 52.0 49.0
+71.0 65.2 61.0 56.0
+75.0 63.8 62.0 60.0
+48.0 46.4 46.0 47.0
+53.0 52.5 48.0 70.0
+49.0 47.1 46.0 65.0
+85.0 68.5 67.0 81.0
+62.0 49.4 48.0 30.0
+50.0 47.0 42.0 58.0
+58.0 55.9 51.0 39.0
+72.0 77.2 74.0 95.0
+55.0 50.7 50.0 34.0
+74.0 72.3 70.0 91.0
+85.0 77.3 77.0 77.0
+73.0 77.3 77.0 93.0
+52.0 47.4 44.0 39.0
+67.0 67.6 64.0 62.0
+45.0 45.1 45.0 35.0
+46.0 47.2 46.0 41.0
+66.0 60.6 60.0 57.0
+71.0 77.0 75.0 86.0
+70.0 69.3 66.0 79.0
+58.0 49.9 46.0 53.0
+72.0 77.1 76.0 65.0
+74.0 75.4 74.0 71.0
+65.0 64.5 63.0 49.0
+77.0 58.8 55.0 39.0
+59.0 50.9 49.0 35.0
+45.0 45.7 41.0 61.0
+53.0 50.5 49.0 46.0
+53.0 54.9 54.0 72.0
+79.0 77.3 73.0 79.0
+49.0 49.0 44.0 44.0
+63.0 62.9 62.0 78.0
+69.0 56.5 54.0 45.0
+60.0 50.8 47.0 46.0
+64.0 62.5 60.0 73.0
+79.0 71.0 66.0 64.0
+55.0 47.0 43.0 58.0
+73.0 56.0 54.0 41.0
+60.0 59.1 57.0 62.0
+67.0 70.2 67.0 77.0
+42.0 45.2 45.0 58.0
+60.0 65.0 62.0 55.0
+57.0 49.8 47.0 30.0
+35.0 45.2 44.0 36.0
+75.0 70.3 66.0 84.0
+61.0 51.1 48.0 65.0
+51.0 50.6 46.0 59.0
+71.0 71.9 67.0 70.0
+74.0 75.3 74.0 71.0
+48.0 45.4 44.0 42.0
+74.0 74.9 70.0 60.0
+76.0 70.8 68.0 57.0
+58.0 51.6 47.0 37.0
+51.0 50.4 48.0 43.0
+72.0 72.6 68.0 78.0
+76.0 67.2 64.0 74.0
+52.0 47.9 47.0 60.0
+53.0 48.2 48.0 53.0
+65.0 69.1 65.0 83.0
+58.0 58.1 58.0 43.0
+77.0 75.6 74.0 56.0
+61.0 52.9 51.0 35.0
+67.0 65.3 64.0 54.0
+54.0 49.3 46.0 58.0
+79.0 67.4 65.0 58.0
+77.0 64.3 63.0 67.0
+71.0 67.7 64.0 55.0
+58.0 57.7 54.0 61.0
+68.0 55.9 55.0 56.0
+40.0 45.4 45.0 49.0
+80.0 77.3 75.0 71.0
+74.0 62.3 59.0 61.0
+57.0 45.5 42.0 57.0
+52.0 47.8 43.0 57.0
+71.0 75.1 71.0 95.0
+49.0 53.6 49.0 70.0
+89.0 59.0 59.0 61.0
+60.0 60.2 56.0 78.0
+59.0 58.3 58.0 40.0
diff -r 000000000000 -r af2624d5ab32 test-data/final_estimator.zip
Binary file test-data/final_estimator.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/fitted_keras_g_regressor01.zip
Binary file test-data/fitted_keras_g_regressor01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/fitted_model_eval01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/fitted_model_eval01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+score
+0.8277511130733235
diff -r 000000000000 -r af2624d5ab32 test-data/friedman1.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/friedman1.txt Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,101 @@\n+0\t1\t2\t3\t4\t5\t6\t7\t8\t9\t0\n+0.54340494179097\t0.27836938509380\t0.42451759074913\t0.84477613231990\t0.00471885619097\t0.12156912078311\t0.67074908472678\t0.82585275510505\t0.13670658968495\t0.57509332942725\t13.16065039739808\n+0.89132195431226\t0.20920212211719\t0.18532821955008\t0.10837689046426\t0.21969749262499\t0.97862378470737\t0.81168314908932\t0.17194101273259\t0.81622474872584\t0.27407374704170\t9.69129813765850\n+0.43170418366312\t0.94002981962237\t0.81764937877673\t0.33611195012090\t0.17541045374234\t0.37283204628992\t0.00568850735257\t0.25242635344484\t0.79566250847329\t0.01525497124634\t15.82161996182878\n+0.59884337692849\t0.60380453904285\t0.10514768541206\t0.38194344494311\t0.03647605659257\t0.89041156344208\t0.98092085701231\t0.05994198881804\t0.89054594472850\t0.57690149940003\t16.18933274618261\n+0.74247968909798\t0.63018393647538\t0.58184219239878\t0.02043913202692\t0.21002657767286\t0.54468487817865\t0.76911517110565\t0.25069522913840\t0.28589569040686\t0.85239508784131\t11.33767760089345\n+0.97500649360659\t0.88485329349111\t0.35950784393690\t0.59885894587575\t0.35479561165730\t0.34019021537065\t0.17808098950580\t0.23769420862405\t0.04486228246078\t0.50543142963579\t12.33714282417860\n+0.37625245429736\t0.59280540097589\t0.62994187558750\t0.14260031444628\t0.93384129946642\t0.94637988080910\t0.60229665773087\t0.38776628032663\t0.36318800410935\t0.20434527686864\t12.88055071230146\n+0.27676506139634\t0.24653588120355\t0.17360800174021\t0.96660969448732\t0.95701260035280\t0.59797368432892\t0.73130075305992\t0.34038522283744\t0.09205560337724\t0.46349801893715\t18.70900393660417\n+0.50869889323819\t0.08846017300289\t0.52803522331805\t0.99215803651053\t0.39503593175823\t0.33559644171857\t0.80545053732928\t0.75434899458235\t0.31306644158851\t0.63403668296228\t13.32147913155627\n+0.54040457530072\t0.29679375088001\t0.11078790118245\t0.31264029787574\t0.45697913004927\t0.65894007022620\t0.25425751781772\t0.64110125870070\t0.20012360721840\t0.65762480552898\t13.26925386310889\n+0.77828921544985\t0.77959839861075\t0.61032815320939\t0.30900034852440\t0.69773490751296\t0.85961829572907\t0.62532375775681\t0.98240782960955\t0.97650012701586\t0.16669413119886\t16.26499517078734\n+0.02317813647840\t0.16074454850708\t0.92349682525909\t0.95354984987953\t0.21097841871845\t0.36052525081461\t0.54937526162767\t0.27183084917697\t0.46060162107485\t0.69616156482339\t14.29442731349912\n+0.50035589667487\t0.71607099056434\t0.52595593622978\t0.00139902311904\t0.39470028668984\t0.49216696990115\t0.40288033137914\t0.35429830010632\t0.50061431944295\t0.44517662883114\t11.02623719229622\n+0.09043278819644\t0.27356292002744\t0.94347709774273\t0.02654464133394\t0.03999868964065\t0.28314035971982\t0.58234417021677\t0.99089280292483\t0.99264223740297\t0.99311737248104\t5.17529680436277\n+0.11004833096656\t0.66448144596394\t0.52398683448831\t0.17314990980873\t0.94296024491503\t0.24186008597625\t0.99893226884321\t0.58269381514990\t0.18327900063058\t0.38684542191779\t8.73494610704017\n+0.18967352891215\t0.41077067302531\t0.59468006890171\t0.71658609312834\t0.48689148236912\t0.30958981776670\t0.57744137282785\t0.44170781956874\t0.35967810260054\t0.32133193200881\t12.20292470261234\n+0.20820724019602\t0.45125862406183\t0.49184291026405\t0.89907631479371\t0.72936046102944\t0.77008977291970\t0.37543924756199\t0.34373953523538\t0.65503520599932\t0.71103799321050\t15.54791473458014\n+0.11353757521868\t0.13302868937358\t0.45603905760612\t0.15973623015851\t0.96164190377465\t0.8376157448
6181\t0.52016068703792\t0.21827225772815\t0.13491872253240\t0.97907034548387\t6.91854352707436\n+0.70704349568914\t0.85997555694566\t0.38717262782864\t0.25083401983172\t0.29943801894470\t0.85689552840502\t0.47298399056822\t0.66327704701613\t0.80572860743679\t0.25298050464972\t13.68961636548041\n+0.07957343897032\t0.73276060501572\t0.96139747750361\t0.95380473416766\t0.49049905188390\t0.63219206443276\t0.73299501983799\t0.90240950324797\t0.16224691874820\t0.40588132236756\t18.06987664088426\n+0.41709073558366\t0.69559102829207\t0.42484723792483\t0.85811422605143\t0.84693247960942\t0.07019911390869\t0.30175241348415\t0.97962368103017\t0.03562699655303\t0.49239264699858\t20.83271160'..b'97\t0.94227191496291\t7.35500227276053\n+0.65747075744411\t0.19562874880188\t0.52567876074104\t0.31080910409256\t0.55534839433138\t0.53552980736766\t0.46511292889839\t0.76786459433331\t0.88694697168655\t0.82980936841814\t9.82967962816587\n+0.95884307895640\t0.91106399609686\t0.11967478384416\t0.11446859495951\t0.99696500632827\t0.04000832595811\t0.85956374451868\t0.46550503372369\t0.28899832738919\t0.73326395780051\t12.89083214454110\n+0.47219244963378\t0.36603378202459\t0.07374308587639\t0.82120530233350\t0.48801691478932\t0.75706206486561\t0.37107807260931\t0.26950482476264\t0.73459463542670\t0.84656452629874\t19.45300037464767\n+0.77315971269645\t0.09726311997083\t0.31288480540422\t0.05429737124805\t0.99641786449707\t0.17769873435229\t0.37123100482185\t0.35893259209644\t0.23918094189868\t0.19412444639857\t8.56586545020601\n+0.72215686978947\t0.99634986239999\t0.65788106155873\t0.18964066816522\t0.79605001337872\t0.63314883404405\t0.05997465943644\t0.45123696414114\t0.39815557985267\t0.45748771121895\t14.08990318454368\n+0.17329540858703\t0.55516022466921\t0.67557570281697\t0.82642784063039\t0.75397534640948\t0.03806626488278\t0.79511365190160\t0.65393180708085\t0.60499330235987\t0.00079912648847\t15.62730799178629\n+0.01311478146364\t0.14710484933761\t0.26562391867981\t0.06049450827852\t0.25786563084967\t0.22906133301836\t0.82408377109698\t0.20185448655187\t0.88109232562870\t0.21436450568576\t3.05352492776642\n+0.09124750057287\t0.74580579352311\t0.50434003505263\t0.58620204328337\t0.36415611319488\t0.55325395954112\t0.81284469910627\t0.14007325741439\t0.26762510211970\t0.73954855025783\t9.80487335854274\n+0.27379607811177\t0.59686146440691\t0.33862246805035\t0.07160379461501\t0.49859687569685\t0.71449130961071\t0.99063426277316\t0.30616421419444\t0.43181899369393\t0.54818355986588\t8.64124014879148\n+0.59227891215502\t0.10793438223332\t0.72180302378353\t0.28781493382596\t0.71019549092984\t0.26491733998837\t0.32929177720525\t0.15393928318286\t0.30573627751887\t0.76759356843621\t9.40791896736063\n+0.57384804400007\t0.97171023509445\t0.69184936806689\t0.49136225796250\t0.41895381309770\t0.95287842205705\t0.14422252170336\t0.52121030585434\t0.88914945419428\t0.72431615291271\t17.58115736412586\n+0.65242730280799\t0.57321087719437\t0.18508275660220\t0.61388086886624\t0.07695021292316\t0.66809451701064\t0.23147976471743\t0.22373847184444\t0.07931564343309\t0.52905314066137\t17.73348320503098\n+0.29220722494692\t0.53474433027316\t0.49663946753281\t0.43871374689137\t0.40966714178368\t0.26061101484449\t0.08937483777811\t0.80668663205374\t0.15657531573242\t0.91392614525783\t11.14983699152543\n+0.44666536992173\t0.44940086096851\t0.08179437299051\t0.69649341618554\t0.20657215375014\t0.09570310018075\t0.72201072227904\t0.39365518629943\t0.59111307585184\t0.51276461818493\t17.39215032093714\n+0.02479244084719\t0.7627946
1390933\t0.26576180603379\t0.97882684017667\t0.94868600684785\t0.72566997348949\t0.72550502055146\t0.05082479081617\t0.59406611432528\t0.71712665638338\t16.22282316439366\n+0.04187295085350\t0.48584833343640\t0.98682425894389\t0.04782633490074\t0.57885197413725\t0.07155939791944\t0.28014174429831\t0.70182182600545\t0.16232193959805\t0.49228648720155\t8.75116795261410\n+0.95454571129748\t0.58935516236524\t0.60662682021074\t0.86798654403851\t0.93654793684458\t0.14416045993162\t0.27700719020078\t0.12532193725529\t0.88472078815751\t0.82673777046447\t23.39743606740882\n+0.99535888109278\t0.81386961579101\t0.11914570059659\t0.93153678351429\t0.00698669273111\t0.53839624945247\t0.78250154219744\t0.88886925172791\t0.30537562757152\t0.64467750393558\t17.86973520845505\n+0.12491934664886\t0.60858430036276\t0.18949843940085\t0.43906581937979\t0.97041260302138\t0.06809275523457\t0.20517286226115\t0.50757194094102\t0.14050011761811\t0.93373835572665\t13.53666671909896\n+0.60654543170675\t0.46153152916887\t0.80150217090955\t0.69870731207645\t0.74455734291899\t0.32516377858166\t0.17845078715926\t0.01435150262556\t0.10704972728076\t0.27305170093104\t20.23185859895480\n+0.61652177543964\t0.94757922376409\t0.90647236884292\t0.96509402821359\t0.33762107364120\t0.65640308766918\t0.29145578099293\t0.15086922353098\t0.03693206346401\t0.59796374251126\t24.29559045754858\n'
diff -r 000000000000 -r af2624d5ab32 test-data/friedman2.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/friedman2.txt Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,101 @@\n+0\t1\t2\t3\t0\n+54.34049417909655\t580.41577804498036\t0.42451759074913\t9.44776132319904\t252.31753213122840\n+0.47188561909726\t324.26244765650472\t0.67074908472678\t9.25852755105048\t217.49891878908315\n+13.67065896849530\t1065.15237514930618\t0.89132195431226\t3.09202122117190\t949.49181230866191\n+18.53282195500751\t302.71124845232475\t0.21969749262499\t10.78623784707370\t69.03858906148859\n+81.16831490893233\t406.55138981837536\t0.81622474872584\t3.74073747041699\t341.61946168251893\n+43.17041836631217\t1661.32290939370682\t0.81764937877673\t4.36111950120899\t1359.06532936898702\n+17.54104537423366\t734.73264332017334\t0.00568850735257\t3.52426353444840\t18.03201411041058\n+79.56625084732873\t150.58465705466725\t0.59884337692849\t7.03804539042854\t120.25989238218870\n+10.51476854120563\t749.61728091801888\t0.03647605659257\t9.90411563442076\t29.29500204274906\n+98.09208570123114\t223.58662823399155\t0.89054594472850\t6.76901499400033\t221.96451266962563\n+74.24796890979773\t1155.14994326908754\t0.58184219239878\t1.20439132026923\t676.20288008333796\n+21.00265776728606\t1015.47627228348676\t0.76911517110565\t3.50695229138396\t781.30027040619507\n+28.58956904068647\t1518.16034202109449\t0.97500649360659\t9.84853293491106\t1480.49219516480912\n+35.95078439369023\t1103.97655589147462\t0.35479561165730\t4.40190215370646\t393.33223818275417\n+17.80809895058049\t513.96766354295312\t0.04486228246078\t6.05431429635789\t29.13372581378542\n+37.62524542973630\t1094.08731435497407\t0.62994187558750\t2.42600314446284\t690.23728828859043\n+93.38412994664191\t1671.69654829222122\t0.60229665773087\t4.87766280326631\t1011.17845018088019\n+36.31880041093498\t459.48790885887036\t0.27676506139634\t3.46535881203550\t132.25413083472941\n+17.36080017402051\t1704.74454199041770\t0.95701260035280\t6.97973684328921\t1631.55429074897393\n+73.13007530599226\t681.72659818103102\t0.09205560337724\t5.63498018937148\t96.36589298307057\n+50.86988932381939\t270.17473755699689\t0.52803522331805\t10.92158036510528\t151.45967058029146\n+39.50359317582296\t673.90351039803500\t0.80545053732928\t8.54348994582354\t544.23136876662579\n+31.30664415885097\t1161.44389849996833\t0.54040457530072\t3.96793750880015\t628.42966975327408\n+11.07879011824457\t636.40170691532239\t0.45697913004927\t7.58940070226197\t291.03303662060034\n+25.42575178177181\t1172.98478850506444\t0.20012360721840\t7.57624805528984\t236.11479781477169\n+77.82892154498485\t1399.23761909305881\t0.61032815320939\t4.09000348524402\t857.53308148795691\n+69.77349075129560\t1529.96037797557256\t0.62532375775681\t10.82407829609550\t959.26142361657560\n+97.65001270158552\t397.97993628844188\t0.02317813647840\t2.60744548507082\t98.08464391759719\n+92.34968252590873\t1683.40961181445255\t0.21097841871845\t4.60525250814608\t366.97302133435386\n+54.93752616276721\t569.73424151618599\t0.46060162107485\t7.96161564823385\t268.10919955478056\n+50.03558966748651\t1295.45745511454766\t0.52595593622978\t1.01399023119044\t683.18750535064999\n+39.47002866898355\t929.68153737371244\t0.40288033137914\t4.54298300106321\t376.62409950100181\n+50.06143194429532\t852.91679202019429\t0.09043278819644\t3.73562920027441\t91.95318916050276\n+94.34770977427269\t169.02778025117351\t0.03999868964065\t3.83140359719820\t94.58952950769820\n+58.23441702167689\t1744.41411222868214\t0.99264223740297\t10.93117372481045\t1732.55803362523739\n+11.00483309665630\t1211.17932126885103\t0.52398683448831\t2.73149909808731\t634.73712227782266\n+94.29602449150258\t520.77315817937847\t0.998
93226884321\t6.82693815149899\t528.69394897178495\n+18.32790006305758\t757.62528864091405\t0.18967352891215\t5.10770673025310\t144.86527485195924\n+59.46800689017054\t1296.29894117862568\t0.48689148236912\t4.09589817766705\t633.95209204331320\n+57.74413728278473\t847.25004745856268\t0.35967810260054\t4.21331932008814\t310.15968510981907\n+20.82072401960227\t862.85251081887418\t0.49184291026405\t9.99076314793711\t424.89820582123940\n+72.93604610294412\t1383.70406021234839\t0.37543924756199\t4.43739535235384\t524.59168356660120\n+65.50352059993224\t1287.23540880812243\t0.11353757521868\t2.33028689373575\t160.15715894498879\n+45.60390576061239\t386.61331307620571'..b'5.36613567278891\t0.68462427169271\t5.88293166805099\t1156.58767097944997\n+48.54143101843673\t1704.88050248556237\t0.21134788749712\t5.11648138177833\t363.57774253644357\n+98.96655767792834\t172.07811591269444\t0.70132651409352\t1.25171563884812\t156.06931868323713\n+32.08817260865362\t245.77958638999525\t0.06088456434664\t2.11406316704053\t35.40508437542623\n+16.92689081454309\t1151.06970045219464\t0.43839309463984\t9.30903764603975\t504.90473090436518\n+23.97921895644722\t436.13916546124790\t0.71189965858292\t9.58294925326778\t311.41167624355961\n+55.90558855960195\t1276.42473559746963\t0.60511203551818\t6.59217283268040\t774.40045791345551\n+86.03941909075867\t1628.20197943455605\t0.84960732575898\t3.54466535494455\t1386.00528001290149\n+87.75555422867708\t836.50464658900239\t0.72949434396451\t5.12640767538794\t616.50288055791896\n+19.08360458112225\t1279.03708947993277\t0.24063282092985\t9.51324426832995\t308.36928692774461\n+82.41022892585868\t983.66448115430603\t0.38634079430617\t6.90880790734925\t388.86234039746984\n+13.75236149078257\t1446.07702142766880\t0.96582581524448\t8.79795804232935\t1396.72614500682334\n+23.93350820958198\t1542.44475628035298\t0.80811501289370\t1.63681124220468\t1246.70212041837362\n+23.12283040488030\t1088.99047240300797\t0.13748694797777\t7.78440704371486\t151.49686527332432\n+99.21906895152472\t592.47620099114886\t0.76091275955885\t1.46527167666139\t461.61075902543962\n+33.25359065222067\t1668.71176293712756\t0.63651704125478\t7.01848606131893\t1062.68380316914022\n+92.81846814636464\t422.46031132251642\t0.01782318402652\t2.90072176134172\t93.12330768632674\n+52.18717978245897\t935.65247451329367\t0.80049120556915\t9.59436311444911\t750.79740145181643\n+21.29560322403450\t839.99840771015579\t0.42161750906259\t1.54717377081942\t354.79694525077520\n+0.99336936709830\t1415.84717539845406\t0.27531321879490\t8.17740004543336\t389.80262255819741\n+42.13559217565558\t359.82122610463938\t0.19252168025198\t4.13815233317635\t81.08093014555008\n+80.51701673672972\t146.28961840978741\t0.04910597269756\t6.66000384985094\t80.83675338792878\n+68.68106961543961\t1313.00248207047480\t0.47969376130168\t4.67656721778529\t633.57254918876924\n+83.99700992017513\t867.59808369591872\t0.32136583878220\t1.92719867171042\t291.19364899071132\n+6.04379321355856\t274.24409587470802\t0.68270645642831\t7.80735767230638\t187.32527058550369\n+24.31741658874254\t1171.93956616774358\t0.06913918311155\t9.72919961746265\t84.59721173364210\n+10.96069498325127\t401.83796801162464\t0.46737799144535\t8.75949219403373\t188.12950260375516\n+85.44445157050565\t469.35693097718968\t0.07664186926890\t8.88914797103218\t92.70787499286608\n+54.75000011493021\t1410.11180659606998\t0.92004704285871\t5.80972765927881\t1298.52380843775677\n+45.95536700101840\t1104.17293369002959\t0.59931878072428\t6.04373451233758\t663.34519384806902\n+30.68785297
434506\t1010.03319009672180\t0.92492694340279\t10.70550802030219\t934.71071704336362\n+39.57946098969964\t1430.51649192529544\t0.63508814775093\t3.29969165249033\t909.36559893052390\n+5.12070928629372\t172.16298201137550\t0.12284775190377\t3.20212517833639\t21.75914684133634\n+82.90227537008228\t592.05120099602254\t0.78106408263109\t6.04665812596679\t469.80205128257876\n+13.84489237765847\t1396.68614300270474\t0.92133179352084\t10.43018632036191\t1286.88575739979092\n+70.44357972639433\t1259.26516594596546\t0.54655181336750\t4.69217229737883\t691.84909149860289\n+98.24675747269930\t232.84478566118992\t0.89767831074227\t3.63930989094871\t230.95708002350815\n+57.44758420233384\t963.49649420466915\t0.55447680807193\t7.47167331828399\t537.31617549230930\n+18.54741590544805\t569.97496501234991\t0.14843865774278\t1.30304170594405\t86.61413266611218\n+93.92555841521393\t692.13102468460193\t0.10956460430561\t4.78326995661470\t120.71709895964038\n+38.40794590414141\t1212.71859427964318\t0.24449092930616\t7.61480973773685\t298.97589013891826\n+9.84928837918622\t1074.57745936695619\t0.10686550466855\t6.48254507463987\t115.25672659315927\n+51.97517077112073\t609.60010892704452\t0.45572905099810\t1.38666520129567\t282.63144665101163\n+59.90030248885009\t126.45889013919340\t0.50181359368553\t6.01726121691614\t87.26338001352038\n'
diff -r 000000000000 -r af2624d5ab32 test-data/friedman3.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/friedman3.txt Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,101 @@\n+0\t1\t2\t3\t0\n+54.34049417909655\t580.41577804498036\t0.42451759074913\t9.44776132319904\t1.35373021827042\n+0.47188561909726\t324.26244765650472\t0.67074908472678\t9.25852755105048\t1.56862672525420\n+13.67065896849530\t1065.15237514930618\t0.89132195431226\t3.09202122117190\t1.55639796005439\n+18.53282195500751\t302.71124845232475\t0.21969749262499\t10.78623784707370\t1.29902155047722\n+81.16831490893233\t406.55138981837536\t0.81622474872584\t3.74073747041699\t1.33090339404347\n+43.17041836631217\t1661.32290939370682\t0.81764937877673\t4.36111950120899\t1.53902619730346\n+17.54104537423366\t734.73264332017334\t0.00568850735257\t3.52426353444840\t0.23388919220068\n+79.56625084732873\t150.58465705466725\t0.59884337692849\t7.03804539042854\t0.84782025617307\n+10.51476854120563\t749.61728091801888\t0.03647605659257\t9.90411563442076\t1.20367824178660\n+98.09208570123114\t223.58662823399155\t0.89054594472850\t6.76901499400033\t1.11305076538663\n+74.24796890979773\t1155.14994326908754\t0.58184219239878\t1.20439132026923\t1.46077316749872\n+21.00265776728606\t1015.47627228348676\t0.76911517110565\t3.50695229138396\t1.54391141597256\n+28.58956904068647\t1518.16034202109449\t0.97500649360659\t9.84853293491106\t1.55148427211649\n+35.95078439369023\t1103.97655589147462\t0.35479561165730\t4.40190215370646\t1.47926803122527\n+17.80809895058049\t513.96766354295312\t0.04486228246078\t6.05431429635789\t0.91315256357435\n+37.62524542973630\t1094.08731435497407\t0.62994187558750\t2.42600314446284\t1.51625869990221\n+93.38412994664191\t1671.69654829222122\t0.60229665773087\t4.87766280326631\t1.47831276444836\n+36.31880041093498\t459.48790885887036\t0.27676506139634\t3.46535881203550\t1.29260835966305\n+17.36080017402051\t1704.74454199041770\t0.95701260035280\t6.97973684328921\t1.56015547475739\n+73.13007530599226\t681.72659818103102\t0.09205560337724\t5.63498018937148\t0.70920593282267\n+50.86988932381939\t270.17473755699689\t0.52803522331805\t10.92158036510528\t1.22827369187885\n+39.50359317582296\t673.90351039803500\t0.80545053732928\t8.54348994582354\t1.49814640817714\n+31.30664415885097\t1161.44389849996833\t0.54040457530072\t3.96793750880015\t1.52095843815128\n+11.07879011824457\t636.40170691532239\t0.45697913004927\t7.58940070226197\t1.53272000488866\n+25.42575178177181\t1172.98478850506444\t0.20012360721840\t7.57624805528984\t1.46290326408829\n+77.82892154498485\t1399.23761909305881\t0.61032815320939\t4.09000348524402\t1.47991217301289\n+69.77349075129560\t1529.96037797557256\t0.62532375775681\t10.82407829609550\t1.49799535701286\n+97.65001270158552\t397.97993628844188\t0.02317813647840\t2.60744548507082\t0.09417496029834\n+92.34968252590873\t1683.40961181445255\t0.21097841871845\t4.60525250814608\t1.31640898791919\n+54.93752616276721\t569.73424151618599\t0.46060162107485\t7.96161564823385\t1.36442735303208\n+50.03558966748651\t1295.45745511454766\t0.52595593622978\t1.01399023119044\t1.49749225094498\n+39.47002866898355\t929.68153737371244\t0.40288033137914\t4.54298300106321\t1.46580400777824\n+50.06143194429532\t852.91679202019429\t0.09043278819644\t3.73562920027441\t0.99509523340377\n+94.34770977427269\t169.02778025117351\t0.03999868964065\t3.83140359719820\t0.07152072382162\n+58.23441702167689\t1744.41411222868214\t0.99264223740297\t10.93117372481045\t1.53717818150701\n+11.00483309665630\t1211.17932126885103\t0.52398683448831\t2.73149909808731\t1.55345783481985\n+94.29602449150258\t520.77315817937847\t0.99893226884321\t6.82693815149899\t1.39148036801250\n+18.32790006305758\t
757.62528864091405\t0.18967352891215\t5.10770673025310\t1.44393949404119\n+59.46800689017054\t1296.29894117862568\t0.48689148236912\t4.09589817766705\t1.47685300084874\n+57.74413728278473\t847.25004745856268\t0.35967810260054\t4.21331932008814\t1.38352817292728\n+20.82072401960227\t862.85251081887418\t0.49184291026405\t9.99076314793711\t1.52177501985608\n+72.93604610294412\t1383.70406021234839\t0.37543924756199\t4.43739535235384\t1.43131051873998\n+65.50352059993224\t1287.23540880812243\t0.11353757521868\t2.33028689373575\t1.14944356563723\n+45.60390576061239\t386.61331307620571\t0.96164190377465\t9.37615744861810\t1.44874342727972\n+52.01606870379233\t482.23941725'..b'859781106711\t0.08226452393202\t7.35636709825399\t1.26428010560184\n+79.64052251862078\t1685.36613567278891\t0.68462427169271\t5.88293166805099\t1.50188361535352\n+48.54143101843673\t1704.88050248556237\t0.21134788749712\t5.11648138177833\t1.43688601455846\n+98.96655767792834\t172.07811591269444\t0.70132651409352\t1.25171563884812\t0.88392739834518\n+32.08817260865362\t245.77958638999525\t0.06088456434664\t2.11406316704053\t0.43631433888508\n+16.92689081454309\t1151.06970045219464\t0.43839309463984\t9.30903764603975\t1.53726512352234\n+23.97921895644722\t436.13916546124790\t0.71189965858292\t9.58294925326778\t1.49371835981379\n+55.90558855960195\t1276.42473559746963\t0.60511203551818\t6.59217283268040\t1.49854138074293\n+86.03941909075867\t1628.20197943455605\t0.84960732575898\t3.54466535494455\t1.50867912100600\n+87.75555422867708\t836.50464658900239\t0.72949434396451\t5.12640767538794\t1.42796708729724\n+19.08360458112225\t1279.03708947993277\t0.24063282092985\t9.51324426832995\t1.50887120141284\n+82.41022892585868\t983.66448115430603\t0.38634079430617\t6.90880790734925\t1.35725052598321\n+13.75236149078257\t1446.07702142766880\t0.96582581524448\t8.79795804232935\t1.56095002746356\n+23.93350820958198\t1542.44475628035298\t0.80811501289370\t1.63681124220468\t1.55159769213597\n+23.12283040488030\t1088.99047240300797\t0.13748694797777\t7.78440704371486\t1.41756832683904\n+99.21906895152472\t592.47620099114886\t0.76091275955885\t1.46527167666139\t1.35416492977877\n+33.25359065222067\t1668.71176293712756\t0.63651704125478\t7.01848606131893\t1.53949913387512\n+92.81846814636464\t422.46031132251642\t0.01782318402652\t2.90072176134172\t0.08093567500779\n+52.18717978245897\t935.65247451329367\t0.80049120556915\t9.59436311444911\t1.50123122833668\n+21.29560322403450\t839.99840771015579\t0.42161750906259\t1.54717377081942\t1.51073828234348\n+0.99336936709830\t1415.84717539845406\t0.27531321879490\t8.17740004543336\t1.56824793336494\n+42.13559217565558\t359.82122610463938\t0.19252168025198\t4.13815233317635\t1.02432784389074\n+80.51701673672972\t146.28961840978741\t0.04910597269756\t6.66000384985094\t0.08897131915549\n+68.68106961543961\t1313.00248207047480\t0.47969376130168\t4.67656721778529\t1.46218003661270\n+83.99700992017513\t867.59808369591872\t0.32136583878220\t1.92719867171042\t1.27818079494083\n+6.04379321355856\t274.24409587470802\t0.68270645642831\t7.80735767230638\t1.53852709677779\n+24.31741658874254\t1171.93956616774358\t0.06913918311155\t9.72919961746265\t1.27923356083904\n+10.96069498325127\t401.83796801162464\t0.46737799144535\t8.75949219403373\t1.51250187991915\n+85.44445157050565\t469.35693097718968\t0.07664186926890\t8.88914797103218\t0.39847812418013\n+54.75000011493021\t1410.11180659606998\t0.92004704285871\t5.80972765927881\t1.52862056179624\n+45.95536700101840\t1104.17293369002959\t0.59931878072428\t6.0437345123
3758\t1.50146258128116\n+30.68785297434506\t1010.03319009672180\t0.92492694340279\t10.70550802030219\t1.53795903482368\n+39.57946098969964\t1430.51649192529544\t0.63508814775093\t3.29969165249033\t1.52725831744223\n+5.12070928629372\t172.16298201137550\t0.12284775190377\t3.20212517833639\t1.33323212481198\n+82.90227537008228\t592.05120099602254\t0.78106408263109\t6.04665812596679\t1.39340530523187\n+13.84489237765847\t1396.68614300270474\t0.92133179352084\t10.43018632036191\t1.56003767212055\n+70.44357972639433\t1259.26516594596546\t0.54655181336750\t4.69217229737883\t1.46880028650007\n+98.24675747269930\t232.84478566118992\t0.89767831074227\t3.63930989094871\t1.13140390826337\n+57.44758420233384\t963.49649420466915\t0.55447680807193\t7.47167331828399\t1.46367578814173\n+18.54741590544805\t569.97496501234991\t0.14843865774278\t1.30304170594405\t1.35498659455596\n+93.92555841521393\t692.13102468460193\t0.10956460430561\t4.78326995661470\t0.67921924302924\n+38.40794590414141\t1212.71859427964318\t0.24449092930616\t7.61480973773685\t1.44197530054534\n+9.84928837918622\t1074.57745936695619\t0.10686550466855\t6.48254507463987\t1.48523676081889\n+51.97517077112073\t609.60010892704452\t0.45572905099810\t1.38666520129567\t1.38584637577290\n+59.90030248885009\t126.45889013919340\t0.50181359368553\t6.01726121691614\t0.81422640615706\n'
diff -r 000000000000 -r af2624d5ab32 test-data/gaus.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gaus.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 0
+1.17052698294814 2.07771223225020 1
+2.03460756150493 -0.55071441191459 1
+-0.07602346572462 0.00395759398760 0
+-0.18949583082318 0.25500144427338 0
+1.29974807475531 -1.73309562365328 1
+1.94326226343400 -1.44743611231959 1
+1.18962226802913 -1.69061682638360 1
+-0.57882582479099 -1.19945119919393 1
+0.73100034383481 1.36155612514533 1
+-0.51442989136879 -0.21606012000326 0
+0.10886346783368 0.50780959049232 0
+-0.12620118371358 1.99027364975409 1
+-0.70441819973502 -0.59137512108517 0
+-1.50758516026439 0.10788413080661 1
+-0.45802698550262 0.43516348812289 0
+1.09368664965872 -0.22951775323996 0
+-0.94004616154477 -0.82793236436587 1
+0.46629936835719 0.26998723863109 0
+-0.32623805920230 0.05567601485478 0
+0.69012147022471 0.68689006613840 0
+1.58617093842324 0.69339065851659 1
+0.67272080570966 -0.10441114339063 0
+-0.59998304484887 1.57616672431921 1
+2.07479316794657 -0.34329768218247 1
+-0.54443916167246 -0.66817173681343 0
+0.52299780452075 -0.01634540275749 0
+-2.97331547405089 0.03331727813886 1
+-0.00889866329211 -0.54319800840717 0
+-1.29639180715015 0.09513944356545 1
+-1.49772038108317 -1.19388597679194 1
+-0.25187913921321 -0.84243573825130 0
+-0.07961124591740 -0.88973148126503 0
+0.89459770576001 0.75969311985021 0
+-0.23871286931468 -1.42906689844829 1
+0.22117966922140 -1.07004333056829 0
+-0.31983104711809 -1.14774159987659 0
+-0.42371509994342 -1.18598356492917 1
+0.98132078695123 0.51421884139438 0
+0.75044476153418 -0.45594692746800 0
+1.29626258639906 0.95227562608189 1
+-1.74976547305470 0.34268040332750 1
+0.73699516901821 0.43586725251491 0
+0.61303888168755 0.73620521332382 0
+-1.41504292085253 -0.64075992301057 1
+0.22239080944545 -0.68492173524723 0
+1.61898166067526 1.54160517451341 1
+1.87657342696217 -0.37690335016897 1
+0.00731456322890 -0.61293873547816 0
+0.74705565509915 0.42967643586261 0
+0.10887198989791 0.02828363482307 0
+-0.43813562270442 -1.11831824625544 0
+0.30104946378807 -1.68489996168518 1
+-1.39699934495328 -1.09717198463982 1
+-0.24888866705811 -0.45017643501165 0
+-1.63552939938082 -1.04420987770932 1
+-0.17478155445150 1.01726434325117 0
+-0.58359505032266 0.81684707168578 0
+-1.95808123420787 -0.13480131198999 1
+0.42238022042198 -1.09404293103224 0
+-0.98331009912963 0.35750775316737 0
+-1.56668752957839 0.90497412146668 1
+0.94900477650526 -0.01939758596247 0
+-0.53128037685191 1.02973268513335 0
+0.75306218769198 -1.60943889617295 1
+0.13024845535270 0.94936086466099 0
+-0.33177713505281 -0.68921797808975 0
+1.70362398812070 -0.72215077005575 1
+-1.84118830018672 0.36609322616730 1
+-0.36546199267663 -1.27102304084666 1
+-0.88179838948302 0.01863894948806 0
+-1.70595200573817 0.36916395710701 1
+-0.86222734651048 1.24946974272698 1
+-1.18801759731772 -0.54974619353549 1
+-1.70465120576096 -1.13626100682736 1
+-0.18501411089711 -2.48715153522277 1
+-0.45592201921402 0.64917292725468 0
+0.22239960855530 -1.44321699522534 1
+0.75045333032684 -1.30699233908082 1
+0.13242780114877 0.02221392803939 0
+1.83193608182554 0.00301743403121 1
+-0.41581633584065 -1.35850293675980 1
+-1.35639904886131 -1.23243451391493 1
+-1.54061602455261 2.04671396848214 1
+-1.21725413064101 -0.15726516737514 0
+1.02692143939979 -1.43219061105893 1
+1.15303580256364 -0.25243603652139 0
+0.58057333579427 -1.10452309266229 1
+1.77599358550677 0.51307437883965 1
+-0.75635230559444 0.81645401101929 0
+1.23690788519023 -0.23028467842711 1
+0.31736797594107 -0.75241417772504 0
+0.18451869056394 0.93708220110895 0
+-0.61662937168319 0.76318364605999 0
+0.77962630366370 -0.43812091634884 0
+0.23784462192362 0.01354854862861 0
+2.29865394071368 -0.16520955264073 1
+0.19291719182331 -0.34845893065237 0
+-1.61357850282218 1.47071386661213 1
+-2.01518871712253 -0.07954058693411 1
+0.77882239932307 0.42823287059674 0
diff -r 000000000000 -r af2624d5ab32 test-data/gbc_model01
Binary file test-data/gbc_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/gbc_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gbc_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 predicted
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/gbr_model01
Binary file test-data/gbr_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/gbr_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gbr_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,88 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed predicted
+2016 9 29 69 68 66.1 63 71 68 57 0 0 0 0 1 0 0 69.58390938468499
+2016 4 27 59 60 60.7 59 65 60 50 0 0 0 0 0 0 1 62.52052253790107
+2016 11 28 53 48 48.0 46 48 49 44 0 1 0 0 0 0 0 51.680887055498296
+2016 10 12 60 62 61.0 60 63 63 52 0 0 0 0 0 0 1 61.246237852679315
+2016 6 19 67 65 70.4 69 73 70 58 0 0 0 1 0 0 0 65.03047512424794
+2016 5 7 68 77 63.0 61 65 63 83 0 0 1 0 0 0 0 71.59883326872612
+2016 7 25 75 80 77.1 75 82 76 81 0 1 0 0 0 0 0 78.5487110206859
+2016 8 15 90 83 76.6 76 79 75 70 0 1 0 0 0 0 0 80.77545676519121
+2016 10 28 58 60 55.6 52 56 55 52 1 0 0 0 0 0 0 62.11231551486949
+2016 6 5 80 81 68.0 64 70 66 54 0 0 0 1 0 0 0 72.42798354934989
+2016 3 19 58 63 54.2 54 59 54 62 0 0 1 0 0 0 0 61.63169537788603
+2016 6 7 92 86 68.3 67 69 70 58 0 0 0 0 0 1 0 74.4731129283374
+2016 12 10 41 36 45.9 44 48 44 65 0 0 1 0 0 0 0 39.39391240070939
+2016 4 23 73 64 59.9 56 63 59 57 0 0 1 0 0 0 0 62.93072314757922
+2016 6 24 75 68 71.5 67 73 73 65 1 0 0 0 0 0 0 73.42248151259705
+2016 2 9 51 57 49.4 45 52 49 57 0 0 0 0 0 1 0 55.106926049453094
+2016 11 10 71 65 52.2 52 54 51 38 0 0 0 0 1 0 0 62.931939262865185
+2016 3 21 61 55 54.5 52 56 55 52 0 1 0 0 0 0 0 56.54303204039889
+2016 2 28 60 57 51.3 48 56 53 66 0 0 0 1 0 0 0 57.5819236251605
+2016 6 28 78 85 72.4 72 76 74 67 0 0 0 0 0 1 0 77.87772901898535
+2016 10 6 63 66 63.3 62 67 63 55 0 0 0 0 1 0 0 64.65839290042257
+2016 2 17 55 56 50.0 45 51 49 46 0 0 0 0 0 0 1 54.26509333618539
+2016 6 15 66 60 69.7 65 73 71 69 0 0 0 0 0 0 1 66.15190585447276
+2016 10 15 60 60 59.9 59 62 59 46 0 0 1 0 0 0 0 62.135403207035466
+2016 3 26 54 57 55.2 53 57 55 54 0 0 1 0 0 0 0 59.148716891180484
+2016 1 26 51 54 48.3 44 53 50 61 0 0 0 0 0 1 0 53.05069255536133
+2016 5 23 59 66 66.1 63 68 68 66 0 1 0 0 0 0 0 64.85734973368784
+2016 1 10 48 50 46.5 45 48 48 49 0 0 0 1 0 0 0 45.06961558051259
+2016 5 22 66 59 65.9 62 66 65 80 0 0 0 1 0 0 0 60.46222634728846
+2016 7 15 75 77 76.0 74 80 78 75 1 0 0 0 0 0 0 82.42822449858019
+2016 4 22 81 73 59.7 59 64 60 59 1 0 0 0 0 0 0 72.82325656081416
+2016 4 29 61 64 61.2 61 65 61 49 1 0 0 0 0 0 0 65.00954748796826
+2016 1 23 52 57 48.0 45 49 50 37 0 0 1 0 0 0 0 50.836039030817304
+2016 8 16 83 84 76.5 72 78 78 90 0 0 0 0 0 1 0 82.12928759095375
+2016 8 1 76 73 77.4 76 78 79 65 0 1 0 0 0 0 0 72.22807576891064
+2016 2 27 61 60 51.2 51 53 53 61 0 0 1 0 0 0 0 61.680080402280524
+2016 2 12 56 55 49.6 49 52 48 33 1 0 0 0 0 0 0 54.563346197441135
+2016 1 31 52 48 48.7 47 52 49 61 0 0 0 1 0 0 0 51.05906646453181
+2016 9 5 67 68 73.5 71 75 73 54 0 1 0 0 0 0 0 68.96231670707674
+2016 12 20 39 46 45.1 45 49 45 62 0 0 0 0 0 1 0 41.12571355242521
+2016 5 1 61 68 61.6 60 65 60 75 0 0 0 1 0 0 0 66.15287588548186
+2016 3 28 59 51 55.5 55 57 55 47 0 1 0 0 0 0 0 59.11011722462772
+2016 4 21 81 81 59.4 55 61 59 55 0 0 0 0 1 0 0 74.41085058157081
+2016 1 6 40 44 46.1 43 49 48 40 0 0 0 0 0 0 1 41.20470505512009
+2016 10 21 58 62 57.8 56 60 59 44 1 0 0 0 0 0 0 61.62578223843827
+2016 5 2 68 77 61.9 60 66 61 59 0 1 0 0 0 0 0 72.48517225879384
+2016 3 1 53 54 51.5 48 56 50 53 0 0 0 0 0 1 0 53.70588500948454
+2016 7 21 78 82 76.8 73 81 78 84 0 0 0 0 1 0 0 82.7108327367616
+2016 3 17 51 53 53.9 49 58 52 62 0 0 0 0 1 0 0 53.251174797156146
+2016 12 6 46 40 46.4 44 50 45 56 0 0 0 0 0 1 0 42.363067913515295
+2016 12 21 46 51 45.1 44 50 46 39 0 0 0 0 0 0 1 45.6445314453422
+2016 1 4 44 41 45.9 44 48 46 53 0 1 0 0 0 0 0 42.214387828919136
+2016 10 2 67 63 64.9 62 69 66 82 0 0 0 1 0 0 0 62.736396078841445
+2016 5 28 65 64 66.8 64 69 65 64 0 0 1 0 0 0 0 63.947755881441275
+2016 9 11 74 77 72.1 69 75 71 70 0 0 0 1 0 0 0 73.98460722074996
+2016 10 25 62 61 56.5 53 60 55 70 0 0 0 0 0 1 0 61.917230159710556
+2016 2 18 56 57 50.1 47 55 49 34 0 0 0 0 1 0 0 55.720840480421955
+2016 11 1 117 59 54.5 51 59 55 61 0 0 0 0 0 1 0 61.52527009553642
+2016 3 16 49 51 53.7 52 54 55 65 0 0 0 0 0 0 1 54.86875365404632
+2016 4 26 55 59 60.5 56 61 62 75 0 0 0 0 0 1 0 61.34654097192005
+2016 6 10 67 65 68.8 67 71 67 73 1 0 0 0 0 0 0 65.38427016260138
+2016 2 3 46 51 48.9 48 49 50 40 0 0 0 0 0 0 1 49.75042424691725
+2016 3 7 64 60 52.4 49 57 53 71 0 1 0 0 0 0 0 61.08886411894317
+2016 9 18 75 68 70.0 66 73 71 90 0 0 0 1 0 0 0 70.7844532497458
+2016 3 20 63 61 54.3 51 56 55 50 0 0 0 1 0 0 0 59.66542877819202
+2016 4 6 60 57 56.8 53 59 57 64 0 0 0 0 0 0 1 59.301283011436794
+2016 7 2 73 76 73.3 70 77 73 84 0 0 1 0 0 0 0 71.22373270826222
+2016 7 5 71 68 74.0 72 77 74 62 0 0 0 0 0 1 0 69.18347305115272
+2016 7 19 80 73 76.6 76 78 77 90 0 0 0 0 0 1 0 77.46150755171419
+2016 12 9 40 41 46.0 43 51 44 54 1 0 0 0 0 0 0 41.72540550328788
+2016 6 29 85 79 72.6 68 76 74 81 0 0 0 0 0 0 1 76.10594345672801
+2016 3 22 55 56 54.6 51 55 54 64 0 0 0 0 0 1 0 58.39058086785531
+2016 4 3 71 63 56.3 54 61 56 64 0 0 0 1 0 0 0 60.14340322699943
+2016 1 17 48 54 47.4 45 51 46 47 0 0 0 1 0 0 0 50.26292708961779
+2016 3 10 54 55 52.8 49 55 53 50 0 0 0 0 1 0 0 55.522605642512985
+2016 5 9 82 63 63.4 59 66 62 64 0 1 0 0 0 0 0 61.00788720614107
+2016 1 8 51 45 46.3 43 47 46 34 1 0 0 0 0 0 0 44.83434926564482
+2016 8 11 72 76 76.9 74 81 75 80 0 0 0 0 1 0 0 74.70250254902773
+2016 12 29 47 48 45.3 43 50 45 65 0 0 0 0 1 0 0 49.53438043623214
+2016 11 23 54 54 49.1 48 52 49 38 0 0 0 0 0 0 1 51.467278500089826
+2016 11 19 52 55 50.0 50 54 49 56 0 0 1 0 0 0 0 53.781953941654095
+2016 4 7 57 68 56.9 52 61 55 38 0 0 0 0 1 0 0 68.59176558339176
+2016 6 4 71 80 67.9 63 72 66 76 0 0 1 0 0 0 0 72.73805569547436
+2016 6 17 67 71 70.0 66 74 69 54 1 0 0 0 0 0 0 74.00873400230815
+2016 10 5 61 63 63.7 61 66 65 48 0 0 0 0 0 0 1 63.553834877849695
+2016 3 4 55 59 51.9 47 56 53 45 1 0 0 0 0 0 0 57.389419897063036
+2016 12 22 51 49 45.1 42 47 46 38 0 0 0 0 1 0 0 44.218563783534144
diff -r 000000000000 -r af2624d5ab32 test-data/get_params.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+ Parameter Value
+@ copy_X copy_X: True
+@ fit_intercept fit_intercept: True
+* n_jobs n_jobs: 1
+@ normalize normalize: False
+ Note: @, params eligible for search in searchcv tool.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,30 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('robustscaler', RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)), ('selectkbest', SelectKBest(k=10, score_func=<function f_classif at 0x111ef0158>)), ('svr', SVR(C=1.0, cache_size=200, coef0=0.0, degree=3, epsilon=0.1,
+  gamma='auto_deprecated', kernel='linear', max_iter=-1, shrinking=True,
+  tol=0.001, verbose=False))]"
+@ robustscaler "robustscaler: RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)"
+@ selectkbest selectkbest: SelectKBest(k=10, score_func=<function f_classif at 0x111ef0158>)
+@ svr "svr: SVR(C=1.0, cache_size=200, coef0=0.0, degree=3, epsilon=0.1,
+  gamma='auto_deprecated', kernel='linear', max_iter=-1, shrinking=True,
+  tol=0.001, verbose=False)"
+@ robustscaler__copy robustscaler__copy: True
+@ robustscaler__quantile_range robustscaler__quantile_range: (25.0, 75.0)
+@ robustscaler__with_centering robustscaler__with_centering: True
+@ robustscaler__with_scaling robustscaler__with_scaling: True
+@ selectkbest__k selectkbest__k: 10
+@ selectkbest__score_func selectkbest__score_func: <function f_classif at 0x111ef0158>
+@ svr__C svr__C: 1.0
+@ svr__cache_size svr__cache_size: 200
+@ svr__coef0 svr__coef0: 0.0
+@ svr__degree svr__degree: 3
+@ svr__epsilon svr__epsilon: 0.1
+@ svr__gamma svr__gamma: 'auto_deprecated'
+@ svr__kernel svr__kernel: 'linear'
+@ svr__max_iter svr__max_iter: -1
+@ svr__shrinking svr__shrinking: True
+@ svr__tol svr__tol: 0.001
+* svr__verbose svr__verbose: False
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,33 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('robustscaler', RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)), ('lassocv', LassoCV(alphas=None, copy_X=True, cv='warn', eps=0.001, fit_intercept=True,
+    max_iter=1000, n_alphas=100, n_jobs=1, normalize=False, positive=False,
+    precompute='auto', random_state=None, selection='cyclic', tol=0.0001,
+    verbose=False))]"
+@ robustscaler "robustscaler: RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)"
+@ lassocv "lassocv: LassoCV(alphas=None, copy_X=True, cv='warn', eps=0.001, fit_intercept=True,
+    max_iter=1000, n_alphas=100, n_jobs=1, normalize=False, positive=False,
+    precompute='auto', random_state=None, selection='cyclic', tol=0.0001,
+    verbose=False)"
+@ robustscaler__copy robustscaler__copy: True
+@ robustscaler__quantile_range robustscaler__quantile_range: (25.0, 75.0)
+@ robustscaler__with_centering robustscaler__with_centering: True
+@ robustscaler__with_scaling robustscaler__with_scaling: True
+@ lassocv__alphas lassocv__alphas: None
+@ lassocv__copy_X lassocv__copy_X: True
+@ lassocv__cv lassocv__cv: 'warn'
+@ lassocv__eps lassocv__eps: 0.001
+@ lassocv__fit_intercept lassocv__fit_intercept: True
+@ lassocv__max_iter lassocv__max_iter: 1000
+@ lassocv__n_alphas lassocv__n_alphas: 100
+* lassocv__n_jobs lassocv__n_jobs: 1
+@ lassocv__normalize lassocv__normalize: False
+@ lassocv__positive lassocv__positive: False
+@ lassocv__precompute lassocv__precompute: 'auto'
+@ lassocv__random_state lassocv__random_state: None
+@ lassocv__selection lassocv__selection: 'cyclic'
+@ lassocv__tol lassocv__tol: 0.0001
+* lassocv__verbose lassocv__verbose: False
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,43 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('robustscaler', RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)), ('xgbclassifier', XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
+       colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
+       max_depth=3, min_child_weight=1, missing=nan, n_estimators=100,
+       n_jobs=1, nthread=None, objective='binary:logistic', random_state=0,
+       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
+       silent=True, subsample=1))]"
+@ robustscaler "robustscaler: RobustScaler(copy=True, quantile_range=(25.0, 75.0), with_centering=True,
+       with_scaling=True)"
+@ xgbclassifier "xgbclassifier: XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
+       colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
+       max_depth=3, min_child_weight=1, missing=nan, n_estimators=100,
+       n_jobs=1, nthread=None, objective='binary:logistic', random_state=0,
+       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
+       silent=True, subsample=1)"
+@ robustscaler__copy robustscaler__copy: True
+@ robustscaler__quantile_range robustscaler__quantile_range: (25.0, 75.0)
+@ robustscaler__with_centering robustscaler__with_centering: True
+@ robustscaler__with_scaling robustscaler__with_scaling: True
+@ xgbclassifier__base_score xgbclassifier__base_score: 0.5
+@ xgbclassifier__booster xgbclassifier__booster: 'gbtree'
+@ xgbclassifier__colsample_bylevel xgbclassifier__colsample_bylevel: 1
+@ xgbclassifier__colsample_bytree xgbclassifier__colsample_bytree: 1
+@ xgbclassifier__gamma xgbclassifier__gamma: 0
+@ xgbclassifier__learning_rate xgbclassifier__learning_rate: 0.1
+@ xgbclassifier__max_delta_step xgbclassifier__max_delta_step: 0
+@ xgbclassifier__max_depth xgbclassifier__max_depth: 3
+@ xgbclassifier__min_child_weight xgbclassifier__min_child_weight: 1
+@ xgbclassifier__missing xgbclassifier__missing: nan
+@ xgbclassifier__n_estimators xgbclassifier__n_estimators: 100
+* xgbclassifier__n_jobs xgbclassifier__n_jobs: 1
+* xgbclassifier__nthread xgbclassifier__nthread: None
+@ xgbclassifier__objective xgbclassifier__objective: 'binary:logistic'
+@ xgbclassifier__random_state xgbclassifier__random_state: 0
+@ xgbclassifier__reg_alpha xgbclassifier__reg_alpha: 0
+@ xgbclassifier__reg_lambda xgbclassifier__reg_lambda: 1
+@ xgbclassifier__scale_pos_weight xgbclassifier__scale_pos_weight: 1
+@ xgbclassifier__seed xgbclassifier__seed: None
+@ xgbclassifier__silent xgbclassifier__silent: True
+@ xgbclassifier__subsample xgbclassifier__subsample: 1
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params04.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params04.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,39 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('selectfrommodel', SelectFromModel(estimator=AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None),
+        max_features=None, norm_order=1, prefit=False, threshold=None)), ('linearsvc', LinearSVC(C=1.0, class_weight=None, dual=True, fit_intercept=True,
+     intercept_scaling=1, loss='squared_hinge', max_iter=1000,
+     multi_class='ovr', penalty='l2', random_state=None, tol=0.0001,
+     verbose=0))]"
+@ selectfrommodel "selectfrommodel: SelectFromModel(estimator=AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None),
+        max_features=None, norm_order=1, prefit=False, threshold=None)"
+@ linearsvc "linearsvc: LinearSVC(C=1.0, class_weight=None, dual=True, fit_intercept=True,
+     intercept_scaling=1, loss='squared_hinge', max_iter=1000,
+     multi_class='ovr', penalty='l2', random_state=None, tol=0.0001,
+     verbose=0)"
+@ selectfrommodel__estimator__algorithm selectfrommodel__estimator__algorithm: 'SAMME.R'
+@ selectfrommodel__estimator__base_estimator selectfrommodel__estimator__base_estimator: None
+@ selectfrommodel__estimator__learning_rate selectfrommodel__estimator__learning_rate: 1.0
+@ selectfrommodel__estimator__n_estimators selectfrommodel__estimator__n_estimators: 50
+@ selectfrommodel__estimator__random_state selectfrommodel__estimator__random_state: None
+@ selectfrommodel__estimator "selectfrommodel__estimator: AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None)"
+@ selectfrommodel__max_features selectfrommodel__max_features: None
+@ selectfrommodel__norm_order selectfrommodel__norm_order: 1
+@ selectfrommodel__prefit selectfrommodel__prefit: False
+@ selectfrommodel__threshold selectfrommodel__threshold: None
+@ linearsvc__C linearsvc__C: 1.0
+@ linearsvc__class_weight linearsvc__class_weight: None
+@ linearsvc__dual linearsvc__dual: True
+@ linearsvc__fit_intercept linearsvc__fit_intercept: True
+@ linearsvc__intercept_scaling linearsvc__intercept_scaling: 1
+@ linearsvc__loss linearsvc__loss: 'squared_hinge'
+@ linearsvc__max_iter linearsvc__max_iter: 1000
+@ linearsvc__multi_class linearsvc__multi_class: 'ovr'
+@ linearsvc__penalty linearsvc__penalty: 'l2'
+@ linearsvc__random_state linearsvc__random_state: None
+@ linearsvc__tol linearsvc__tol: 0.0001
+* linearsvc__verbose linearsvc__verbose: 0
+ Note: @, searchable params in searchcv too.
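The table above is what a get_params listing looks like for a two-step Pipeline: get_params(deep=True) returns each step object plus every nested 'step__param' key, and the '@' rows are the ones the searchcv tool exposes for tuning. A minimal sketch of how such a listing can be reproduced (the Pipeline construction here is inferred from the fixture, not taken from the tool's own code):

    from sklearn.ensemble import AdaBoostClassifier
    from sklearn.feature_selection import SelectFromModel
    from sklearn.pipeline import Pipeline
    from sklearn.svm import LinearSVC

    # Same topology as get_params04.tabular: a selector step feeding a linear SVM.
    pipe = Pipeline([
        ("selectfrommodel", SelectFromModel(estimator=AdaBoostClassifier())),
        ("linearsvc", LinearSVC()),
    ])
    # deep=True yields both step objects and nested keys such as 'linearsvc__C'.
    for name, value in sorted(pipe.get_params(deep=True).items()):
        print(name, repr(value))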
diff -r 000000000000 -r af2624d5ab32 test-data/get_params05.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params05.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,18 @@
+ Parameter Value
+@ bootstrap bootstrap: True
+@ criterion criterion: 'mse'
+@ max_depth max_depth: None
+@ max_features max_features: 'auto'
+@ max_leaf_nodes max_leaf_nodes: None
+@ min_impurity_decrease min_impurity_decrease: 0.0
+@ min_impurity_split min_impurity_split: None
+@ min_samples_leaf min_samples_leaf: 1
+@ min_samples_split min_samples_split: 2
+@ min_weight_fraction_leaf min_weight_fraction_leaf: 0.0
+@ n_estimators n_estimators: 100
+* n_jobs n_jobs: 1
+@ oob_score oob_score: False
+@ random_state random_state: 42
+* verbose verbose: 0
+@ warm_start warm_start: False
+ Note: @, params eligible for search in searchcv tool.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params06.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params06.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,22 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('pca', PCA(copy=True, iterated_power='auto', n_components=None, random_state=None,
+  svd_solver='auto', tol=0.0, whiten=False)), ('adaboostregressor', AdaBoostRegressor(base_estimator=None, learning_rate=1.0, loss='linear',
+         n_estimators=50, random_state=None))]"
+@ pca "pca: PCA(copy=True, iterated_power='auto', n_components=None, random_state=None,
+  svd_solver='auto', tol=0.0, whiten=False)"
+@ adaboostregressor "adaboostregressor: AdaBoostRegressor(base_estimator=None, learning_rate=1.0, loss='linear',
+         n_estimators=50, random_state=None)"
+@ pca__copy pca__copy: True
+@ pca__iterated_power pca__iterated_power: 'auto'
+@ pca__n_components pca__n_components: None
+@ pca__random_state pca__random_state: None
+@ pca__svd_solver pca__svd_solver: 'auto'
+@ pca__tol pca__tol: 0.0
+@ pca__whiten pca__whiten: False
+@ adaboostregressor__base_estimator adaboostregressor__base_estimator: None
+@ adaboostregressor__learning_rate adaboostregressor__learning_rate: 1.0
+@ adaboostregressor__loss adaboostregressor__loss: 'linear'
+@ adaboostregressor__n_estimators adaboostregressor__n_estimators: 50
+@ adaboostregressor__random_state adaboostregressor__random_state: None
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params07.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params07.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,16 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('rbfsampler', RBFSampler(gamma=2.0, n_components=10, random_state=None)), ('adaboostclassifier', AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None))]"
+@ rbfsampler rbfsampler: RBFSampler(gamma=2.0, n_components=10, random_state=None)
+@ adaboostclassifier "adaboostclassifier: AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None)"
+@ rbfsampler__gamma rbfsampler__gamma: 2.0
+@ rbfsampler__n_components rbfsampler__n_components: 10
+@ rbfsampler__random_state rbfsampler__random_state: None
+@ adaboostclassifier__algorithm adaboostclassifier__algorithm: 'SAMME.R'
+@ adaboostclassifier__base_estimator adaboostclassifier__base_estimator: None
+@ adaboostclassifier__learning_rate adaboostclassifier__learning_rate: 1.0
+@ adaboostclassifier__n_estimators adaboostclassifier__n_estimators: 50
+@ adaboostclassifier__random_state adaboostclassifier__random_state: None
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params08.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params08.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,24 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('featureagglomeration', FeatureAgglomeration(affinity='euclidean', compute_full_tree='auto',
+           connectivity=None, linkage='ward', memory=None, n_clusters=3,
+           pooling_func=<function mean at 0x1123f1620>)), ('adaboostclassifier', AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None))]"
+@ featureagglomeration "featureagglomeration: FeatureAgglomeration(affinity='euclidean', compute_full_tree='auto',
+           connectivity=None, linkage='ward', memory=None, n_clusters=3,
+           pooling_func=<function mean at 0x1123f1620>)"
+@ adaboostclassifier "adaboostclassifier: AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
+          learning_rate=1.0, n_estimators=50, random_state=None)"
+@ featureagglomeration__affinity featureagglomeration__affinity: 'euclidean'
+@ featureagglomeration__compute_full_tree featureagglomeration__compute_full_tree: 'auto'
+@ featureagglomeration__connectivity featureagglomeration__connectivity: None
+@ featureagglomeration__linkage featureagglomeration__linkage: 'ward'
+* featureagglomeration__memory featureagglomeration__memory: None
+@ featureagglomeration__n_clusters featureagglomeration__n_clusters: 3
+@ featureagglomeration__pooling_func featureagglomeration__pooling_func: <function mean at 0x1123f1620>
+@ adaboostclassifier__algorithm adaboostclassifier__algorithm: 'SAMME.R'
+@ adaboostclassifier__base_estimator adaboostclassifier__base_estimator: None
+@ adaboostclassifier__learning_rate adaboostclassifier__learning_rate: 1.0
+@ adaboostclassifier__n_estimators adaboostclassifier__n_estimators: 50
+@ adaboostclassifier__random_state adaboostclassifier__random_state: None
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params09.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params09.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,39 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('relieff', ReliefF(discrete_threshold=10, n_features_to_select=3, n_jobs=1,
+    n_neighbors=100, verbose=False)), ('randomforestregressor', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
+           max_features='auto', max_leaf_nodes=None,
+           min_impurity_decrease=0.0, min_impurity_split=None,
+           min_samples_leaf=1, min_samples_split=2,
+           min_weight_fraction_leaf=0.0, n_estimators='warn', n_jobs=1,
+           oob_score=False, random_state=None, verbose=0, warm_start=False))]"
+@ relieff "relieff: ReliefF(discrete_threshold=10, n_features_to_select=3, n_jobs=1,
+    n_neighbors=100, verbose=False)"
+@ randomforestregressor "randomforestregressor: RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
+           max_features='auto', max_leaf_nodes=None,
+           min_impurity_decrease=0.0, min_impurity_split=None,
+           min_samples_leaf=1, min_samples_split=2,
+           min_weight_fraction_leaf=0.0, n_estimators='warn', n_jobs=1,
+           oob_score=False, random_state=None, verbose=0, warm_start=False)"
+@ relieff__discrete_threshold relieff__discrete_threshold: 10
+@ relieff__n_features_to_select relieff__n_features_to_select: 3
+* relieff__n_jobs relieff__n_jobs: 1
+@ relieff__n_neighbors relieff__n_neighbors: 100
+* relieff__verbose relieff__verbose: False
+@ randomforestregressor__bootstrap randomforestregressor__bootstrap: True
+@ randomforestregressor__criterion randomforestregressor__criterion: 'mse'
+@ randomforestregressor__max_depth randomforestregressor__max_depth: None
+@ randomforestregressor__max_features randomforestregressor__max_features: 'auto'
+@ randomforestregressor__max_leaf_nodes randomforestregressor__max_leaf_nodes: None
+@ randomforestregressor__min_impurity_decrease randomforestregressor__min_impurity_decrease: 0.0
+@ randomforestregressor__min_impurity_split randomforestregressor__min_impurity_split: None
+@ randomforestregressor__min_samples_leaf randomforestregressor__min_samples_leaf: 1
+@ randomforestregressor__min_samples_split randomforestregressor__min_samples_split: 2
+@ randomforestregressor__min_weight_fraction_leaf randomforestregressor__min_weight_fraction_leaf: 0.0
+@ randomforestregressor__n_estimators randomforestregressor__n_estimators: 'warn'
+* randomforestregressor__n_jobs randomforestregressor__n_jobs: 1
+@ randomforestregressor__oob_score randomforestregressor__oob_score: False
+@ randomforestregressor__random_state randomforestregressor__random_state: None
+* randomforestregressor__verbose randomforestregressor__verbose: 0
+@ randomforestregressor__warm_start randomforestregressor__warm_start: False
+ Note: @, searchable params in searchcv too.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params10.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params10.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,12 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('adaboostregressor', AdaBoostRegressor(base_estimator=None, learning_rate=1.0, loss='linear',
+         n_estimators=50, random_state=None))]"
+@ adaboostregressor "adaboostregressor: AdaBoostRegressor(base_estimator=None, learning_rate=1.0, loss='linear',
+         n_estimators=50, random_state=None)"
+@ adaboostregressor__base_estimator adaboostregressor__base_estimator: None
+@ adaboostregressor__learning_rate adaboostregressor__learning_rate: 1.0
+@ adaboostregressor__loss adaboostregressor__loss: 'linear'
+@ adaboostregressor__n_estimators adaboostregressor__n_estimators: 50
+@ adaboostregressor__random_state adaboostregressor__random_state: None
+ Note: @, params eligible for search in searchcv tool.
diff -r 000000000000 -r af2624d5ab32 test-data/get_params11.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params11.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,46 @@
+ Parameter Value
+* memory memory: None
+* steps "steps: [('editednearestneighbours', EditedNearestNeighbours(kind_sel='all', n_jobs=1, n_neighbors=3,
+            random_state=None, ratio=None, return_indices=False,
+            sampling_strategy='auto')), ('randomforestclassifier', RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
+            max_depth=None, max_features='auto', max_leaf_nodes=None,
+            min_impurity_decrease=0.0, min_impurity_split=None,
+            min_samples_leaf=1, min_samples_split=2,
+            min_weight_fraction_leaf=0.0, n_estimators='warn', n_jobs=1,
+            oob_score=False, random_state=None, verbose=0,
+            warm_start=False))]"
+@ editednearestneighbours "editednearestneighbours: EditedNearestNeighbours(kind_sel='all', n_jobs=1, n_neighbors=3,
+            random_state=None, ratio=None, return_indices=False,
+            sampling_strategy='auto')"
+@ randomforestclassifier "randomforestclassifier: RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
+            max_depth=None, max_features='auto', max_leaf_nodes=None,
+            min_impurity_decrease=0.0, min_impurity_split=None,
+            min_samples_leaf=1, min_samples_split=2,
+            min_weight_fraction_leaf=0.0, n_estimators='warn', n_jobs=1,
+            oob_score=False, random_state=None, verbose=0,
+            warm_start=False)"
+@ editednearestneighbours__kind_sel editednearestneighbours__kind_sel: 'all'
+* editednearestneighbours__n_jobs editednearestneighbours__n_jobs: 1
+@ editednearestneighbours__n_neighbors editednearestneighbours__n_neighbors: 3
+@ editednearestneighbours__random_state editednearestneighbours__random_state: None
+@ editednearestneighbours__ratio editednearestneighbours__ratio: None
+@ editednearestneighbours__return_indices editednearestneighbours__return_indices: False
+@ editednearestneighbours__sampling_strategy editednearestneighbours__sampling_strategy: 'auto'
+@ randomforestclassifier__bootstrap randomforestclassifier__bootstrap: True
+@ randomforestclassifier__class_weight randomforestclassifier__class_weight: None
+@ randomforestclassifier__criterion randomforestclassifier__criterion: 'gini'
+@ randomforestclassifier__max_depth randomforestclassifier__max_depth: None
+@ randomforestclassifier__max_features randomforestclassifier__max_features: 'auto'
+@ randomforestclassifier__max_leaf_nodes randomforestclassifier__max_leaf_nodes: None
+@ randomforestclassifier__min_impurity_decrease randomforestclassifier__min_impurity_decrease: 0.0
+@ randomforestclassifier__min_impurity_split randomforestclassifier__min_impurity_split: None
+@ randomforestclassifier__min_samples_leaf randomforestclassifier__min_samples_leaf: 1
+@ randomforestclassifier__min_samples_split randomforestclassifier__min_samples_split: 2
+@ randomforestclassifier__min_weight_fraction_leaf randomforestclassifier__min_weight_fraction_leaf: 0.0
+@ randomforestclassifier__n_estimators randomforestclassifier__n_estimators: 'warn'
+* randomforestclassifier__n_jobs randomforestclassifier__n_jobs: 1
+@ randomforestclassifier__oob_score randomforestclassifier__oob_score: False
+@ randomforestclassifier__random_state randomforestclassifier__random_state: None
+* randomforestclassifier__verbose randomforestclassifier__verbose: 0
+@ randomforestclassifier__warm_start randomforestclassifier__warm_start: False
+ Note: @, searchable params in searchcv too.
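get_params11 describes an imbalanced-learn Pipeline (a resampler followed by a classifier), which is the shape of model the imblearn_X/imblearn_y fixtures further below feed. A sketch under the imbalanced-learn API of that era (ratio/random_state were still accepted then; this is not the tool's own code):

    import pandas as pd
    from imblearn.pipeline import Pipeline
    from imblearn.under_sampling import EditedNearestNeighbours
    from sklearn.ensemble import RandomForestClassifier

    X = pd.read_csv("test-data/imblearn_X.tabular", sep="\t")
    y = pd.read_csv("test-data/imblearn_y.tabular", sep="\t").squeeze()
    pipe = Pipeline([
        ("editednearestneighbours", EditedNearestNeighbours(sampling_strategy="auto")),
        ("randomforestclassifier", RandomForestClassifier(n_estimators=100)),
    ])
    pipe.fit(X, y)   # the resampling step is applied during fit only, not at predict time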
diff -r 000000000000 -r af2624d5ab32 test-data/get_params12.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/get_params12.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,32 @@
+ Parameter Value
+@ estimator "estimator: XGBRegressor(base_score=0.5, booster='gbtree', colsample_bylevel=1,
+       colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
+       max_depth=3, min_child_weight=1, missing=nan, n_estimators=100,
+       n_jobs=1, nthread=None, objective='reg:linear', random_state=0,
+       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
+       silent=True, subsample=1)"
+@ n_features_to_select n_features_to_select: None
+* step step: 1
+* verbose verbose: 0
+@ estimator__base_score estimator__base_score: 0.5
+@ estimator__booster estimator__booster: 'gbtree'
+@ estimator__colsample_bylevel estimator__colsample_bylevel: 1
+@ estimator__colsample_bytree estimator__colsample_bytree: 1
+@ estimator__gamma estimator__gamma: 0
+@ estimator__learning_rate estimator__learning_rate: 0.1
+@ estimator__max_delta_step estimator__max_delta_step: 0
+@ estimator__max_depth estimator__max_depth: 3
+@ estimator__min_child_weight estimator__min_child_weight: 1
+@ estimator__missing estimator__missing: nan
+@ estimator__n_estimators estimator__n_estimators: 100
+* estimator__n_jobs estimator__n_jobs: 1
+* estimator__nthread estimator__nthread: None
+@ estimator__objective estimator__objective: 'reg:linear'
+@ estimator__random_state estimator__random_state: 0
+@ estimator__reg_alpha estimator__reg_alpha: 0
+@ estimator__reg_lambda estimator__reg_lambda: 1
+@ estimator__scale_pos_weight estimator__scale_pos_weight: 1
+@ estimator__seed estimator__seed: None
+@ estimator__silent estimator__silent: True
+@ estimator__subsample estimator__subsample: 1
+ Note: @, params eligible for search in searchcv tool.
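get_params12 lists an RFE wrapper around an XGBRegressor; the estimator__* rows exist because scikit-learn routes nested parameters through double-underscore paths, which is what makes them reachable from the searchcv tool. A hedged sketch (it assumes the xgboost package and is not the tool's own code):

    from sklearn.feature_selection import RFE
    from sklearn.model_selection import GridSearchCV
    from xgboost import XGBRegressor

    rfe = RFE(estimator=XGBRegressor())
    # Nested parameters of the wrapped booster are set via 'estimator__<name>'.
    rfe.set_params(estimator__max_depth=5, estimator__learning_rate=0.05)
    # The same paths work as GridSearchCV grid keys, hence the '@' markers above.
    search = GridSearchCV(rfe, param_grid={"estimator__n_estimators": [50, 100]})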
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model01
Binary file test-data/glm_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model02
Binary file test-data/glm_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model03
Binary file test-data/glm_model03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model04
Binary file test-data/glm_model04 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model05
Binary file test-data/glm_model05 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model06
Binary file test-data/glm_model06 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model07
Binary file test-data/glm_model07 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_model08
Binary file test-data/glm_model08 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 20479602419382.055
+91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 21460309408632.004
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -11245419999724.842
+61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 14574106078789.26
+-206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -48782519807586.32
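The glm_result* fixtures pair the binary glm_model* pickles above with expected outputs: each row is the input features with the estimator's prediction appended as the final column. A hedged sketch of that pairing (the input filename is hypothetical, and this is not the suite's model_prediction.py verbatim):

    import pickle
    import numpy as np

    with open("test-data/glm_model01", "rb") as f:
        model = pickle.load(f)              # a fitted GLM-style regressor
    X = np.loadtxt("features.tabular")      # hypothetical 5-column input table
    np.savetxt("glm_result01", np.column_stack([X, model.predict(X)]),
               delimiter="\t")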
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result02
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result02 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result03
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result03 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result04
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result04 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.5282637592226301
+91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 0.5180352211818147
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 0.012682414140451959
+61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 0.1869842234155321
+-206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -1.6599360904302456
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result05
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result05 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result06
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result06 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result07
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result07 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6093152833692663
+91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 0.5963828164943974
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.07927429227257948
+61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 0.2621440442022235
+-206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -1.7330414645145749
diff -r 000000000000 -r af2624d5ab32 test-data/glm_result08
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/glm_result08 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/grid_scores_.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/grid_scores_.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,18 @@
+grid_scores_
+0.7634899597102532
+0.7953981831108754
+0.7937021172447345
+0.7951323776809974
+0.793206654688313
+0.8046265123256906
+0.7972524937034748
+0.8106427221191455
+0.8072746749161711
+0.8146665413082648
+0.8155998800333571
+0.8056801877422021
+0.8123573954396127
+0.8155472512482351
+0.8164562575257928
+0.8151250518677203
+0.8107710182153142
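A one-column table like this matches RFECV's grid_scores_ attribute in the scikit-learn releases this changeset targets: one cross-validated score per feature-subset size (later releases replaced it with cv_results_). A sketch, with the dataset shape assumed so that 17 features give the 17 scores seen above:

    from sklearn.datasets import make_regression
    from sklearn.feature_selection import RFECV
    from sklearn.svm import SVR

    X, y = make_regression(n_samples=100, n_features=17, random_state=0)
    selector = RFECV(SVR(kernel="linear"), cv=5).fit(X, y)
    with open("grid_scores_.tabular", "w") as out:
        out.write("grid_scores_\n")          # header line, as in the fixture
        out.writelines("%r\n" % s for s in selector.grid_scores_)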
diff -r 000000000000 -r af2624d5ab32 test-data/hamming_loss.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/hamming_loss.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+hamming_loss : 
+0.15384615384615385
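The metric fixtures here (hamming_loss.txt, and hinge_loss.txt and jaccard_similarity_score.txt below) share a two-line "name :" / value layout. A minimal sketch reproducing this file, with made-up labels chosen so the score matches the fixture:

    from sklearn.metrics import hamming_loss

    y_true = [0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1]
    y_pred = [0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0]   # 2 of 13 labels differ
    with open("hamming_loss.txt", "w") as out:
        out.write("hamming_loss : \n")
        out.write("%r\n" % hamming_loss(y_true, y_pred))  # 2/13 = 0.15384615384615385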
diff -r 000000000000 -r af2624d5ab32 test-data/hastie.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/hastie.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,12001 @@
[12,001-line hunk truncated in this capture: a tab-separated header row (feature columns 0-9 plus a trailing label column, also headed 0) followed by 12,000 rows of ten standard-normal features and a -1.0/1.0 target.]
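The file name, the ten Gaussian feature columns and the -1/+1 target are consistent with sklearn's make_hastie_10_2 generator; a sketch of how such a fixture could be regenerated (the random seed and float format are assumptions):

    import numpy as np
    from sklearn.datasets import make_hastie_10_2

    X, y = make_hastie_10_2(n_samples=12000, random_state=42)
    np.savetxt("hastie.txt", np.column_stack([X, y]), delimiter="\t",
               fmt="%.14f", comments="",
               header="\t".join(str(c) for c in list(range(10)) + [0]))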
diff -r 000000000000 -r af2624d5ab32 test-data/hinge_loss.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/hinge_loss.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+hinge_loss : 
+2.7688227126800844
diff -r 000000000000 -r af2624d5ab32 test-data/imblearn_X.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/imblearn_X.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,1001 @@
[1,001-line hunk truncated in this capture: a tab-separated header row (columns 0-19) followed by 1,000 rows of twenty continuous features used by the imbalanced-learn tests.]
diff -r 000000000000 -r af2624d5ab32 test-data/imblearn_y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/imblearn_y.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,1001 @@
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+0
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+0
+1
+1
+1
+1
+1
+0
+0
+1
+1
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+0
+0
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+0
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+0
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+0
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+0
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+0
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+0
+1
+1
+1
+0
+1
+1
+1
+1
+0
+0
+1
+1
+0
+1
+1
+1
+1
diff -r 000000000000 -r af2624d5ab32 test-data/jaccard_similarity_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/jaccard_similarity_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+jaccard_similarity_score : 
+0.8461538461538461
diff -r 000000000000 -r af2624d5ab32 test-data/keras01.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras01.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,90 @@
+{
+  "class_name": "Sequential",
+  "config": {
+    "name": "sequential_1",
+    "layers": [
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "batch_input_shape": [
+            null,
+            784
+          ],
+          "dtype": "float32",
+          "units": 32,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "VarianceScaling",
+            "config": {
+              "scale": 1.0,
+              "mode": "fan_avg",
+              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation_1",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "relu"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_2",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 10,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "VarianceScaling",
+            "config": {
+              "scale": 1.0,
+              "mode": "fan_avg",
+              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation_2",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "softmax"
+        }
+      }
+    ]
+  },
+  "keras_version": "2.3.1",
+  "backend": "tensorflow"
+}
\ No newline at end of file
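keras01.json is an architecture-only dump, with no weights. A sketch that rebuilds the same Sequential topology and serializes it; under Keras 2.3.1, to_json() emits layer configs like those above, likewise without a trailing newline:

    from keras.layers import Activation, Dense
    from keras.models import Sequential

    model = Sequential([
        Dense(32, input_shape=(784,)),   # dense_1: linear activation by default
        Activation("relu"),              # activation_1
        Dense(10),                       # dense_2
        Activation("softmax"),           # activation_2
    ])
    with open("keras01.json", "w") as out:
        out.write(model.to_json())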
diff -r 000000000000 -r af2624d5ab32 test-data/keras02.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras02.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,385 @@
[385-line hunk truncated in this capture: the Keras 2.3.1 functional "Model" JSON for model_1, with main_input and aux_input InputLayers, an Embedding(input_dim=10000, output_dim=512, input_length=100) feeding an LSTM(32), sigmoid one-unit Dense outputs dense_1 and dense_5, a concatenate_1 layer joining the two branches, and 64-unit relu layers dense_3 and dense_4; the file ends without a trailing newline.]
diff -r 000000000000 -r af2624d5ab32 test-data/keras03.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras03.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,1 @@
+{"class_name": "Sequential", "config": {"name": "sequential_1", "layers": [{"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "batch_input_shape": [null, 17], "dtype": "float32", "units": 100, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": 0}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.1, "noise_shape": null, "seed": 0}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": 0}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "keras_version": "2.2.4", "backend": "tensorflow"}
\ No newline at end of file
diff -r 000000000000 -r af2624d5ab32 test-data/keras04.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras04.json Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,90 @@
+{
+  "class_name": "Sequential",
+  "config": {
+    "name": "sequential_1",
+    "layers": [
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "batch_input_shape": [
+            null,
+            17
+          ],
+          "dtype": "float32",
+          "units": 32,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "VarianceScaling",
+            "config": {
+              "scale": 1.0,
+              "mode": "fan_avg",
+              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation_1",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "linear"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_2",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 1,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "VarianceScaling",
+            "config": {
+              "scale": 1.0,
+              "mode": "fan_avg",
+              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation_2",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "linear"
+        }
+      }
+    ]
+  },
+  "keras_version": "2.3.1",
+  "backend": "tensorflow"
+}
\ No newline at end of file
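keras03.json and keras04.json above are plain Keras architecture dumps ("class_name": "Sequential", keras_version 2.2.4 and 2.3.1), so they can be turned back into models with the stock model_from_json API. A minimal sketch, assuming a standalone Keras 2.x install with the TensorFlow backend named in the files:

import json
from keras.models import model_from_json

# Rebuild the architecture only; weights are stored separately
# (e.g. keras_save_weights01.h5 elsewhere in test-data).
with open("test-data/keras04.json") as fh:
    config = fh.read()

model = model_from_json(config)
model.summary()  # Dense(32) -> Activation(linear) -> Dense(1) -> Activation(linear)
print(json.loads(config)["keras_version"])  # 2.3.1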
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_model01
Binary file test-data/keras_batch_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_model02
Binary file test-data/keras_batch_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_model03
Binary file test-data/keras_batch_model03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_model04
Binary file test-data/keras_batch_model04 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_params01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras_batch_params01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,96 @@
+ Parameter Value
+@ amsgrad amsgrad: None
+@ batch_size batch_size: 32
+@ beta_1 beta_1: None
+@ beta_2 beta_2: None
+@ callbacks callbacks: [{'callback_selection': {'callback_type': 'None'}}]
+@ class_positive_factor class_positive_factor: 1.0
+@ config config: {'name': 'sequential_1', 'layers': [{'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable
+@ data_batch_generator "data_batch_generator: FastaDNABatchGenerator(fasta_path='to_be_determined', seed=999, seq_length=1000,
+                       shuffle=True)"
+@ decay decay: 0.0
+@ epochs epochs: 100
+@ layers_0_Dense layers_0_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None,
+@ layers_1_Activation layers_1_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_1', 'trainable': True, 'dtype': 'float32
+@ layers_2_Dense layers_2_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units'
+@ layers_3_Activation layers_3_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_2', 'trainable': True, 'dtype': 'float32
+@ loss loss: 'binary_crossentropy'
+@ lr lr: 0.01
+@ metrics metrics: ['acc']
+@ model_type model_type: 'sequential'
+@ momentum momentum: 0.0
+* n_jobs n_jobs: 1
+@ nesterov nesterov: False
+@ optimizer optimizer: 'sgd'
+@ prediction_steps prediction_steps: None
+@ rho rho: None
+@ schedule_decay schedule_decay: None
+@ seed seed: None
+@ steps_per_epoch steps_per_epoch: None
+@ validation_fraction validation_fraction: 0.1
+@ validation_steps validation_steps: None
+@ verbose verbose: 0
+* data_batch_generator__fasta_path data_batch_generator__fasta_path: 'to_be_determined'
+@ data_batch_generator__seed data_batch_generator__seed: 999
+@ data_batch_generator__seq_length data_batch_generator__seq_length: 1000
+@ data_batch_generator__shuffle data_batch_generator__shuffle: True
+* layers_0_Dense__class_name layers_0_Dense__class_name: 'Dense'
+@ layers_0_Dense__config layers_0_Dense__config: {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None, 784], 'dtype': 'float32', 'units'
+@ layers_0_Dense__config__activation layers_0_Dense__config__activation: 'linear'
+@ layers_0_Dense__config__activity_regularizer layers_0_Dense__config__activity_regularizer: None
+@ layers_0_Dense__config__batch_input_shape layers_0_Dense__config__batch_input_shape: [None, 784]
+@ layers_0_Dense__config__bias_constraint layers_0_Dense__config__bias_constraint: None
+@ layers_0_Dense__config__bias_initializer layers_0_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_0_Dense__config__bias_initializer__class_name layers_0_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_0_Dense__config__bias_initializer__config layers_0_Dense__config__bias_initializer__config: {}
+@ layers_0_Dense__config__bias_regularizer layers_0_Dense__config__bias_regularizer: None
+@ layers_0_Dense__config__dtype layers_0_Dense__config__dtype: 'float32'
+@ layers_0_Dense__config__kernel_constraint layers_0_Dense__config__kernel_constraint: None
+@ layers_0_Dense__config__kernel_initializer layers_0_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_0_Dense__config__kernel_initializer__class_name layers_0_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_0_Dense__config__kernel_initializer__config layers_0_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_0_Dense__config__kernel_initializer__config__distribution layers_0_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_0_Dense__config__kernel_initializer__config__mode layers_0_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_0_Dense__config__kernel_initializer__config__scale layers_0_Dense__config__kerne[... a few lines truncated in this dump ...]s_0_Dense__config__name layers_0_Dense__config__name: 'dense_1'
+@ layers_0_Dense__config__trainable layers_0_Dense__config__trainable: True
+@ layers_0_Dense__config__units layers_0_Dense__config__units: 32
+@ layers_0_Dense__config__use_bias layers_0_Dense__config__use_bias: True
+* layers_1_Activation__class_name layers_1_Activation__class_name: 'Activation'
+@ layers_1_Activation__config layers_1_Activation__config: {'name': 'activation_1', 'trainable': True, 'dtype': 'float32', 'activation': 'relu'}
+@ layers_1_Activation__config__activation layers_1_Activation__config__activation: 'relu'
+@ layers_1_Activation__config__dtype layers_1_Activation__config__dtype: 'float32'
+* layers_1_Activation__config__name layers_1_Activation__config__name: 'activation_1'
+@ layers_1_Activation__config__trainable layers_1_Activation__config__trainable: True
+* layers_2_Dense__class_name layers_2_Dense__class_name: 'Dense'
+@ layers_2_Dense__config layers_2_Dense__config: {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units': 10, 'activation': 'linear', 'use
+@ layers_2_Dense__config__activation layers_2_Dense__config__activation: 'linear'
+@ layers_2_Dense__config__activity_regularizer layers_2_Dense__config__activity_regularizer: None
+@ layers_2_Dense__config__bias_constraint layers_2_Dense__config__bias_constraint: None
+@ layers_2_Dense__config__bias_initializer layers_2_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_2_Dense__config__bias_initializer__class_name layers_2_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_2_Dense__config__bias_initializer__config layers_2_Dense__config__bias_initializer__config: {}
+@ layers_2_Dense__config__bias_regularizer layers_2_Dense__config__bias_regularizer: None
+@ layers_2_Dense__config__dtype layers_2_Dense__config__dtype: 'float32'
+@ layers_2_Dense__config__kernel_constraint layers_2_Dense__config__kernel_constraint: None
+@ layers_2_Dense__config__kernel_initializer layers_2_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_2_Dense__config__kernel_initializer__class_name layers_2_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_2_Dense__config__kernel_initializer__config layers_2_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_2_Dense__config__kernel_initializer__config__distribution layers_2_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_2_Dense__config__kernel_initializer__config__mode layers_2_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_2_Dense__config__kernel_initializer__config__scale layers_2_Dense__config__kernel_initializer__config__scale: 1.0
+@ layers_2_Dense__config__kernel_initializer__config__seed layers_2_Dense__config__kernel_initializer__config__seed: None
+@ layers_2_Dense__config__kernel_regularizer layers_2_Dense__config__kernel_regularizer: None
+* layers_2_Dense__config__name layers_2_Dense__config__name: 'dense_2'
+@ layers_2_Dense__config__trainable layers_2_Dense__config__trainable: True
+@ layers_2_Dense__config__units layers_2_Dense__config__units: 10
+@ layers_2_Dense__config__use_bias layers_2_Dense__config__use_bias: True
+* layers_3_Activation__class_name layers_3_Activation__class_name: 'Activation'
+@ layers_3_Activation__config layers_3_Activation__config: {'name': 'activation_2', 'trainable': True, 'dtype': 'float32', 'activation': 'softmax'}
+@ layers_3_Activation__config__activation layers_3_Activation__config__activation: 'softmax'
+@ layers_3_Activation__config__dtype layers_3_Activation__config__dtype: 'float32'
+* layers_3_Activation__config__name layers_3_Activation__config__name: 'activation_2'
+@ layers_3_Activation__config__trainable layers_3_Activation__config__trainable: True
+ Note: @, params eligible for search in searchcv tool.
diff -r 000000000000 -r af2624d5ab32 test-data/keras_batch_params04.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras_batch_params04.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,91 @@
+ Parameter Value
+@ amsgrad amsgrad: None
+@ batch_size batch_size: 32
+@ beta_1 beta_1: None
+@ beta_2 beta_2: None
+@ callbacks callbacks: [{'callback_selection': {'callback_type': 'None'}}]
+@ class_positive_factor class_positive_factor: 1.0
+@ config config: {'name': 'sequential_1', 'layers': [{'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable
+@ data_batch_generator data_batch_generator: None
+@ decay decay: 0.0
+@ epochs epochs: 100
+@ layers_0_Dense layers_0_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None,
+@ layers_1_Activation layers_1_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_1', 'trainable': True, 'dtype': 'float32
+@ layers_2_Dense layers_2_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units'
+@ layers_3_Activation layers_3_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_2', 'trainable': True, 'dtype': 'float32
+@ loss loss: 'binary_crossentropy'
+@ lr lr: 0.01
+@ metrics metrics: ['acc']
+@ model_type model_type: 'sequential'
+@ momentum momentum: 0.0
+* n_jobs n_jobs: 1
+@ nesterov nesterov: False
+@ optimizer optimizer: 'sgd'
+@ prediction_steps prediction_steps: None
+@ rho rho: None
+@ schedule_decay schedule_decay: None
+@ seed seed: None
+@ steps_per_epoch steps_per_epoch: None
+@ validation_fraction validation_fraction: 0.1
+@ validation_steps validation_steps: None
+@ verbose verbose: 0
+* layers_0_Dense__class_name layers_0_Dense__class_name: 'Dense'
+@ layers_0_Dense__config layers_0_Dense__config: {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None, 784], 'dtype': 'float32', 'units'
+@ layers_0_Dense__config__activation layers_0_Dense__config__activation: 'linear'
+@ layers_0_Dense__config__activity_regularizer layers_0_Dense__config__activity_regularizer: None
+@ layers_0_Dense__config__batch_input_shape layers_0_Dense__config__batch_input_shape: [None, 784]
+@ layers_0_Dense__config__bias_constraint layers_0_Dense__config__bias_constraint: None
+@ layers_0_Dense__config__bias_initializer layers_0_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_0_Dense__config__bias_initializer__class_name layers_0_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_0_Dense__config__bias_initializer__config layers_0_Dense__config__bias_initializer__config: {}
+@ layers_0_Dense__config__bias_regularizer layers_0_Dense__config__bias_regularizer: None
+@ layers_0_Dense__config__dtype layers_0_Dense__config__dtype: 'float32'
+@ layers_0_Dense__config__kernel_constraint layers_0_Dense__config__kernel_constraint: None
+@ layers_0_Dense__config__kernel_initializer layers_0_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_0_Dense__config__kernel_initializer__class_name layers_0_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_0_Dense__config__kernel_initializer__config layers_0_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_0_Dense__config__kernel_initializer__config__distribution layers_0_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_0_Dense__config__kernel_initializer__config__mode layers_0_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_0_Dense__config__kernel_initializer__config__scale layers_0_Dense__config__kernel_initializer__config__scale: 1.0
+@ layers_0_Dense__config__kernel_initializer__config__seed layers_0_Dense__config__kernel_initializer__config__seed: None
+@ layers_0_Dense__config__kernel_regularizer layers_0_Dense__config__kernel_regularizer: None
+* layers_0_Dense__config__name layers_0_Dense__config__name: 'dense_1'
+@ layers_0_Dense__config__trainable layers_0_Dense__config__trainable: True
+@ layers_0_Dense__config__units layers_0_Dense__config__units: 32
+@ layers_0_Dense__config__use_bias layers_0_Dense__config__use_bias: True
+* layers_1_Activation__class_name layers_1_Activation__class_name: 'Activation'
+@ layers_1_Activation__config layers_1_Activation__config: {'name': 'activation_1', 'trainable': True, 'dtype': 'float32', 'activation': 'relu'}
+@ layers_1_Activation__config__activation layers_1_Activation__config__activation: 'relu'
+@ layers_1_Activation__config__dtype layers_1_Activation__config__dtype: 'float32'
+* layers_1_Activation__config__name layers_1_Activation__config__name: 'activation_1'
+@ layers_1_Activation__config__trainable layers_1_Activation__config__trainable: True
+* layers_2_Dense__class_name layers_2_Dense__class_name: 'Dense'
+@ layers_2_Dense__config layers_2_Dense__config: {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units': 10, 'activation': 'linear', 'use
+@ layers_2_Dense__config__activation layers_2_Dense__config__activation: 'linear'
+@ layers_2_Dense__config__activity_regularizer layers_2_Dense__config__activity_regularizer: None
+@ layers_2_Dense__config__bias_constraint layers_2_Dense__config__bias_constraint: None
+@ layers_2_Dense__config__bias_initializer layers_2_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_2_Dense__config__bias_initializer__class_name layers_2_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_2_Dense__config__bias_initializer__config layers_2_Dense__config__bias_initializer__config: {}
+@ layers_2_Dense__config__bias_regularizer layers_2_Dense__config__bias_regularizer: None
+@ layers_2_Dense__config__dtype layers_2_Dense__config__dtype: 'float32'
+@ layers_2_Dense__config__kernel_constraint layers_2_Dense__config__kernel_constraint: None
+@ layers_2_Dense__config__kernel_initializer layers_2_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_2_Dense__config__kernel_initializer__class_name layers_2_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_2_Dense__config__kernel_initializer__config layers_2_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_2_Dense__config__kernel_initializer__config__distribution layers_2_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_2_Dense__config__kernel_initializer__config__mode layers_2_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_2_Dense__config__kernel_initializer__config__scale layers_2_Dense__config__kernel_initializer__config__scale: 1.0
+@ layers_2_Dense__config__kernel_initializer__config__seed layers_2_Dense__config__kernel_initializer__config__seed: None
+@ layers_2_Dense__config__kernel_regularizer layers_2_Dense__config__kernel_regularizer: None
+* layers_2_Dense__config__name layers_2_Dense__config__name: 'dense_2'
+@ layers_2_Dense__config__trainable layers_2_Dense__config__trainable: True
+@ layers_2_Dense__config__units layers_2_Dense__config__units: 10
+@ layers_2_Dense__config__use_bias layers_2_Dense__config__use_bias: True
+* layers_3_Activation__class_name layers_3_Activation__class_name: 'Activation'
+@ layers_3_Activation__config layers_3_Activation__config: {'name': 'activation_2', 'trainable': True, 'dtype': 'float32', 'activation': 'softmax'}
+@ layers_3_Activation__config__activation layers_3_Activation__config__activation: 'softmax'
+@ layers_3_Activation__config__dtype layers_3_Activation__config__dtype: 'float32'
+* layers_3_Activation__config__name layers_3_Activation__config__name: 'activation_2'
+@ layers_3_Activation__config__trainable layers_3_Activation__config__trainable: True
+ Note: @, params eligible for search in searchcv tool.
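The keras_batch_params*.tabular fixtures list every estimator parameter under the double-underscore path convention scikit-learn uses for nested parameters in get_params()/set_params(); rows flagged @ are the ones the searchcv tool will accept in a search grid, while * rows appear fixed. A minimal sketch of the same convention on a plain scikit-learn pipeline (the names below are sklearn's own, not the Galaxy keras wrapper's):

from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

pipe = Pipeline([("scale", StandardScaler()), ("clf", LogisticRegression())])

# Nested parameters get '<step>__<param>' paths, exactly like the
# 'layers_0_Dense__config__units' style rows in the tabulars above.
print(sorted(pipe.get_params(deep=True)))

X, y = make_classification(n_samples=100, random_state=0)
grid = GridSearchCV(pipe, {"clf__C": [0.1, 1.0, 10.0]}, cv=3)
grid.fit(X, y)
print(grid.best_params_)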
diff -r 000000000000 -r af2624d5ab32 test-data/keras_model01
Binary file test-data/keras_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_model02
Binary file test-data/keras_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_model04
Binary file test-data/keras_model04 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_params04.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras_params04.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,87 @@
+ Parameter Value
+@ amsgrad amsgrad: False
+@ batch_size batch_size: 32
+@ beta_1 beta_1: 0.9
+@ beta_2 beta_2: 0.999
+@ callbacks callbacks: [{'callback_selection': {'callback_type': 'None'}}]
+@ config config: {'name': 'sequential_1', 'layers': [{'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable
+@ decay decay: 0.0
+@ epochs epochs: 100
+@ layers_0_Dense layers_0_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None,
+@ layers_1_Activation layers_1_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_1', 'trainable': True, 'dtype': 'float32
+@ layers_2_Dense layers_2_Dense: {'class_name': 'Dense', 'config': {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units'
+@ layers_3_Activation layers_3_Activation: {'class_name': 'Activation', 'config': {'name': 'activation_2', 'trainable': True, 'dtype': 'float32
+@ loss loss: 'mean_squared_error'
+@ lr lr: 0.001
+@ metrics metrics: ['mse']
+@ model_type model_type: 'sequential'
+@ momentum momentum: None
+@ nesterov nesterov: None
+@ optimizer optimizer: 'adam'
+@ rho rho: None
+@ schedule_decay schedule_decay: None
+@ seed seed: 42
+@ steps_per_epoch steps_per_epoch: None
+@ validation_fraction validation_fraction: 0.1
+@ validation_steps validation_steps: None
+@ verbose verbose: 0
+* layers_0_Dense__class_name layers_0_Dense__class_name: 'Dense'
+@ layers_0_Dense__config layers_0_Dense__config: {'name': 'dense_1', 'trainable': True, 'batch_input_shape': [None, 17], 'dtype': 'float32', 'units':
+@ layers_0_Dense__config__activation layers_0_Dense__config__activation: 'linear'
+@ layers_0_Dense__config__activity_regularizer layers_0_Dense__config__activity_regularizer: None
+@ layers_0_Dense__config__batch_input_shape layers_0_Dense__config__batch_input_shape: [None, 17]
+@ layers_0_Dense__config__bias_constraint layers_0_Dense__config__bias_constraint: None
+@ layers_0_Dense__config__bias_initializer layers_0_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_0_Dense__config__bias_initializer__class_name layers_0_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_0_Dense__config__bias_initializer__config layers_0_Dense__config__bias_initializer__config: {}
+@ layers_0_Dense__config__bias_regularizer layers_0_Dense__config__bias_regularizer: None
+@ layers_0_Dense__config__dtype layers_0_Dense__config__dtype: 'float32'
+@ layers_0_Dense__config__kernel_constraint layers_0_Dense__config__kernel_constraint: None
+@ layers_0_Dense__config__kernel_initializer layers_0_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_0_Dense__config__kernel_initializer__class_name layers_0_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_0_Dense__config__kernel_initializer__config layers_0_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_0_Dense__config__kernel_initializer__config__distribution layers_0_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_0_Dense__config__kernel_initializer__config__mode layers_0_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_0_Dense__config__kernel_initializer__config__scale layers_0_Dense__config__kernel_initializer__config__scale: 1.0
+@ layers_0_Dense__config__kernel_initializer__config__seed layers_0_Dense__config__kernel_initializer__config__seed: None
+@ layers_0_Dense__config__kernel_regularizer layers_0_Dense__config__kernel_regularizer: None
+* layers_0_Dense__config__name layers_0_Dense__config__name: 'dense_1'
+@ layers_0_Dense__config__trainable layers_0_Dense__config__trainable: True
+@ layers_0_Dense__config__units layers_0_Dense__config__units: 32
+@ layers_0_Dense__config__use_bias layers_0_Dense__config__use_bias: True
+* layers_1_Activation__class_name layers_1_Activation__class_name: 'Activation'
+@ layers_1_Activation__config layers_1_Activation__config: {'name': 'activation_1', 'trainable': True, 'dtype': 'float32', 'activation': 'linear'}
+@ layers_1_Activation__config__activation layers_1_Activation__config__activation: 'linear'
+@ layers_1_Activation__config__dtype layers_1_Activation__config__dtype: 'float32'
+* layers_1_Activation__config__name layers_1_Activation__config__name: 'activation_1'
+@ layers_1_Activation__config__trainable layers_1_Activation__config__trainable: True
+* layers_2_Dense__class_name layers_2_Dense__class_name: 'Dense'
+@ layers_2_Dense__config layers_2_Dense__config: {'name': 'dense_2', 'trainable': True, 'dtype': 'float32', 'units': 1, 'activation': 'linear', 'use_
+@ layers_2_Dense__config__activation layers_2_Dense__config__activation: 'linear'
+@ layers_2_Dense__config__activity_regularizer layers_2_Dense__config__activity_regularizer: None
+@ layers_2_Dense__config__bias_constraint layers_2_Dense__config__bias_constraint: None
+@ layers_2_Dense__config__bias_initializer layers_2_Dense__config__bias_initializer: {'class_name': 'Zeros', 'config': {}}
+* layers_2_Dense__config__bias_initializer__class_name layers_2_Dense__config__bias_initializer__class_name: 'Zeros'
+@ layers_2_Dense__config__bias_initializer__config layers_2_Dense__config__bias_initializer__config: {}
+@ layers_2_Dense__config__bias_regularizer layers_2_Dense__config__bias_regularizer: None
+@ layers_2_Dense__config__dtype layers_2_Dense__config__dtype: 'float32'
+@ layers_2_Dense__config__kernel_constraint layers_2_Dense__config__kernel_constraint: None
+@ layers_2_Dense__config__kernel_initializer layers_2_Dense__config__kernel_initializer: {'class_name': 'VarianceScaling', 'config': {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'unifo
+* layers_2_Dense__config__kernel_initializer__class_name layers_2_Dense__config__kernel_initializer__class_name: 'VarianceScaling'
+@ layers_2_Dense__config__kernel_initializer__config layers_2_Dense__config__kernel_initializer__config: {'scale': 1.0, 'mode': 'fan_avg', 'distribution': 'uniform', 'seed': None}
+@ layers_2_Dense__config__kernel_initializer__config__distribution layers_2_Dense__config__kernel_initializer__config__distribution: 'uniform'
+@ layers_2_Dense__config__kernel_initializer__config__mode layers_2_Dense__config__kernel_initializer__config__mode: 'fan_avg'
+@ layers_2_Dense__config__kernel_initializer__config__scale layers_2_Dense__config__kernel_initializer__config__scale: 1.0
+@ layers_2_Dense__config__kernel_initializer__config__seed layers_2_Dense__config__kernel_initializer__config__seed: None
+@ layers_2_Dense__config__kernel_regularizer layers_2_Dense__config__kernel_regularizer: None
+* layers_2_Dense__config__name layers_2_Dense__config__name: 'dense_2'
+@ layers_2_Dense__config__trainable layers_2_Dense__config__trainable: True
+@ layers_2_Dense__config__units layers_2_Dense__config__units: 1
+@ layers_2_Dense__config__use_bias layers_2_Dense__config__use_bias: True
+* layers_3_Activation__class_name layers_3_Activation__class_name: 'Activation'
+@ layers_3_Activation__config layers_3_Activation__config: {'name': 'activation_2', 'trainable': True, 'dtype': 'float32', 'activation': 'linear'}
+@ layers_3_Activation__config__activation layers_3_Activation__config__activation: 'linear'
+@ layers_3_Activation__config__dtype layers_3_Activation__config__dtype: 'float32'
+* layers_3_Activation__config__name layers_3_Activation__config__name: 'activation_2'
+@ layers_3_Activation__config__trainable layers_3_Activation__config__trainable: True
+ Note: @, params eligible for search in searchcv tool.
diff -r 000000000000 -r af2624d5ab32 test-data/keras_prefitted01.zip
Binary file test-data/keras_prefitted01.zip has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_save_weights01.h5
Binary file test-data/keras_save_weights01.h5 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/keras_train_eval_y_true02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras_train_eval_y_true02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,54 @@
+0
+54
+54
+41
+48
+46
+74
+57
+52
+54
+54
+45
+57
+54
+51
+68
+71
+68
+68
+40
+46
+79
+46
+49
+55
+68
+76
+85
+42
+79
+77
+80
+64
+59
+48
+67
+50
+77
+88
+76
+75
+66
+61
+89
+49
+59
+71
+60
+55
+77
+75
+54
+75
+60
diff -r 000000000000 -r af2624d5ab32 test-data/lda_model01
Binary file test-data/lda_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/lda_model02
Binary file test-data/lda_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/lda_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lda_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/lda_prediction_result02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lda_prediction_result02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
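In the lda_prediction_result0*.tabular files the first four columns are the input features and the last column is the predicted class label. A hedged sketch of producing a table of that shape with scikit-learn's LinearDiscriminantAnalysis (the training data, labels, and output path here are invented, not the tool's actual inputs):

import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

rng = np.random.RandomState(0)
X_train = rng.normal(size=(40, 4))
y_train = (X_train[:, 0] > 0).astype(int)  # hypothetical binary labels

lda = LinearDiscriminantAnalysis().fit(X_train, y_train)

# Append the predicted class as a final column, as in the fixtures above.
X_new = rng.normal(size=(5, 4))
table = np.column_stack([X_new, lda.predict(X_new)])
np.savetxt("lda_prediction_result.tabular", table, delimiter="\t", fmt="%s")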
diff -r 000000000000 -r af2624d5ab32 test-data/le_input_w_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/le_input_w_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+Class
+Liverpool
+Real Madrid
+Bayern Munich
+A.C. Milan
diff -r 000000000000 -r af2624d5ab32 test-data/le_input_wo_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/le_input_wo_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+Liverpool
+Real Madrid
+Bayern Munich
+A.C. Milan
diff -r 000000000000 -r af2624d5ab32 test-data/le_output.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/le_output.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+2
+3
+1
+0
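The le_input_*.tabular and le_output.tabular fixtures pair up directly: each class name is mapped to an integer code. scikit-learn's LabelEncoder assigns codes in sorted class order, which matches the fixture ('A.C. Milan' -> 0 through 'Real Madrid' -> 3):

from sklearn.preprocessing import LabelEncoder

teams = ["Liverpool", "Real Madrid", "Bayern Munich", "A.C. Milan"]

le = LabelEncoder()
print(le.fit_transform(teams).tolist())  # [2, 3, 1, 0], as in le_output.tabular
print(le.classes_.tolist())              # ['A.C. Milan', 'Bayern Munich', 'Liverpool', 'Real Madrid']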
diff -r 000000000000 -r af2624d5ab32 test-data/lgb_class_model.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_class_model.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,151 @@
+tree
+version=v3
+num_class=1
+num_tree_per_iteration=1
+label_index=0
+max_feature_idx=3
+objective=binary sigmoid:1
+feature_names=Column_0 Column_1 Column_2 Column_3
+feature_infos=none none none none
+tree_sizes=228
+
+Tree=0
+num_leaves=1
+num_cat=0
+split_feature=
+split_gain=
+threshold=
+decision_type=
+left_child=
+right_child=
+leaf_value=-0.40546510810816427
+leaf_weight=
+leaf_count=
+internal_value=
+internal_weight=
+internal_count=
+shrinkage=1
+
+
+end of trees
+
+feature importances:
+
+parameters:
+[boosting: gbdt]
+[objective: binary]
+[metric: binary_logloss]
+[tree_learner: serial]
+[device_type: cpu]
+[data: ]
+[valid: ]
+[num_iterations: 100]
+[learning_rate: 0.02]
+[num_leaves: 32]
+[num_threads: 0]
+[max_depth: 8]
+[min_data_in_leaf: 20]
+[min_sum_hessian_in_leaf: 39]
+[bagging_fraction: 0.9]
+[pos_bagging_fraction: 1]
+[neg_bagging_fraction: 1]
+[bagging_freq: 0]
+[bagging_seed: 18467]
+[feature_fraction: 0.9]
+[feature_fraction_bynode: 1]
+[feature_fraction_seed: 26500]
+[early_stopping_round: 0]
+[first_metric_only: 0]
+[max_delta_step: 0]
+[lambda_l1: 0.04]
+[lambda_l2: 0.07]
+[min_gain_to_split: 0.02]
+[drop_rate: 0.1]
+[max_drop: 50]
+[skip_drop: 0.5]
+[xgboost_dart_mode: 0]
+[uniform_drop: 0]
+[drop_seed: 6334]
+[top_rate: 0.2]
+[other_rate: 0.1]
+[min_data_per_group: 100]
+[max_cat_threshold: 32]
+[cat_l2: 10]
+[cat_smooth: 10]
+[max_cat_to_onehot: 4]
+[top_k: 20]
+[monotone_constraints: ]
+[feature_contri: ]
+[forcedsplits_filename: ]
+[forcedbins_filename: ]
+[refit_decay_rate: 0.9]
+[cegb_tradeoff: 1]
+[cegb_penalty_split: 0]
+[cegb_penalty_feature_lazy: ]
+[cegb_penalty_feature_coupled: ]
+[verbosity: -1]
+[max_bin: 255]
+[max_bin_by_feature: ]
+[min_data_in_bin: 3]
+[bin_construct_sample_cnt: 200000]
+[histogram_pool_size: -1]
+[data_random_seed: 41]
+[output_model: LightGBM_model.txt]
+[snapshot_freq: -1]
+[input_model: ]
+[output_result: LightGBM_predict_result.txt]
+[initscore_filename: ]
+[valid_data_initscores: ]
+[pre_partition: 0]
+[enable_bundle: 1]
+[max_conflict_rate: 0]
+[is_enable_sparse: 1]
+[sparse_threshold: 0.8]
+[use_missing: 1]
+[zero_as_missing: 0]
+[two_round: 0]
+[save_binary: 0]
+[header: 0]
+[label_column: ]
+[weight_column: ]
+[group_column: ]
+[ignore_column: ]
+[categorical_feature: ]
+[predict_raw_score: 0]
+[predict_leaf_index: 0]
+[predict_contrib: 0]
+[num_iteration_predict: -1]
+[pred_early_stop: 0]
+[pred_early_stop_freq: 10]
+[pred_early_stop_margin: 10]
+[convert_model_language: ]
+[convert_model: gbdt_prediction.cpp]
+[num_class: 1]
+[is_unbalance: 0]
+[scale_pos_weight: 1]
+[sigmoid: 1]
+[boost_from_average: 1]
+[reg_sqrt: 0]
+[alpha: 0.9]
+[fair_c: 1]
+[poisson_max_delta_step: 0.7]
+[tweedie_variance_power: 1.5]
+[max_position: 20]
+[lambdamart_norm: 1]
+[label_gain: ]
+[metric_freq: 1]
+[is_provide_training_metric: 0]
+[eval_at: ]
+[multi_error_top_k: 1]
+[num_machines: 1]
+[local_listen_port: 12400]
+[time_out: 120]
+[machine_list_filename: ]
+[machines: ]
+[gpu_platform_id: -1]
+[gpu_device_id: -1]
+[gpu_use_dp: 0]
+
+end of parameters
+
+pandas_categorical:null
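lgb_class_model.txt is a LightGBM text-format model dump (binary objective; the dump holds a single one-leaf tree, so every prediction is the same constant). Files in this format can be reloaded through the Booster API; a minimal sketch, assuming LightGBM is installed and four feature columns as in feature_names above (the input values are invented):

import numpy as np
import lightgbm as lgb

booster = lgb.Booster(model_file="test-data/lgb_class_model.txt")

X_new = np.zeros((2, 4))       # Column_0 .. Column_3
print(booster.predict(X_new))  # sigmoid of the single leaf value, 0.4 per row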
diff -r 000000000000 -r af2624d5ab32 test-data/lgb_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed predicted
+2016 9 19 68 69 69.7 65 74 71 88 0 1 0 0 0 0 0 71.89319490976423
+2016 4 14 60 59 58.1 57 63 58 66 0 0 0 0 1 0 0 59.01499037390416
[... further prediction rows truncated in this dump ...]
+2016 10 14 66 60 60.2 56 64 60 78 1 0 0 0 0 0 0 59.60815096341522
+2016 4 15 59 59 58.3 58 61 60 40 1 0 0 0 0 0 0 59.51666216464264
diff -r 000000000000 -r af2624d5ab32 test-data/lgb_regr_model.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_regr_model.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,180142 @@
+tree
+version=v3
+num_class=1
+num_tree_per_iteration=1
+label_index=0
+max_feature_idx=16
+objective=regression
+feature_names=Column_0 Column_1 Column_2 Column_3 Column_4 Column_5 Column_6 Column_7 Column_8 Column_9 Column_10 Column_11 Column_12 Column_13 Column_14 Column_15 Column_16
+feature_infos=none [1:12] [1:31] [35:89] [35:117] [45.100000000000001:77.400000000000006] [41:77] [46:81] [44:79] [28:95] [0:1] [0:1] [0:1] [0:1] [0:1] [0:1] [0:1]
[... tree_sizes and the dumps of Tree=0 through Tree=9999 truncated in this dump ...]
+end of trees
+
+feature importances:
+Column_9=8322
+Column_2=6585
+Column_5=5272
+Column_4=4915
+Column_6=4114
+Column_3=3831
+Column_1=3507
+Column_7=2717
+Column_8=2340
+
+parameters:
[... the same LightGBM parameter block as in lgb_class_model.txt, with objective: regression and an empty metric, truncated in this dump ...]
+
+end of parameters
+
+pandas_categorical:null
diff -r 000000000000 -r af2624d5ab32 test-data/log_loss.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/log_loss.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+log_loss : 
+3.7248735402728403
diff -r 000000000000 -r af2624d5ab32 test-data/matthews_corrcoef.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/matthews_corrcoef.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+matthews_corrcoef : 
+1.0
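log_loss.txt and matthews_corrcoef.txt show the two-line layout of the metric outputs: the metric name with a trailing " : " on the first line, the value on the second. A minimal sketch of writing that format with scikit-learn (the labels are invented examples):

from sklearn.metrics import matthews_corrcoef

y_true = [0, 1, 1, 0]
y_pred = [0, 1, 1, 0]

# Two-line "<name> : " / "<value>" layout, as in the fixtures above.
with open("matthews_corrcoef.txt", "w") as fh:
    fh.write("matthews_corrcoef : \n%s\n" % matthews_corrcoef(y_true, y_pred))  # 1.0 here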
diff -r 000000000000 -r af2624d5ab32 test-data/mba_input_int_w.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_input_int_w.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+Transactions
+10 11 12 13 14 15
+16 11 12 13 14 15
+10 17 13 14
+10 18 19 13 15
+19 11 11 13 20 14
diff -r 000000000000 -r af2624d5ab32 test-data/mba_input_int_wo.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_input_int_wo.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+10 11 12 13 14 15
+16 11 12 13 14 15
+10 17 13 14
+10 18 19 13 15
+19 11 11 13 20 14
diff -r 000000000000 -r af2624d5ab32 test-data/mba_input_str_w.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_input_str_w.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+Transactions
+Milk Onion Nutmeg Kidney Beans Eggs Yogurt
+Dill Onion Nutmeg Kidney Beans Eggs Yogurt
+Milk Apple Kidney Beans Eggs
+Milk Unicorn Corn Kidney Beans Yogurt
+Corn Onion Onion Kidney Beans Ice cream Eggs
diff -r 000000000000 -r af2624d5ab32 test-data/mba_input_str_wo.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_input_str_wo.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+Milk Onion Nutmeg Kidney Beans Eggs Yogurt
+Dill Onion Nutmeg Kidney Beans Eggs Yogurt
+Milk Apple Kidney Beans Eggs
+Milk Unicorn Corn Kidney Beans Yogurt
+Corn Onion Onion Kidney Beans Ice cream Eggs
diff -r 000000000000 -r af2624d5ab32 test-data/mba_out_str.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_out_str.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,7 @@
+antecedents consequents antecedent support consequent support support confidence lift leverage conviction
+['Eggs'] ['Kidney Beans', 'Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Eggs'] ['Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Eggs', 'Kidney Beans'] ['Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Kidney Beans', 'Onion'] ['Eggs'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['Onion'] ['Eggs'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['Onion'] ['Eggs', 'Kidney Beans'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
diff -r 000000000000 -r af2624d5ab32 test-data/mba_output_int.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_output_int.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,7 @@
+antecedents consequents antecedent support consequent support support confidence lift leverage conviction
+['11'] ['13', '14'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['11'] ['14'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['11', '13'] ['14'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['13', '14'] ['11'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['14'] ['11'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['14'] ['11', '13'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
diff -r 000000000000 -r af2624d5ab32 test-data/mba_output_str.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mba_output_str.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,7 @@
+antecedents consequents antecedent support consequent support support confidence lift leverage conviction
+['Eggs'] ['Kidney Beans', 'Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Eggs'] ['Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Eggs', 'Kidney Beans'] ['Onion'] 0.8 0.6 0.6 0.7499999999999999 1.2499999999999998 0.12 1.5999999999999994
+['Kidney Beans', 'Onion'] ['Eggs'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['Onion'] ['Eggs'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
+['Onion'] ['Eggs', 'Kidney Beans'] 0.6 0.8 0.6 1.0 1.25 0.12 inf
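The mba_* fixtures exercise the repository's market-basket (association rules) tool: delimited transactions in, a table of antecedents and consequents with antecedent support, consequent support, support, confidence, lift, leverage, and conviction out. The same columns can be reproduced with mlxtend directly; a minimal sketch over the string transactions above (the min_support and lift thresholds are illustrative guesses, not necessarily the tool's defaults):

import pandas as pd
from mlxtend.frequent_patterns import apriori, association_rules
from mlxtend.preprocessing import TransactionEncoder

transactions = [
    ["Milk", "Onion", "Nutmeg", "Kidney Beans", "Eggs", "Yogurt"],
    ["Dill", "Onion", "Nutmeg", "Kidney Beans", "Eggs", "Yogurt"],
    ["Milk", "Apple", "Kidney Beans", "Eggs"],
    ["Milk", "Unicorn", "Corn", "Kidney Beans", "Yogurt"],
    ["Corn", "Onion", "Onion", "Kidney Beans", "Ice cream", "Eggs"],
]

# One-hot encode the transactions, then mine frequent itemsets and rules.
te = TransactionEncoder()
onehot = pd.DataFrame(te.fit_transform(transactions), columns=te.columns_)

frequent = apriori(onehot, min_support=0.6, use_colnames=True)
rules = association_rules(frequent, metric="lift", min_threshold=1.2)
print(rules[["antecedents", "consequents", "support", "confidence", "lift"]])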
diff -r 000000000000 -r af2624d5ab32 test-data/ml_confusion_predicted.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_confusion_predicted.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,34 @@
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/ml_confusion_true.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_confusion_true.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,34 @@
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+1
+1
+0
+0
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+0
+0
+0
+0
+0
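ml_confusion_predicted.tabular and ml_confusion_true.tabular feed the confusion-matrix visualization (ml_confusion_viz.png below). The counts behind that image can be checked with scikit-learn; a minimal sketch, assuming the two single-column tabulars are read as flat integer arrays from the test-data layout:

import numpy as np
from sklearn.metrics import confusion_matrix

y_true = np.loadtxt("test-data/ml_confusion_true.tabular", dtype=int)
y_pred = np.loadtxt("test-data/ml_confusion_predicted.tabular", dtype=int)

print(confusion_matrix(y_true, y_pred))  # rows: true class, columns: predicted class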
diff -r 000000000000 -r af2624d5ab32 test-data/ml_confusion_viz.png
Binary file test-data/ml_confusion_viz.png has changed
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis01.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_vis01.html Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,31 @@
+<html>
+<head><meta charset="utf-8" /></head>
+<body>
+    <div>
[... embedded minified plotly.js v1.51.1 bundle and the serialized "Feature Importances" figure (data, layout, and template JSON) truncated in this dump ...]
+            </script>
+        </div>
+</body>
+</html>
\ No newline at end of file
b
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis02.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_vis02.html Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,31 @@
[31-line HTML file: self-contained Plotly page, figure titled "Learning Curve" with x axis "No. of samples" and y axis "Performance Score"; inlined plotly.js v1.51.1 bundle elided.]
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis03.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_vis03.html Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,31 @@
[31-line HTML file: self-contained Plotly page, figure titled "Precision-Recall Curve" with x axis "Recall" and y axis "Precision"; inlined plotly.js v1.51.1 bundle elided.]
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis04.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_vis04.html Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,31 @@
[31-line HTML file: self-contained Plotly page, figure titled "Receiver Operating Characteristic (ROC) Curve" with x axis "False Positive Rate" and y axis "True Positive Rate"; inlined plotly.js v1.51.1 bundle elided.]
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis05.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ml_vis05.html Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,31 @@
[31-line HTML file: self-contained Plotly page, untitled figure with x axis "Number of features selected" and y axis "Cross validation score"; inlined plotly.js v1.51.1 bundle elided.]
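The five ml_vis0*.html files above are expected outputs of the repository's plotting tool (ml_visualization_ex.py). As a minimal sketch of how such a self-contained page can be produced with plotly.py (the trace values and file name below are illustrative, not the tool's actual code):

import plotly.graph_objects as go

# Hypothetical feature-importance values; the real tool computes these
# from a fitted estimator.
feature_names = ["f0", "f1", "f2"]
importances = [0.5, 0.3, 0.2]

fig = go.Figure(go.Bar(x=feature_names, y=importances))
fig.update_layout(title={"text": "Feature Importances"})
# include_plotlyjs=True inlines the whole minified library, which is why
# each expected-output HTML carries the full plotly.js bundle.
fig.write_html("ml_vis01.html", include_plotlyjs=True)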
diff -r 000000000000 -r af2624d5ab32 test-data/ml_vis05.png
Binary file test-data/ml_vis05.png has changed
diff -r 000000000000 -r af2624d5ab32 test-data/model_fit01
Binary file test-data/model_fit01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/model_fit02
Binary file test-data/model_fit02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/model_fit02.h5
Binary file test-data/model_fit02.h5 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/model_pred01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/model_pred01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+Predicted
+71.129364
+60.96111
+77.885765
+57.212738
+51.806957
+52.089592
+51.571884
+80.762184
+36.772987
+41.643093
+46.386948
+77.97063
+72.768776
+40.0386
+79.81385
+74.40216
+52.089592
+75.51107
+55.705868
+39.944202
+49.643826
+59.17941
+69.848915
+64.62096
+48.310116
+43.391766
+68.25893
+60.198105
+65.16974
+72.130005
+56.351482
+53.20132
+56.86578
+54.342987
+43.521133
+59.663773
+66.097626
+51.960022
+41.559486
+45.16049
+66.40008
+71.488754
+45.16049
+63.34996
+69.83631
+55.652687
+61.311596
+71.85501
+75.12588
+54.93247
+70.09855
+74.20223
+57.898273
+55.23022
+75.70524
+66.94729
+65.12762
+59.3189
+61.22922
+61.2382
+54.017147
+51.633373
+51.633373
+65.16974
+65.16873
+57.874527
+59.740753
+43.990814
+66.06423
+64.436615
+41.245773
+63.278465
+63.27533
+71.13793
+65.47819
+72.620995
+62.598015
+36.986706
+73.2002
+71.966644
+72.912926
+75.46711
+55.12616
+46.19641
+87.20736
+72.11753
+57.952766
+84.67858
+69.21688
+64.257095
+43.59384
+44.723145
+67.051605
+50.021965
+69.202095
+75.10072
+70.80699
+83.08025
+69.62026
+42.441116
+64.38655
+59.430386
+69.366035
+73.87479
+59.973484
+75.76153
+56.195892
+71.16636
+60.419106
+61.630756
+51.81593
+54.924137
+60.73048
+78.496635
+77.921555
+73.66453
+60.904953
+71.26717
+72.01454
+53.52841
+46.66952
+54.504898
+56.28563
+59.398067
+72.71433
+51.745968
+67.80466
+51.571823
+52.010742
+54.19355
+74.193825
+64.57627
+67.48214
+68.41867
+82.102806
+55.8638
+76.90198
+62.577324
+73.70229
+78.93923
+73.51925
+54.81887
+65.2422
+59.700085
+84.08965
+64.35592
+54.001873
+41.397793
+64.64837
+62.784557
+42.990005
+45.430832
+52.089592
+60.374348
+51.67288
+62.4257
+79.536285
+76.4169
+55.978775
+74.43581
+76.89248
+65.3203
+72.10233
+59.23278
+51.736633
+73.13266
+59.45746
+73.0939
+70.58273
+53.08009
+49.893116
+73.89228
+52.64392
+54.801548
+63.534626
+68.1002
+63.70472
+63.8851
+63.268097
+62.438057
+61.989746
+71.47914
+73.92875
+48.089043
+54.874943
+50.261494
+69.11724
+57.448387
+50.528027
+58.67657
+73.969376
+53.745205
+74.81751
+85.582954
+75.10767
+48.855537
+70.66616
+41.341694
+48.55276
+63.48302
+73.02358
+69.50546
+55.603634
+74.26824
+76.03213
+62.601646
+81.99045
+59.26651
+44.504597
+53.54178
+55.247334
+82.123795
+51.84111
+66.27524
+66.23033
+58.565033
+67.452
+72.54107
+49.840427
+70.26608
+62.447872
+67.045
+42.600086
+64.88309
+55.31232
+39.07865
+71.81975
+59.447086
+53.20132
+75.12621
+72.9902
+53.1043
+72.42816
+72.10233
+55.836628
+53.2467
+74.670074
+74.5721
+54.103737
+49.212822
+67.238785
+60.09495
+74.5011
+63.0043
+67.7362
+53.029213
+74.860016
+78.597946
+75.369064
+60.000134
+68.83947
+40.24504
+81.21449
+61.465557
+42.74572
+52.089592
+73.162025
+52.033802
+79.690926
+62.542553
+59.557045
diff -r 000000000000 -r af2624d5ab32 test-data/model_pred02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/model_pred02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+Predicted
+71.08584
+61.23427
+75.80197
+66.19323
+52.35754
+52.987312
+51.777576
+75.66966
+49.61427
+51.20531
+49.255173
+76.143936
+74.00767
+50.80104
+72.37281
+68.69481
+52.816956
+76.27541
+57.82054
+49.72029
+52.400383
+57.968666
+61.28138
+58.4683
+53.114418
+50.45093
+67.63649
+60.31344
+66.52325
+72.48887
+58.755577
+53.332912
+55.175415
+53.437675
+50.452156
+61.153603
+66.69711
+51.1279
+51.37375
+50.732525
+67.677734
+74.2334
+51.287792
+70.154366
+68.460396
+58.35005
+59.828957
+74.98557
+73.3624
+54.043793
+73.04924
+77.22285
+59.452316
+56.143288
+74.41183
+60.254143
+67.18662
+63.53044
+60.43683
+60.07025
+57.257767
+52.143753
+52.872334
+67.748436
+63.986977
+55.532387
+59.70022
+49.43772
+65.30266
+67.30055
+49.907486
+57.864845
+56.207542
+70.46542
+55.503044
+73.822784
+63.741142
+49.693428
+71.36254
+71.87617
+72.02608
+65.63652
+54.059746
+51.300495
+76.06125
+73.98534
+63.071587
+75.93381
+69.479454
+63.85415
+51.218174
+49.468956
+68.23912
+50.83457
+70.77809
+72.129776
+74.53812
+68.9107
+72.47451
+50.62992
+62.99655
+56.105698
+72.927025
+65.86492
+58.282486
+75.063446
+54.558403
+65.59456
+57.257263
+58.336494
+51.988983
+57.355415
+56.631332
+62.632957
+76.11209
+76.99285
+65.670746
+74.464355
+68.042145
+54.761986
+51.070145
+56.55138
+55.53712
+57.753426
+75.02803
+57.397556
+71.05187
+51.134808
+53.119152
+52.581924
+70.8574
+66.85955
+67.29634
+66.589584
+76.06389
+54.559666
+60.37111
+63.455887
+72.6416
+75.51883
+63.990837
+53.491386
+59.82952
+60.56826
+76.53373
+66.729385
+52.592728
+48.729107
+68.03414
+56.391117
+50.800247
+50.053703
+52.03207
+55.326523
+52.58854
+60.38707
+75.923096
+75.2882
+54.893684
+78.00183
+76.06732
+60.791916
+70.38205
+60.582397
+53.582005
+77.20325
+54.903778
+68.63178
+70.27207
+54.5502
+53.928703
+74.93919
+52.267735
+51.70433
+59.89312
+74.00166
+66.61868
+70.04806
+55.62455
+65.638214
+55.330837
+65.8484
+65.45604
+50.942883
+56.04741
+52.147808
+69.9472
+52.90547
+51.568893
+57.65322
+76.28175
+53.421043
+73.63155
+77.357666
+77.49912
+51.669907
+67.80663
+49.745773
+52.792336
+62.308838
+76.21391
+70.10635
+53.58763
+76.36336
+75.63791
+66.51898
+59.851395
+53.114918
+50.095005
+54.76951
+58.387985
+76.39301
+53.754196
+66.004395
+59.4105
+53.724583
+63.857407
+70.29119
+50.46862
+58.864563
+61.946457
+70.4472
+50.738815
+65.65154
+52.600437
+49.42977
+70.38036
+56.012196
+53.824024
+71.119225
+75.3495
+49.078987
+74.36192
+71.18959
+54.9702
+54.477818
+72.231705
+68.62958
+52.298077
+52.34682
+70.110405
+60.08683
+74.98835
+55.85307
+66.53965
+53.608902
+67.770744
+66.93648
+68.07121
+59.94021
+58.784706
+50.237366
+77.0887
+65.06997
+50.1484
+51.08928
+74.907234
+56.82161
+62.303955
+62.67704
+61.49601
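model_pred01.tabular and model_pred02.tabular above are single-column "Predicted" tables of the kind written by model_prediction.py. A minimal sketch of the mechanics, assuming a pickled fitted regressor (gbr_model01 is a model file in this changeset; the feature file name is hypothetical):

import pickle

import pandas as pd

with open("gbr_model01", "rb") as f:  # a fitted regressor from test-data/
    estimator = pickle.load(f)

# Hypothetical tab-separated feature table with a header row.
X = pd.read_csv("regression_X.tabular", sep="\t")
pd.DataFrame({"Predicted": estimator.predict(X)}).to_csv(
    "model_pred01.tabular", sep="\t", index=False)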
diff -r 000000000000 -r af2624d5ab32 test-data/moons.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/moons.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 0
+-0.71834935009773 0.69568255060349 0
+0.28165064990227 -0.19568255060349 1
+-0.09602302590768 0.99537911294920 0
+-0.87131870412339 0.49071755200394 0
+-0.62348980185873 0.78183148246803 0
+-0.03205157757165 0.99948621620069 0
+-0.67230089026132 0.74027799707532 0
+-0.51839256831052 0.85514276300535 0
+1.15959989503338 -0.48718178341445 1
+0.22252093395631 0.97492791218182 0
+1.62348980185873 -0.28183148246803 1
+0.00000000000000 0.50000000000000 1
+1.90096886790242 0.06611626088244 1
+0.65463494557869 -0.43846842204976 1
+1.98155915699107 0.30884137129863 1
+1.22252093395631 -0.47492791218182 1
+0.84040010496662 -0.48718178341445 1
+0.94905574701067 0.31510821802362 0
+-0.83808810489184 0.54553490121055 0
+0.76144595836913 0.64822839530779 0
+-0.28452758663103 0.95866785303666 0
+1.51839256831052 -0.35514276300535 1
+1.46253829024084 -0.38659930637300 1
+-0.92691675734602 0.37526700487937 0
+-0.22252093395631 0.97492791218182 0
+0.15959989503338 0.98718178341445 0
+0.09602302590768 0.99537911294920 0
+1.83808810489184 -0.04553490121055 1
+1.67230089026132 -0.24027799707532 1
+0.28452758663103 0.95866785303666 0
+-0.46253829024084 0.88659930637300 0
+0.92691675734602 0.37526700487937 0
+0.98155915699107 0.19115862870137 0
+0.71834935009773 0.69568255060349 0
+0.00205460724966 0.43592978001929 1
+1.57211666012217 -0.32017225459696 1
+0.90397697409232 -0.49537911294920 1
+0.37651019814127 -0.28183148246803 1
+0.03270513696097 0.24634541609049 1
+0.59521665687761 -0.41441262301581 1
+1.03205157757165 -0.49948621620069 1
+0.96794842242834 -0.49948621620069 1
+0.09903113209758 0.06611626088244 1
+-0.90096886790242 0.43388373911756 0
+1.09602302590768 -0.49537911294920 1
+2.00000000000000 0.50000000000000 1
+0.46253829024084 0.88659930637300 0
+1.92691675734602 0.12473299512063 1
+0.32769910973868 -0.24027799707532 1
+1.00000000000000 0.00000000000000 0
+0.40478334312239 0.91441262301581 0
+-0.94905574701067 0.31510821802362 0
+-0.76144595836913 0.64822839530779 0
+-0.96729486303903 0.25365458390951 0
+-0.80141362186796 0.59811053049122 0
+0.71547241336897 -0.45866785303666 1
+1.94905574701067 0.18489178197638 1
+-0.34536505442131 0.93846842204976 0
+0.77747906604369 -0.47492791218182 1
+-0.98155915699107 0.19115862870137 0
+0.07308324265398 0.12473299512063 1
+1.34536505442131 -0.43846842204976 1
+1.87131870412339 0.00928244799606 1
+1.76144595836913 -0.14822839530779 1
+0.34536505442131 0.93846842204976 0
+1.40478334312239 -0.41441262301581 1
+0.05094425298933 0.18489178197638 1
+1.80141362186796 -0.09811053049122 1
+0.23855404163087 -0.14822839530779 1
+0.42788333987783 -0.32017225459696 1
+-0.99179001382325 0.12787716168451 0
+-1.00000000000000 0.00000000000000 0
+0.90096886790242 0.43388373911756 0
+1.99179001382325 0.37212283831549 1
+0.16191189510816 -0.04553490121055 1
+1.96729486303903 0.24634541609049 1
+0.67230089026132 0.74027799707532 0
+0.99794539275034 0.06407021998071 0
+-0.40478334312239 0.91441262301581 0
+0.96729486303903 0.25365458390951 0
+-0.15959989503338 0.98718178341445 0
+0.99179001382325 0.12787716168451 0
+0.83808810489184 0.54553490121055 0
+0.51839256831053 0.85514276300535 0
+0.12868129587661 0.00928244799606 1
+0.19858637813204 -0.09811053049122 1
+0.57211666012217 0.82017225459696 0
+1.99794539275034 0.43592978001929 1
+-0.57211666012217 0.82017225459696 0
+0.62348980185873 0.78183148246803 0
+0.48160743168947 -0.35514276300535 1
+0.01844084300893 0.30884137129863 1
+0.00820998617675 0.37212283831549 1
+0.80141362186796 0.59811053049122 0
+-0.99794539275034 0.06407021998071 0
+1.28452758663103 -0.45866785303666 1
+1.71834935009773 -0.19568255060349 1
+0.53746170975916 -0.38659930637300 1
+0.03205157757166 0.99948621620069 0
+0.87131870412339 0.49071755200394 0
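moons.txt is a noise-free two-moons dataset: two coordinate columns plus a 0/1 class label, with exact trigonometric values (the first line looks like a numeric header row). A sketch of how such a table can be generated; the sample count matches the 100 data rows, while shuffling and row order are assumptions:

import numpy as np
from sklearn.datasets import make_moons

# noise=None keeps the points exactly on the two half-circles, matching
# the exact cos/sin values in the file; no header row is written here.
X, y = make_moons(n_samples=100, noise=None)
np.savetxt("moons.txt", np.column_stack([X, y]),
           fmt=["%.14f", "%.14f", "%d"])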
diff -r 000000000000 -r af2624d5ab32 test-data/mv_result02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mv_result02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,11 @@
+Predicted
+1.578912095858962
+-1.199072894940544
+-0.7173258906076226
+0.3255908318822695
+0.21919344304093213
+-0.6841926371423699
+1.1144698671662865
+0.19379531649046616
+0.9405094785593062
+1.2581284896870837
diff -r 000000000000 -r af2624d5ab32 test-data/mv_result03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mv_result03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+train_sizes_abs mean_train_scores std_train_scores mean_test_scores std_test_scores
+17 0.9668700841937653 0.00277836829836518 0.7008862995946905 0.03857541198731935
+56 0.9730008602419361 0.006839342612121988 0.7963376762427242 0.004846330083938778
+95 0.9728783377589098 0.0037790183626530663 0.814592845745573 0.020457691766770824
+134 0.9739086338111185 0.001627343246847077 0.7985540571195479 0.03954641079310707
+174 0.9726218628287785 0.0032867750457225182 0.8152971572131146 0.04280261115004303
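mv_result03.tabular has exactly the shape of scikit-learn's learning_curve output: absolute training sizes plus mean/std train and test scores per size. A sketch with the estimator and data as stated assumptions; note that cv=5 over 218 samples makes the default train_sizes come out as 17/56/95/134/174, the first column above:

import pandas as pd
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import learning_curve

# Placeholder data and estimator; only the table layout is the point here.
X, y = make_regression(n_samples=218, n_features=10, random_state=0)
sizes, train_scores, test_scores = learning_curve(
    GradientBoostingRegressor(random_state=42), X, y, cv=5)

pd.DataFrame({
    "train_sizes_abs": sizes,
    "mean_train_scores": train_scores.mean(axis=1),
    "std_train_scores": train_scores.std(axis=1),
    "mean_test_scores": test_scores.mean(axis=1),
    "std_test_scores": test_scores.std(axis=1),
}).to_csv("mv_result03.tabular", sep="\t", index=False)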
diff -r 000000000000 -r af2624d5ab32 test-data/mv_result05.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mv_result05.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+Predicted
+70.16
+62.06
+83.04
+62.84
+48.63
+51.25
+54.98
+80.3
+42.84
+41.52
+43.83
+73.15
+74.22
+42.88
+74.93
+72.9
+53.74
+78.86
+59.0
+40.28
+54.52
+58.34
+62.74
+62.35
+49.15
+41.92
+65.59
+59.91
+66.49
+72.08
+60.44
+53.84
+54.82
+52.66
+42.37
+61.3
+63.14
+50.62
+42.75
+47.39
+67.8
+73.58
+49.97
+67.04
+67.45
+54.67
+64.87
+77.23
+73.52
+53.55
+70.53
+77.98
+61.99
+53.08
+78.12
+66.55
+63.95
+60.57
+61.6
+60.37
+55.29
+54.31
+52.54
+65.31
+61.51
+57.3
+60.02
+43.64
+74.78
+68.26
+42.72
+61.26
+61.25
+71.58
+61.03
+70.53
+70.25
+43.4
+71.39
+72.31
+72.7
+72.11
+53.55
+43.4
+80.6
+73.72
+58.86
+76.71
+68.36
+60.26
+48.56
+38.96
+69.67
+52.9
+67.63
+75.12
+70.92
+70.89
+67.05
+43.89
+59.94
+62.98
+71.1
+79.22
+77.31
+79.06
+61.11
+66.32
+54.7
+61.1
+54.59
+58.7
+59.6
+73.79
+72.69
+81.83
+61.08
+69.21
+74.8
+54.37
+50.85
+53.07
+58.53
+55.44
+72.62
+54.14
+68.12
+48.81
+50.11
+56.06
+73.63
+63.29
+71.0
+74.87
+81.24
+54.67
+66.96
+61.37
+74.84
+76.71
+69.27
+56.53
+71.91
+58.74
+77.83
+64.57
+51.93
+42.84
+64.11
+59.47
+42.46
+43.79
+51.75
+63.98
+54.71
+64.95
+79.72
+72.12
+60.66
+79.3
+71.26
+59.9
+74.25
+59.68
+52.37
+78.52
+58.52
+71.98
+71.77
+54.48
+48.96
+81.42
+54.08
+53.52
+64.38
+70.79
+63.95
+67.48
+61.76
+66.15
+62.1
+75.68
+69.72
+43.8
+56.27
+53.38
+81.31
+57.54
+48.15
+59.47
+78.01
+56.39
+72.33
+78.8
+78.66
+52.01
+66.68
+48.56
+47.75
+65.67
+77.93
+72.68
+58.0
+77.83
+73.37
+65.39
+69.79
+55.98
+46.35
+54.31
+55.58
+79.69
+52.76
+62.62
+66.54
+60.29
+62.57
+74.86
+48.05
+65.09
+65.02
+67.84
+41.86
+62.28
+57.05
+43.68
+72.0
+63.04
+54.41
+73.37
+75.11
+42.65
+73.16
+71.68
+58.61
+53.54
+73.33
+72.16
+49.96
+54.78
+64.24
+60.13
+76.46
+61.53
+68.36
+53.1
+71.33
+76.12
+70.86
+61.35
+67.12
+43.25
+80.2
+71.16
+58.63
+52.37
+74.93
+53.34
+76.41
+63.87
+59.97
diff -r 000000000000 -r af2624d5ab32 test-data/named_steps.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/named_steps.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+{'preprocessing_1': SelectKBest(k=10, score_func=<function f_regression at 0x11b4ba8c8>), 'estimator': XGBRegressor(base_score=0.5, booster='gbtree', colsample_bylevel=1,
+             colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
+             max_depth=3, min_child_weight=1, missing=nan, n_estimators=100,
+             n_jobs=1, nthread=None, objective='reg:linear', random_state=10,
+             reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
+             silent=True, subsample=1)}
\ No newline at end of file
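The dict above is a scikit-learn Pipeline's named_steps attribute. A sketch that rebuilds the same two-step structure, with hyperparameters taken from the printout (xgboost is required):

from sklearn.feature_selection import SelectKBest, f_regression
from sklearn.pipeline import Pipeline
from xgboost import XGBRegressor

pipe = Pipeline([
    ("preprocessing_1", SelectKBest(score_func=f_regression, k=10)),
    ("estimator", XGBRegressor(learning_rate=0.1, n_estimators=100,
                               random_state=10)),
])
print(pipe.named_steps)  # the dict dumped to named_steps.txt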
diff -r 000000000000 -r af2624d5ab32 test-data/nn_model01
Binary file test-data/nn_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/nn_model02
Binary file test-data/nn_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/nn_model03
Binary file test-data/nn_model03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/nn_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/nn_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/nn_prediction_result02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/nn_prediction_result02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/nn_prediction_result03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/nn_prediction_result03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
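The three nn_prediction_result0*.tabular files each repeat the rows of numeric_values.tabular (added below) with a predicted class appended as the last column. A sketch of the mechanics only: KNeighborsClassifier is an assumed stand-in for the stored nn_model0* binaries, whose class encoding evidently differs from the first input column (group 1 rows are predicted as class 2 and vice versa):

import numpy as np
from sklearn.neighbors import KNeighborsClassifier

data = np.loadtxt("numeric_values.tabular", dtype=int)  # 4 columns
X, y = data[:, 1:], data[:, 0]
clf = KNeighborsClassifier(n_neighbors=5).fit(X, y)  # assumed model family
np.savetxt("nn_prediction_result01.tabular",
           np.column_stack([data, clf.predict(X)]), fmt="%d")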
diff -r 000000000000 -r af2624d5ab32 test-data/numeric_values.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/numeric_values.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67
+0 44 64 -76
+0 51 48 -73
+0 58 65 -49
+0 43 61 -49
+0 45 43 -79
+0 42 60 -98
+0 50 55 -59
+0 53 53 -56
+0 45 44 -61
+0 43 65 -84
+0 35 52 -75
+0 56 56 -70
+1 -61 86 43
+1 -67 93 15
+1 -59 94 36
+1 -50 92 62
+1 -78 91 70
+1 -35 87 47
+1 -56 91 52
+1 -61 81 46
+1 -83 78 34
+1 -50 87 45
+1 -67 73 50
+1 -50 97 45
+1 -61 111 45
+2 -109 23 -92
+2 -94 20 -96
+2 -85 26 -88
+2 -90 33 -114
+2 -63 9 -106
+2 -79 9 -93
+2 -99 26 -108
+2 -81 19 -110
+2 -108 21 -108
+2 -92 27 -106
+2 -88 2 -106
+2 -88 15 -103
+3 54 -74 4
+3 42 -92 31
+3 39 -99 -7
+3 48 -115 -5
+3 39 -96 2
+3 31 -109 9
+3 33 -96 -8
+3 23 -102 4
+3 38 -90 21
+3 34 -107 1
+3 35 -78 18
diff -r 000000000000 -r af2624d5ab32 test-data/ohe_in_w_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_in_w_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+Label
+0
+1
+2
+3
+3
+2
+1
+0
diff -r 000000000000 -r af2624d5ab32 test-data/ohe_in_wo_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_in_wo_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,8 @@
+0
+1
+2
+3
+3
+2
+1
+0
diff -r 000000000000 -r af2624d5ab32 test-data/ohe_out_4.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_out_4.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,8 @@
+1 0 0 0
+0 1 0 0
+0 0 1 0
+0 0 0 1
+0 0 0 1
+0 0 1 0
+0 1 0 0
+1 0 0 0
diff -r 000000000000 -r af2624d5ab32 test-data/ohe_out_5.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ohe_out_5.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,8 @@
+1 0 0 0 0
+0 1 0 0 0
+0 0 1 0 0
+0 0 0 1 0
+0 0 0 1 0
+0 0 1 0 0
+0 1 0 0 0
+1 0 0 0 0
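ohe_out_4.tabular and ohe_out_5.tabular above are one-hot encodings of the 0-3 label column in the ohe_in_*.tabular files, and the 5-column variant is consistent with a fixed category count of 5. Whether the tool uses Keras' to_categorical (shown here) or sklearn's OneHotEncoder is an assumption:

import numpy as np
from tensorflow.keras.utils import to_categorical

labels = np.array([0, 1, 2, 3, 3, 2, 1, 0])  # the ohe_in_* label column
print(to_categorical(labels).astype(int))                  # 8 x 4, as in ohe_out_4
print(to_categorical(labels, num_classes=5).astype(int))   # 8 x 5, as in ohe_out_5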
b
diff -r 000000000000 -r af2624d5ab32 test-data/pca_classical_header_names_output.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_classical_header_names_output.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,300 @@\n+-3.9779\t1.8818\t2.6506\t-1.1628\t-0.0983\n+-2.3700\t0.6756\t-1.6186\t-1.4164\t1.0327\n+-3.0925\t2.4577\t-1.8726\t-0.4041\t1.5016\n+-4.0151\t2.3686\t0.3256\t-0.0685\t-0.2168\n+-4.7065\t3.1394\t2.3137\t0.1907\t-1.6303\n+-4.9613\t4.6941\t1.2908\t1.0298\t-0.8817\n+-4.8364\t5.2936\t0.8793\t1.3212\t0.0849\n+-5.0224\t2.7565\t4.6351\t-0.5397\t-1.3489\n+-2.9068\t1.6461\t-1.9066\t-0.5100\t-0.6820\n+-4.7713\t4.0171\t2.1659\t0.5434\t-1.4630\n+-2.7521\t-1.9309\t-0.9117\t-3.5728\t1.7161\n+-3.7407\t1.3845\t0.0141\t-1.5357\t1.3068\n+-4.3751\t1.0682\t2.3906\t-1.5298\t0.9064\n+-2.8532\t0.2430\t-0.8937\t-1.7428\t-1.1825\n+-2.4091\t-0.1393\t-0.0986\t-2.0343\t1.5498\n+-3.8699\t2.7278\t0.9261\t-0.4924\t0.7933\n+-4.6180\t3.2535\t2.4650\t-0.0468\t0.8258\n+-2.3362\t3.2961\t-2.7247\t0.9321\t-0.1294\n+-1.7741\t0.3277\t-3.2195\t-1.6272\t1.9721\n+-2.9188\t3.5196\t-1.5959\t0.6981\t0.3541\n+-4.0500\t2.5696\t1.0924\t-0.1582\t-1.7489\n+-5.3435\t6.4383\t1.7066\t1.9754\t0.9120\n+-4.1454\t6.1296\t-0.3939\t1.8899\t0.9309\n+-2.4452\t-0.6422\t0.2580\t-2.5687\t0.0322\n+-5.2047\t7.4266\t2.4849\t2.8623\t-1.3975\n+-2.5237\t-2.4427\t-1.5498\t-3.1847\t-1.6248\n+-1.7613\t1.6919\t-4.6466\t-0.5331\t1.3921\n+-1.1236\t2.5796\t-4.4469\t0.3771\t0.0692\n+-2.5892\t3.4039\t-1.3071\t0.2542\t0.1349\n+-3.5099\t-0.9352\t1.4462\t-2.5959\t-0.3994\n+-3.9546\t6.3431\t0.5939\t2.3052\t-0.2344\n+-2.0819\t-1.6617\t-1.0140\t-3.0790\t1.1571\n+-2.6320\t0.0703\t-0.0526\t-1.5092\t-1.8043\n+-1.8865\t0.2234\t-2.0628\t-1.9632\t2.3486\n+-4.5803\t3.1525\t2.6718\t-0.3104\t0.7210\n+-6.5473\t2.5731\t6.0056\t-0.6369\t-1.2620\n+-5.8360\t4.1304\t4.6222\t0.5425\t-0.1996\n+-1.1136\t3.4820\t-5.1375\t0.4626\t3.1966\n+-4.7698\t0.9659\t3.3912\t-1.9008\t-0.0558\n+-2.4391\t-0.7627\t-1.6123\t-1.9380\t-0.8111\n+-4.8859\t4.2069\t1.6117\t0.6831\t0.8832\n+-2.2466\t-1.0655\t-1.2645\t-2.4867\t-0.1625\n+-4.4644\t5.0000\t1.1159\t1.6869\t-0.4784\n+-3.7358\t0.9995\t1.6597\t-1.3727\t-1.7457\n+-5.8271\t3.6580\t5.8537\t-0.0174\t-0.9750\n+-4.7392\t4.2895\t2.1369\t0.6907\t-0.6801\n+-1.5432\t2.7403\t-5.0837\t0.0664\t2.6107\n+-5.7249\t7.7343\t3.2678\t3.5103\t-2.0555\n+-4.2566\t5.3778\t1.2450\t1.4052\t0.2429\n+-5.7932\t5.3928\t4.6237\t1.0285\t1.0814\n+-2.9574\t-1.1660\t1.2251\t-2.5803\t-0.5026\n+-2.0365\t4.7362\t-3.8569\t1.8582\t-0.6083\n+-5.1883\t6.2608\t1.6921\t2.1737\t0.9110\n+-5.5934\t1.2903\t5.3088\t-1.4372\t0.2000\n+-1.4178\t0.5340\t-3.0765\t-1.4210\t1.9659\n+-5.1568\t4.3100\t2.6279\t0.8400\t-0.4656\n+-4.2551\t3.3395\t1.2265\t-0.0344\t-0.0296\n+-6.4636\t3.6525\t5.4351\t-0.1493\t1.1392\n+-4.0271\t-0.6214\t2.0667\t-2.5704\t0.5389\n+-3.2885\t2.2421\t0.4406\t-0.5508\t0.4760\n+-3.2320\t3.1264\t0.1610\t0.0045\t-0.3199\n+-2.6003\t5.2398\t-2.1366\t1.6829\t0.7428\n+-4.3207\t1.7506\t1.6012\t-0.9072\t-1.5917\n+-1.9287\t2.7030\t-3.8706\t0.1751\t0.9751\n+-4.6549\t5.5519\t2.1315\t1.7555\t-0.4025\n+-2.4743\t1.5111\t-1.6381\t-0.8537\t-0.4237\n+-1.2837\t3.5483\t-5.9098\t0.8155\t0.5023\n+-3.9514\t5.4703\t0.2135\t1.6665\t0.0226\n+-3.1575\t3.1697\t-2.0242\t-0.1906\t2.4084\n+-6.7971\t3.1578\t6.8243\t-0.5140\t-0.4121\n+-5.9999\t3.1135\t6.0259\t0.1711\t-2.0321\n+-2.3450\t1.9814\t-1.1103\t-0.7338\t0.6581\n+-1.5478\t0.3095\t-3.1375\t-1.9311\t2.3145\n+-3.6067\t1.2237\t-0.4271\t-1.2399\t-0.0987\n+-3.0574\t-0.0303\t-1.0815\t-1.5251\t-1.7385\n+-3.7608\t4.9627\t0.5748\t1.3373\t1.6977\n+-3.3834\t2.2529\t-1.4015\t-0.3531\t-0.8381\n+-5.3297\t2.0845\t4.0157\t-1.0934\t0.1069\n+-4.6415\t5.6565\t1.0886\t1.6713\t-0.3536\n+-4.7611\t4.6882\t1.0939\t0.9883\t1.7929\n+-1.7499\t1.7738\t-2.6457\t-0.0629\t-0.5751\n
+-5.1579\t7.5589\t1.1299\t3.1680\t-0.8202\n+-3.4019\t-1.4226\t0.3991\t-2.5729\t-0.9099\n+-1.6689\t1.3580\t-3.7300\t-0.7291\t1.5630\n+-5.5132\t6.6256\t3.6086\t1.9423\t0.3727\n+-4.4010\t7.0180\t1.1796\t2.6417\t0.9847\n+-2.1174\t3.1273\t-2.6107\t-0.2004\t2.3541\n+-2.2818\t-0.7861\t-1.5672\t-1.8685\t-1.2308\n+-4.2055\t4.8158\t-0.1348\t1.2570\t-0.2039\n+-2.2741\t1.1907\t-1.5868\t-1.0998\t-0.5999\n+-3.0433\t3.1513\t-1.8017\t0.1704\t0.3636\n+-5.3872\t1.7330\t5.6772\t-1.1538\t-0.2345\n+-3.5773\t2.5712\t-0.8771\t0.2747\t-1.2405\n+-3.0843\t1.4711\t-0.1928\t-1.2214\t1.2785\n+-1.9572\t3.5730\t-4.2197\t0.3158\t2.0016\n+-2.3444\t4.7106\t-3.7159\t1.0094\t1.7919\n+-3.4024\t1.1605\t0.5845\t-1.1358\t-0.6689\n+-3.2321\t4.3272\t-1.2592\t1.0365\t-0.4073\n+-5.0553\t5.8588\t3.0041\t1.9760\t-0.7261\n+-3.6706\t1.0101\t1.8198\t-1.8471\t1.1714\n+8.9574\t3.4341\t-0.6861\t1.5391\t0.1971\n+10.4081\t3.3686\t0.6688\t0.9791\t2.2503\n+8.8871\t0.8254\t3.9087\t-0.9576\t1.5038\n+2.168'..b'435\t-2.7504\t0.4447\n+0.0868\t-0.4073\t-1.2184\t-1.6489\t-2.2725\n+4.9808\t-1.5720\t2.3203\t-2.7613\t1.7252\n+-2.5810\t-5.5079\t0.0561\t2.7450\t-1.9096\n+0.0370\t-2.1438\t-0.4775\t-3.6280\t1.6759\n+1.0997\t-3.5629\t1.4259\t1.7291\t-1.2305\n+-1.3638\t-2.9435\t-0.1034\t-1.4507\t-0.9311\n+-0.7078\t-4.3400\t-1.1441\t0.4367\t-1.7198\n+-0.5174\t-3.5645\t0.2025\t0.4664\t0.8917\n+-2.6934\t-6.0138\t-1.9617\t2.8409\t-1.2291\n+-0.9761\t-2.4423\t-1.2032\t-2.4114\t-2.0578\n+0.4854\t-5.7924\t0.7961\t2.7641\t0.1446\n+0.2325\t-3.0261\t-0.5759\t-1.7790\t0.5984\n+1.4671\t-1.9240\t0.8327\t-1.8960\t-0.4687\n+-2.5020\t-5.9264\t-1.0911\t2.3968\t0.3954\n+0.5726\t-3.6055\t1.0491\t-0.0200\t1.7178\n+1.2348\t-2.2932\t1.4641\t-1.1600\t0.3434\n+1.5601\t-1.7994\t0.7362\t-1.6966\t0.7891\n+-1.2684\t-4.2416\t-1.5761\t0.0715\t-1.2373\n+-1.2743\t-1.4687\t0.5370\t-2.5136\t1.0946\n+-0.1360\t-4.4661\t-0.0391\t0.5382\t2.6005\n+-0.3517\t-4.6240\t-0.4723\t1.6165\t-2.1852\n+1.4760\t-2.5560\t0.2351\t-0.9202\t0.4155\n+-1.7783\t-5.3517\t0.7277\t3.2291\t1.2118\n+1.6356\t-3.7406\t0.1185\t1.1998\t-2.1563\n+-1.5925\t-4.0036\t-0.4063\t-0.3156\t1.4120\n+0.5318\t-6.1624\t1.3086\t4.4905\t-1.6720\n+-2.1599\t-2.1987\t-1.7868\t-2.7058\t-0.8553\n+-0.1707\t-3.3282\t0.2381\t-0.0381\t0.8412\n+0.2620\t-2.7657\t-0.3002\t-2.0651\t-1.3784\n+-1.4631\t-2.2472\t-0.7439\t-2.3037\t1.4133\n+-0.6767\t-2.6672\t0.4721\t-0.4983\t-0.3085\n+0.8239\t-2.9922\t0.8695\t1.0520\t-1.3905\n+-2.8612\t-4.9086\t-0.2330\t1.8419\t-2.0691\n+0.9513\t-4.1058\t-0.0948\t0.4003\t0.9779\n+-0.7449\t-2.5754\t0.1638\t-0.3664\t-1.0395\n+-2.8790\t-6.2301\t-1.0229\t3.2147\t-1.2238\n+0.4482\t-6.0936\t0.7186\t2.6087\t1.1673\n+-0.8581\t-4.9450\t1.2239\t2.8168\t1.9155\n+-1.9585\t-5.5486\t0.1892\t2.2741\t0.9021\n+-0.7563\t-3.0605\t0.3737\t-1.2120\t-0.5686\n+-1.2176\t-5.9017\t-0.7336\t3.0326\t-1.7117\n+-0.3342\t-6.3764\t-0.2172\t4.3070\t-1.8445\n+-1.8562\t-1.9825\t0.1295\t-1.7173\t1.1371\n+-0.2782\t-4.6277\t1.0273\t1.3582\t1.5796\n+-2.8011\t-3.9132\t-0.0397\t-0.1702\t0.0073\n+0.4691\t-5.7114\t1.0759\t2.0051\t2.1326\n+-0.3390\t-3.6590\t-0.0755\t-0.2534\t1.4111\n+-1.6579\t-2.2403\t0.1202\t-2.8767\t1.5805\n+-1.9985\t-1.4155\t-0.0361\t-2.4219\t1.1876\n+-0.7262\t-2.5969\t-0.9169\t-1.8642\t-1.8831\n+0.2857\t-6.9537\t-0.1326\t4.3486\t-0.2990\n+0.3086\t-4.2096\t1.1225\t0.8869\t2.5561\n+-1.0663\t-4.8302\t-0.2336\t1.1157\t1.8525\n+1.8021\t-2.9988\t0.2358\t-0.4046\t0.6294\n+-0.6291\t-4.2600\t-0.6726\t1.3688\t-0.7486\n+-0.3361\t-2.0217\t-0.1530\t-1.7010\t-2.3760\n+1.2746\t-3.1963\t1.5362\t-0.5552\t0.5358\n+1.3034\t-4.6254\t1.0466\t0.2558\t2.0886\n+-1.8983\t-4
.7895\t-0.5294\t2.0973\t-1.6399\n+0.4626\t-5.0153\t1.1064\t3.2728\t-0.2280\n+0.7110\t-3.3192\t-0.2483\t-0.9576\t0.0827\n+-0.1416\t-5.5202\t1.0561\t2.5331\t0.7097\n+-0.7655\t-3.2629\t0.7123\t0.0562\t1.2078\n+-1.3523\t-4.5564\t0.2855\t1.3386\t-2.0493\n+-1.7088\t-2.2341\t0.0825\t-2.0599\t0.8335\n+-3.1236\t-5.9987\t-0.0811\t3.0773\t1.9977\n+1.2663\t-4.4825\t1.5603\t1.2249\t1.6582\n+-0.6526\t-5.8705\t0.9230\t2.7318\t1.7473\n+0.3953\t-2.5365\t-0.0945\t-3.0766\t1.7426\n+-0.1625\t-3.4281\t0.1297\t-0.4754\t1.6961\n+-2.2140\t-5.0133\t-0.0794\t0.9143\t2.5583\n+-0.4613\t-6.2075\t1.2719\t4.1384\t2.7730\n+-1.4160\t-7.0144\t-0.4855\t3.6068\t0.5113\n+0.6889\t-3.8158\t1.3029\t0.6781\t1.8137\n+0.9439\t-4.3010\t0.8782\t0.0062\t2.0116\n+-1.8597\t-3.7948\t-1.2024\t-0.3578\t-2.7539\n+-1.6633\t-3.2509\t-1.2846\t-1.4958\t0.0063\n+-1.6431\t-2.0504\t-0.6246\t-2.3438\t-2.7969\n+-2.3001\t-4.4964\t-0.8472\t-0.0680\t-0.8250\n+-1.5611\t-4.4606\t0.2359\t0.9168\t-0.6425\n+1.3137\t-1.1202\t1.7077\t-2.6874\t1.5428\n+-1.6780\t-5.6718\t0.0590\t2.3780\t-1.0233\n+-2.5783\t-6.3740\t-0.3193\t3.5583\t3.0596\n+-0.6061\t-2.7607\t-1.2368\t-1.6965\t-2.5497\n+-2.6190\t-3.5589\t0.1136\t0.0072\t2.0735\n+-1.0030\t-6.4175\t0.5149\t3.7496\t0.7330\n+-1.7912\t-5.1464\t-1.0372\t1.4546\t-0.7956\n+-1.5792\t-4.4416\t0.1851\t0.9401\t-1.6688\n+0.2980\t-4.1670\t-0.0238\t0.2527\t-0.5423\n+-1.1631\t-3.4712\t0.8661\t-0.5976\t1.4833\n+-0.2476\t-5.4577\t-0.6047\t2.7129\t-1.6734\n+0.2774\t-6.7209\t0.9426\t4.4414\t-0.2085\n+-2.3883\t-2.2052\t-1.6941\t-2.3223\t-1.2636\n+-1.3874\t-4.1603\t0.0421\t0.9842\t1.4190\n+-2.8690\t-5.6070\t0.0897\t3.4202\t1.5811\n+-0.6246\t-6.5834\t0.4792\t3.6943\t1.5019\n+0.0832\t-2.1370\t-1.5266\t-2.4836\t-2.9471\n+0.4752\t-5.1350\t1.0637\t3.0672\t-1.3305\n+-1.1580\t-4.1051\t0.2756\t1.9264\t-1.8723\n+-1.5835\t-4.6959\t-0.7588\t-0.2192\t0.3629\n+-2.3501\t-1.8305\t-1.6013\t-2.4534\t-2.6362\n+-0.9423\t-3.9026\t-0.2186\t0.8125\t-2.2811\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pca_classical_output.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_classical_output.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,300 @@\n+-3.9779\t1.8818\t2.6506\t-1.1628\t-0.0983\n+-2.3700\t0.6756\t-1.6186\t-1.4164\t1.0327\n+-3.0925\t2.4577\t-1.8726\t-0.4041\t1.5016\n+-4.0151\t2.3686\t0.3256\t-0.0685\t-0.2168\n+-4.7065\t3.1394\t2.3137\t0.1907\t-1.6303\n+-4.9613\t4.6941\t1.2908\t1.0298\t-0.8817\n+-4.8364\t5.2936\t0.8793\t1.3212\t0.0849\n+-5.0224\t2.7565\t4.6351\t-0.5397\t-1.3489\n+-2.9068\t1.6461\t-1.9066\t-0.5100\t-0.6820\n+-4.7713\t4.0171\t2.1659\t0.5434\t-1.4630\n+-2.7521\t-1.9309\t-0.9117\t-3.5728\t1.7161\n+-3.7407\t1.3845\t0.0141\t-1.5357\t1.3068\n+-4.3751\t1.0682\t2.3906\t-1.5298\t0.9064\n+-2.8532\t0.2430\t-0.8937\t-1.7428\t-1.1825\n+-2.4091\t-0.1393\t-0.0986\t-2.0343\t1.5498\n+-3.8699\t2.7278\t0.9261\t-0.4924\t0.7933\n+-4.6180\t3.2535\t2.4650\t-0.0468\t0.8258\n+-2.3362\t3.2961\t-2.7247\t0.9321\t-0.1294\n+-1.7741\t0.3277\t-3.2195\t-1.6272\t1.9721\n+-2.9188\t3.5196\t-1.5959\t0.6981\t0.3541\n+-4.0500\t2.5696\t1.0924\t-0.1582\t-1.7489\n+-5.3435\t6.4383\t1.7066\t1.9754\t0.9120\n+-4.1454\t6.1296\t-0.3939\t1.8899\t0.9309\n+-2.4452\t-0.6422\t0.2580\t-2.5687\t0.0322\n+-5.2047\t7.4266\t2.4849\t2.8623\t-1.3975\n+-2.5237\t-2.4427\t-1.5498\t-3.1847\t-1.6248\n+-1.7613\t1.6919\t-4.6466\t-0.5331\t1.3921\n+-1.1236\t2.5796\t-4.4469\t0.3771\t0.0692\n+-2.5892\t3.4039\t-1.3071\t0.2542\t0.1349\n+-3.5099\t-0.9352\t1.4462\t-2.5959\t-0.3994\n+-3.9546\t6.3431\t0.5939\t2.3052\t-0.2344\n+-2.0819\t-1.6617\t-1.0140\t-3.0790\t1.1571\n+-2.6320\t0.0703\t-0.0526\t-1.5092\t-1.8043\n+-1.8865\t0.2234\t-2.0628\t-1.9632\t2.3486\n+-4.5803\t3.1525\t2.6718\t-0.3104\t0.7210\n+-6.5473\t2.5731\t6.0056\t-0.6369\t-1.2620\n+-5.8360\t4.1304\t4.6222\t0.5425\t-0.1996\n+-1.1136\t3.4820\t-5.1375\t0.4626\t3.1966\n+-4.7698\t0.9659\t3.3912\t-1.9008\t-0.0558\n+-2.4391\t-0.7627\t-1.6123\t-1.9380\t-0.8111\n+-4.8859\t4.2069\t1.6117\t0.6831\t0.8832\n+-2.2466\t-1.0655\t-1.2645\t-2.4867\t-0.1625\n+-4.4644\t5.0000\t1.1159\t1.6869\t-0.4784\n+-3.7358\t0.9995\t1.6597\t-1.3727\t-1.7457\n+-5.8271\t3.6580\t5.8537\t-0.0174\t-0.9750\n+-4.7392\t4.2895\t2.1369\t0.6907\t-0.6801\n+-1.5432\t2.7403\t-5.0837\t0.0664\t2.6107\n+-5.7249\t7.7343\t3.2678\t3.5103\t-2.0555\n+-4.2566\t5.3778\t1.2450\t1.4052\t0.2429\n+-5.7932\t5.3928\t4.6237\t1.0285\t1.0814\n+-2.9574\t-1.1660\t1.2251\t-2.5803\t-0.5026\n+-2.0365\t4.7362\t-3.8569\t1.8582\t-0.6083\n+-5.1883\t6.2608\t1.6921\t2.1737\t0.9110\n+-5.5934\t1.2903\t5.3088\t-1.4372\t0.2000\n+-1.4178\t0.5340\t-3.0765\t-1.4210\t1.9659\n+-5.1568\t4.3100\t2.6279\t0.8400\t-0.4656\n+-4.2551\t3.3395\t1.2265\t-0.0344\t-0.0296\n+-6.4636\t3.6525\t5.4351\t-0.1493\t1.1392\n+-4.0271\t-0.6214\t2.0667\t-2.5704\t0.5389\n+-3.2885\t2.2421\t0.4406\t-0.5508\t0.4760\n+-3.2320\t3.1264\t0.1610\t0.0045\t-0.3199\n+-2.6003\t5.2398\t-2.1366\t1.6829\t0.7428\n+-4.3207\t1.7506\t1.6012\t-0.9072\t-1.5917\n+-1.9287\t2.7030\t-3.8706\t0.1751\t0.9751\n+-4.6549\t5.5519\t2.1315\t1.7555\t-0.4025\n+-2.4743\t1.5111\t-1.6381\t-0.8537\t-0.4237\n+-1.2837\t3.5483\t-5.9098\t0.8155\t0.5023\n+-3.9514\t5.4703\t0.2135\t1.6665\t0.0226\n+-3.1575\t3.1697\t-2.0242\t-0.1906\t2.4084\n+-6.7971\t3.1578\t6.8243\t-0.5140\t-0.4121\n+-5.9999\t3.1135\t6.0259\t0.1711\t-2.0321\n+-2.3450\t1.9814\t-1.1103\t-0.7338\t0.6581\n+-1.5478\t0.3095\t-3.1375\t-1.9311\t2.3145\n+-3.6067\t1.2237\t-0.4271\t-1.2399\t-0.0987\n+-3.0574\t-0.0303\t-1.0815\t-1.5251\t-1.7385\n+-3.7608\t4.9627\t0.5748\t1.3373\t1.6977\n+-3.3834\t2.2529\t-1.4015\t-0.3531\t-0.8381\n+-5.3297\t2.0845\t4.0157\t-1.0934\t0.1069\n+-4.6415\t5.6565\t1.0886\t1.6713\t-0.3536\n+-4.7611\t4.6882\t1.0939\t0.9883\t1.7929\n+-1.7499\t1.7738\t-2.6457\t-0.0629\t-0.5751\n
+-5.1579\t7.5589\t1.1299\t3.1680\t-0.8202\n+-3.4019\t-1.4226\t0.3991\t-2.5729\t-0.9099\n+-1.6689\t1.3580\t-3.7300\t-0.7291\t1.5630\n+-5.5132\t6.6256\t3.6086\t1.9423\t0.3727\n+-4.4010\t7.0180\t1.1796\t2.6417\t0.9847\n+-2.1174\t3.1273\t-2.6107\t-0.2004\t2.3541\n+-2.2818\t-0.7861\t-1.5672\t-1.8685\t-1.2308\n+-4.2055\t4.8158\t-0.1348\t1.2570\t-0.2039\n+-2.2741\t1.1907\t-1.5868\t-1.0998\t-0.5999\n+-3.0433\t3.1513\t-1.8017\t0.1704\t0.3636\n+-5.3872\t1.7330\t5.6772\t-1.1538\t-0.2345\n+-3.5773\t2.5712\t-0.8771\t0.2747\t-1.2405\n+-3.0843\t1.4711\t-0.1928\t-1.2214\t1.2785\n+-1.9572\t3.5730\t-4.2197\t0.3158\t2.0016\n+-2.3444\t4.7106\t-3.7159\t1.0094\t1.7919\n+-3.4024\t1.1605\t0.5845\t-1.1358\t-0.6689\n+-3.2321\t4.3272\t-1.2592\t1.0365\t-0.4073\n+-5.0553\t5.8588\t3.0041\t1.9760\t-0.7261\n+-3.6706\t1.0101\t1.8198\t-1.8471\t1.1714\n+8.9574\t3.4341\t-0.6861\t1.5391\t0.1971\n+10.4081\t3.3686\t0.6688\t0.9791\t2.2503\n+8.8871\t0.8254\t3.9087\t-0.9576\t1.5038\n+2.168'..b'435\t-2.7504\t0.4447\n+0.0868\t-0.4073\t-1.2184\t-1.6489\t-2.2725\n+4.9808\t-1.5720\t2.3203\t-2.7613\t1.7252\n+-2.5810\t-5.5079\t0.0561\t2.7450\t-1.9096\n+0.0370\t-2.1438\t-0.4775\t-3.6280\t1.6759\n+1.0997\t-3.5629\t1.4259\t1.7291\t-1.2305\n+-1.3638\t-2.9435\t-0.1034\t-1.4507\t-0.9311\n+-0.7078\t-4.3400\t-1.1441\t0.4367\t-1.7198\n+-0.5174\t-3.5645\t0.2025\t0.4664\t0.8917\n+-2.6934\t-6.0138\t-1.9617\t2.8409\t-1.2291\n+-0.9761\t-2.4423\t-1.2032\t-2.4114\t-2.0578\n+0.4854\t-5.7924\t0.7961\t2.7641\t0.1446\n+0.2325\t-3.0261\t-0.5759\t-1.7790\t0.5984\n+1.4671\t-1.9240\t0.8327\t-1.8960\t-0.4687\n+-2.5020\t-5.9264\t-1.0911\t2.3968\t0.3954\n+0.5726\t-3.6055\t1.0491\t-0.0200\t1.7178\n+1.2348\t-2.2932\t1.4641\t-1.1600\t0.3434\n+1.5601\t-1.7994\t0.7362\t-1.6966\t0.7891\n+-1.2684\t-4.2416\t-1.5761\t0.0715\t-1.2373\n+-1.2743\t-1.4687\t0.5370\t-2.5136\t1.0946\n+-0.1360\t-4.4661\t-0.0391\t0.5382\t2.6005\n+-0.3517\t-4.6240\t-0.4723\t1.6165\t-2.1852\n+1.4760\t-2.5560\t0.2351\t-0.9202\t0.4155\n+-1.7783\t-5.3517\t0.7277\t3.2291\t1.2118\n+1.6356\t-3.7406\t0.1185\t1.1998\t-2.1563\n+-1.5925\t-4.0036\t-0.4063\t-0.3156\t1.4120\n+0.5318\t-6.1624\t1.3086\t4.4905\t-1.6720\n+-2.1599\t-2.1987\t-1.7868\t-2.7058\t-0.8553\n+-0.1707\t-3.3282\t0.2381\t-0.0381\t0.8412\n+0.2620\t-2.7657\t-0.3002\t-2.0651\t-1.3784\n+-1.4631\t-2.2472\t-0.7439\t-2.3037\t1.4133\n+-0.6767\t-2.6672\t0.4721\t-0.4983\t-0.3085\n+0.8239\t-2.9922\t0.8695\t1.0520\t-1.3905\n+-2.8612\t-4.9086\t-0.2330\t1.8419\t-2.0691\n+0.9513\t-4.1058\t-0.0948\t0.4003\t0.9779\n+-0.7449\t-2.5754\t0.1638\t-0.3664\t-1.0395\n+-2.8790\t-6.2301\t-1.0229\t3.2147\t-1.2238\n+0.4482\t-6.0936\t0.7186\t2.6087\t1.1673\n+-0.8581\t-4.9450\t1.2239\t2.8168\t1.9155\n+-1.9585\t-5.5486\t0.1892\t2.2741\t0.9021\n+-0.7563\t-3.0605\t0.3737\t-1.2120\t-0.5686\n+-1.2176\t-5.9017\t-0.7336\t3.0326\t-1.7117\n+-0.3342\t-6.3764\t-0.2172\t4.3070\t-1.8445\n+-1.8562\t-1.9825\t0.1295\t-1.7173\t1.1371\n+-0.2782\t-4.6277\t1.0273\t1.3582\t1.5796\n+-2.8011\t-3.9132\t-0.0397\t-0.1702\t0.0073\n+0.4691\t-5.7114\t1.0759\t2.0051\t2.1326\n+-0.3390\t-3.6590\t-0.0755\t-0.2534\t1.4111\n+-1.6579\t-2.2403\t0.1202\t-2.8767\t1.5805\n+-1.9985\t-1.4155\t-0.0361\t-2.4219\t1.1876\n+-0.7262\t-2.5969\t-0.9169\t-1.8642\t-1.8831\n+0.2857\t-6.9537\t-0.1326\t4.3486\t-0.2990\n+0.3086\t-4.2096\t1.1225\t0.8869\t2.5561\n+-1.0663\t-4.8302\t-0.2336\t1.1157\t1.8525\n+1.8021\t-2.9988\t0.2358\t-0.4046\t0.6294\n+-0.6291\t-4.2600\t-0.6726\t1.3688\t-0.7486\n+-0.3361\t-2.0217\t-0.1530\t-1.7010\t-2.3760\n+1.2746\t-3.1963\t1.5362\t-0.5552\t0.5358\n+1.3034\t-4.6254\t1.0466\t0.2558\t2.0886\n+-1.8983\t-4
.7895\t-0.5294\t2.0973\t-1.6399\n+0.4626\t-5.0153\t1.1064\t3.2728\t-0.2280\n+0.7110\t-3.3192\t-0.2483\t-0.9576\t0.0827\n+-0.1416\t-5.5202\t1.0561\t2.5331\t0.7097\n+-0.7655\t-3.2629\t0.7123\t0.0562\t1.2078\n+-1.3523\t-4.5564\t0.2855\t1.3386\t-2.0493\n+-1.7088\t-2.2341\t0.0825\t-2.0599\t0.8335\n+-3.1236\t-5.9987\t-0.0811\t3.0773\t1.9977\n+1.2663\t-4.4825\t1.5603\t1.2249\t1.6582\n+-0.6526\t-5.8705\t0.9230\t2.7318\t1.7473\n+0.3953\t-2.5365\t-0.0945\t-3.0766\t1.7426\n+-0.1625\t-3.4281\t0.1297\t-0.4754\t1.6961\n+-2.2140\t-5.0133\t-0.0794\t0.9143\t2.5583\n+-0.4613\t-6.2075\t1.2719\t4.1384\t2.7730\n+-1.4160\t-7.0144\t-0.4855\t3.6068\t0.5113\n+0.6889\t-3.8158\t1.3029\t0.6781\t1.8137\n+0.9439\t-4.3010\t0.8782\t0.0062\t2.0116\n+-1.8597\t-3.7948\t-1.2024\t-0.3578\t-2.7539\n+-1.6633\t-3.2509\t-1.2846\t-1.4958\t0.0063\n+-1.6431\t-2.0504\t-0.6246\t-2.3438\t-2.7969\n+-2.3001\t-4.4964\t-0.8472\t-0.0680\t-0.8250\n+-1.5611\t-4.4606\t0.2359\t0.9168\t-0.6425\n+1.3137\t-1.1202\t1.7077\t-2.6874\t1.5428\n+-1.6780\t-5.6718\t0.0590\t2.3780\t-1.0233\n+-2.5783\t-6.3740\t-0.3193\t3.5583\t3.0596\n+-0.6061\t-2.7607\t-1.2368\t-1.6965\t-2.5497\n+-2.6190\t-3.5589\t0.1136\t0.0072\t2.0735\n+-1.0030\t-6.4175\t0.5149\t3.7496\t0.7330\n+-1.7912\t-5.1464\t-1.0372\t1.4546\t-0.7956\n+-1.5792\t-4.4416\t0.1851\t0.9401\t-1.6688\n+0.2980\t-4.1670\t-0.0238\t0.2527\t-0.5423\n+-1.1631\t-3.4712\t0.8661\t-0.5976\t1.4833\n+-0.2476\t-5.4577\t-0.6047\t2.7129\t-1.6734\n+0.2774\t-6.7209\t0.9426\t4.4414\t-0.2085\n+-2.3883\t-2.2052\t-1.6941\t-2.3223\t-1.2636\n+-1.3874\t-4.1603\t0.0421\t0.9842\t1.4190\n+-2.8690\t-5.6070\t0.0897\t3.4202\t1.5811\n+-0.6246\t-6.5834\t0.4792\t3.6943\t1.5019\n+0.0832\t-2.1370\t-1.5266\t-2.4836\t-2.9471\n+0.4752\t-5.1350\t1.0637\t3.0672\t-1.3305\n+-1.1580\t-4.1051\t0.2756\t1.9264\t-1.8723\n+-1.5835\t-4.6959\t-0.7588\t-0.2192\t0.3629\n+-2.3501\t-1.8305\t-1.6013\t-2.4534\t-2.6362\n+-0.9423\t-3.9026\t-0.2186\t0.8125\t-2.2811\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pca_incremental_header_names_output.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_incremental_header_names_output.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,300 @@\n+-4.5041\t1.2787\t-0.9244\t-0.6417\t2.4742\t-0.6790\t-2.2376\n+-2.3058\t0.3190\t2.0467\t-1.5089\t-1.7879\t-2.8580\t2.3471\n+-3.6298\t1.9778\t0.5695\t3.2844\t-2.1532\t1.0649\t0.2268\n+-4.9371\t1.9948\t-1.1517\t-0.7883\t0.1443\t0.3281\t-1.6340\n+-5.1251\t2.1148\t1.0578\t-1.2113\t2.7850\t-0.4718\t-0.5876\n+-5.2311\t3.1236\t2.4521\t-0.1645\t2.0950\t1.3657\t-1.2221\n+-5.2736\t3.9401\t3.1613\t-0.4221\t1.4421\t0.6351\t1.4170\n+-5.8843\t2.3108\t-0.9970\t-2.5925\t4.5634\t-1.7216\t-0.6943\n+-3.0847\t0.9348\t1.2297\t-0.1673\t-1.5983\t-0.3054\t-0.8598\n+-5.0726\t2.7358\t2.1948\t0.6672\t2.8109\t0.2199\t0.2954\n+-3.0489\t-1.4621\t-1.4039\t2.2051\t-2.1125\t-3.5712\t1.8233\n+-3.9828\t0.4789\t-0.4798\t3.1414\t-0.2706\t1.0323\t-3.0673\n+-4.7769\t0.5637\t-0.6189\t2.4042\t1.8522\t-0.8892\t0.1666\n+-2.8177\t-0.2411\t1.1106\t-1.6952\t-0.7266\t-2.7600\t-0.0377\n+-3.3395\t0.6116\t-2.4104\t0.4482\t-1.2702\t-2.2185\t1.2082\n+-3.7895\t1.3834\t1.9512\t0.4855\t1.2103\t0.2934\t-1.2998\n+-5.6850\t2.9597\t-0.6627\t-1.3921\t1.9849\t-0.8803\t0.8756\n+-2.3684\t1.9853\t2.1848\t1.9063\t-1.9759\t2.9765\t-2.6155\n+-2.7658\t1.1415\t-1.5911\t-2.5974\t-4.2491\t-2.4650\t1.0467\n+-3.4687\t2.6724\t0.7399\t-2.5064\t-1.2477\t1.3807\t-2.8392\n+-4.1960\t1.4928\t1.5341\t3.2301\t1.6231\t0.5496\t-0.1931\n+-6.8974\t5.6555\t-0.4728\t-0.2712\t1.6418\t2.8355\t-1.2524\n+-5.4610\t5.7033\t1.6135\t-2.4726\t-0.3671\t0.4024\t2.5259\n+-3.6199\t0.4197\t-3.3176\t0.7440\t-0.9565\t-3.1444\t1.3050\n+-6.9755\t6.9895\t0.8969\t-1.2372\t2.7908\t1.0709\t2.8530\n+-2.4744\t-2.4406\t-1.2029\t1.7749\t-1.8143\t-2.5760\t-1.4119\n+-2.2670\t1.4244\t0.3150\t-0.3939\t-4.8261\t0.5653\t-1.5405\n+-1.2997\t1.8646\t1.6713\t0.9233\t-3.9371\t1.9971\t-2.1376\n+-2.9030\t2.7832\t2.8769\t-2.1591\t-0.9938\t-1.4089\t2.2805\n+-4.0193\t-0.7911\t-2.3511\t-2.6334\t0.8198\t-3.1752\t-1.7134\n+-4.6438\t5.1543\t2.9044\t-3.5749\t1.3863\t1.1820\t0.2145\n+-3.1379\t-0.3608\t-3.4466\t2.6176\t-2.5651\t-3.1252\t2.2431\n+-3.3334\t0.1605\t-2.3825\t-0.8492\t-0.2283\t-0.9548\t-3.0823\n+-1.8428\t-0.3384\t-0.2287\t1.0854\t-2.4548\t0.2911\t-3.1773\n+-5.7377\t2.9593\t-1.3065\t2.8244\t2.0424\t0.2718\t0.8933\n+-7.5818\t1.9024\t-2.0175\t1.7637\t5.6534\t-0.4313\t-0.7429\n+-6.7842\t3.2537\t-0.5172\t1.7928\t4.5232\t1.0479\t-0.2527\n+-2.6542\t4.2982\t-0.6507\t-0.7068\t-6.0759\t0.4216\t2.2052\n+-4.7066\t-0.0723\t0.5610\t2.7188\t3.2642\t-1.3372\t-0.2390\n+-3.1046\t-0.4176\t-2.2528\t3.1483\t-2.1742\t-0.7089\t-0.9199\n+-5.1411\t2.5961\t1.4787\t2.5253\t1.9749\t2.6935\t-2.1514\n+-3.0358\t-0.4673\t-3.0178\t-0.1183\t-2.0737\t-1.9430\t-1.7089\n+-4.8013\t3.3868\t2.2209\t-2.8917\t1.9785\t1.7562\t-2.4000\n+-4.8966\t1.5345\t-1.9107\t1.6206\t1.0586\t-2.4528\t2.2876\n+-6.4597\t2.5302\t-0.0772\t3.0385\t5.9297\t0.5882\t-0.3289\n+-5.9218\t3.7935\t-0.2556\t3.3005\t2.0236\t1.1245\t1.1257\n+-2.6684\t3.1373\t-0.3467\t-3.5191\t-5.6729\t-0.1143\t-0.0563\n+-6.6356\t5.9313\t2.3230\t-4.2617\t4.5651\t2.7706\t-2.2094\n+-5.6003\t4.9145\t-0.0899\t0.9846\t1.1772\t1.7796\t0.3640\n+-7.1334\t4.7398\t-0.3982\t-1.1846\t4.2620\t0.3247\t0.9009\n+-3.7706\t-0.4166\t-2.7645\t3.4809\t0.1927\t-2.6134\t1.7933\n+-2.5829\t3.6848\t2.0472\t0.9575\t-3.0212\t3.5198\t-2.3061\n+-5.9636\t4.7293\t1.7384\t-3.3395\t2.2166\t2.2397\t-1.9459\n+-5.6726\t0.3054\t0.5239\t-1.8098\t5.0980\t-2.8714\t0.6939\n+-1.6695\t0.2400\t-0.7679\t2.4374\t-3.4909\t1.2909\t-3.0332\n+-5.3286\t2.8034\t2.9264\t-1.9200\t3.2485\t-0.4062\t0.7878\n+-4.1365\t1.8401\t2.8304\t-2.6400\t1.8543\t-0.6305\t-0.9748\n+-7.3517\t2.9113\t-0.3355\t-1.6157\t4.9779\t-1.2069\t1.0193\n+-4.1967\t-0
.9651\t-1.0766\t-0.8147\t1.4782\t-2.8124\t-0.7958\n+-3.3859\t1.1867\t0.7010\t1.8196\t0.6206\t1.2032\t-2.4485\n+-4.8094\t3.7086\t-1.5026\t-1.1737\t-0.4332\t-1.3418\t2.0650\n+-3.6897\t4.9698\t1.8855\t1.2100\t-2.0871\t1.2348\t3.0630\n+-4.6351\t1.0548\t0.6174\t-1.1733\t1.8384\t-1.7480\t-0.2812\n+-2.5884\t2.4750\t0.7398\t1.9273\t-3.9507\t1.3361\t0.0575\n+-4.9906\t4.0057\t3.2665\t-2.0939\t2.9379\t0.7270\t0.5404\n+-3.2473\t1.8445\t0.2434\t-0.5658\t-1.9457\t-2.2797\t2.6466\n+-2.3835\t3.6974\t0.5424\t0.1868\t-5.9273\t1.4366\t0.1764\n+-4.4944\t4.1873\t2.5387\t2.3093\t0.7909\t2.4807\t0.3451\n+-4.0672\t3.1357\t0.7866\t0.0835\t-2.6209\t-0.6464\t2.6246\n+-7.7257\t2.4335\t-0.7824\t4.0495\t6.3356\t-0.7156\t2.2593\n+-7.7345\t3.3520\t-3.0476\t3.4232\t5.3875\t-0.3854\t2.2324\n+-2.5709\t1.6372\t2.0053\t0.0627\t-1.1963\t-1.7860\t2.6362\n+-1.4913\t0.2742\t2.0702\t-1.8860\t-3.6079\t-3.4077\t2.9305\n+-3.8738\t0.4747\t-0.3085\t0.2335\t-0.4071\t0.0756\t-3.0254\n+-4.1544\t0.8081'..b'.6621\t-2.0543\n+2.4622\t-4.3923\t4.6318\t3.2286\t0.8290\t-1.3138\t-0.7092\n+-0.3768\t-3.3689\t0.9185\t3.4446\t-0.7548\t0.1917\t-2.8933\n+-1.5571\t-1.2636\t-4.4107\t-0.4740\t-0.5614\t-1.1182\t0.8865\n+0.7017\t-2.2599\t-3.0353\t-3.5220\t0.7490\t0.2524\t-1.4167\n+-2.0500\t-5.2108\t-0.5936\t2.8702\t0.0865\t1.7500\t1.8663\n+3.0137\t-5.0641\t3.4237\t3.1012\t0.4598\t0.6318\t2.2810\n+-1.5219\t-1.6394\t-4.8442\t2.7113\t-0.5852\t1.1910\t-1.9068\n+-2.3981\t-6.0576\t-2.8749\t-2.3686\t-1.0002\t2.5079\t0.2015\n+1.8921\t-6.3274\t-0.7450\t-2.7750\t0.8666\t2.2501\t-0.2021\n+1.5814\t-6.5836\t3.2901\t0.9566\t1.9993\t3.1253\t1.3281\n+-0.9830\t-5.8270\t-1.5955\t-1.7203\t0.1380\t2.4005\t-0.5153\n+0.2115\t-3.6162\t0.4305\t1.7197\t0.5603\t-0.5445\t-0.9444\n+-0.9981\t-5.0750\t-2.8310\t-3.8982\t-0.8326\t0.7471\t2.2110\n+1.8743\t-7.4837\t2.1129\t1.0955\t1.0676\t3.9861\t1.5601\n+-0.2173\t-3.4647\t3.5740\t-0.5041\t0.5984\t-1.8010\t-0.3018\n+-0.8708\t-3.0747\t-5.4424\t-0.2924\t-0.2948\t1.1786\t1.4711\n+-2.1813\t-4.1987\t-0.6857\t-2.5649\t-0.1683\t-1.1085\t0.0401\n+1.0261\t-4.8342\t-2.5095\t-3.2447\t0.3524\t0.5019\t2.4819\n+1.3312\t-4.7451\t1.9789\t-1.0770\t0.2929\t0.0568\t-1.0795\n+-0.4084\t-3.3231\t1.7444\t-2.2754\t0.1706\t-2.6516\t-2.3484\n+0.1271\t-3.6357\t5.1002\t-0.8028\t0.8378\t-1.9041\t-2.1182\n+-0.3108\t-2.5467\t0.0172\t0.5997\t-0.8420\t-2.4078\t0.2985\n+2.3229\t-7.7318\t0.5990\t1.5900\t0.6791\t4.9678\t0.7111\n+2.2490\t-5.2254\t2.0891\t-0.4591\t1.4099\t1.2195\t0.1653\n+0.8997\t-5.9620\t2.1464\t-3.1994\t0.2412\t0.6705\t-0.2757\n+1.6351\t-2.0544\t-3.3725\t2.7118\t-0.5164\t0.9859\t-0.7674\n+-0.3884\t-3.7377\t-2.0037\t-3.7865\t-0.8058\t-0.1159\t0.5169\n+-1.2854\t-0.6815\t-3.7704\t-1.2835\t-0.8277\t-2.8829\t0.1127\n+2.7532\t-3.6849\t1.9801\t-1.1850\t1.8530\t-1.4601\t0.8846\n+1.3367\t-3.5217\t-4.2259\t-1.7081\t0.0284\t0.6012\t-0.8891\n+0.2084\t-6.3242\t3.0772\t-2.9331\t0.7925\t1.0186\t-0.2763\n+0.4991\t-4.0107\t-3.9909\t-4.2000\t0.7601\t2.0399\t0.2795\n+2.0422\t-3.9227\t1.1896\t0.2482\t0.1053\t-0.3852\t-1.3664\n+1.3943\t-5.9124\t0.3596\t-2.3189\t1.3754\t1.5829\t1.1913\n+0.0278\t-3.4528\t-0.0648\t-0.0516\t0.5044\t-0.1313\t0.8862\n+-1.1295\t-4.1677\t-2.8210\t1.7549\t0.2425\t1.7050\t-0.0711\n+-0.8205\t-2.8645\t1.3685\t1.3576\t-0.0320\t-2.0074\t0.6672\n+-2.3925\t-5.9955\t-2.2835\t-4.1743\t-0.4680\t2.2192\t0.7221\n+2.4329\t-4.5049\t-0.5699\t-3.2655\t1.5086\t0.7033\t-0.3623\n+0.0935\t-5.5951\t-2.6666\t-0.0202\t0.4672\t3.1178\t0.7036\n+2.0608\t-3.6542\t2.5894\t0.5949\t0.1042\t-1.9610\t-2.1251\n+1.3668\t-4.3150\t1.5058\t-0.8470\t0.3603\t0.1046\t-1.2965\n+-0.7108\t-5.7729\t
0.8471\t-2.1552\t-0.1474\t0.4989\t0.8110\n+2.0865\t-7.5741\t2.4725\t-3.2358\t1.9803\t3.5518\t1.0200\n+-0.7413\t-6.5902\t-3.3497\t-4.0118\t-0.7169\t2.8734\t-0.0154\n+1.9231\t-4.1669\t0.6324\t3.6388\t1.1516\t1.4119\t1.9045\n+0.9972\t-3.3835\t-3.8478\t0.4535\t-0.1122\t0.9351\t-0.6707\n+-1.4850\t-3.7556\t-0.8861\t2.0163\t-1.0464\t-0.5987\t0.6890\n+-0.6859\t-4.0281\t0.3683\t3.6759\t-1.1975\t0.3557\t-1.6469\n+-1.8927\t-1.7090\t-2.2045\t-2.1649\t-0.6862\t-2.7007\t-2.6949\n+-2.6846\t-3.5555\t-3.8040\t-2.2653\t-1.5920\t-1.2491\t0.6841\n+0.3020\t-5.6926\t2.2922\t-1.7143\t1.0921\t0.3534\t-0.3102\n+0.6378\t0.0965\t-3.4219\t1.3310\t0.4472\t-2.1190\t-0.2407\n+-1.9094\t-4.6694\t-5.0032\t-0.7066\t-0.5111\t2.2129\t0.1366\n+-0.5069\t-7.3869\t2.1287\t-3.7626\t-0.0722\t1.8540\t3.3739\n+-0.8111\t-2.1449\t-2.0660\t1.3755\t-1.4613\t-1.8106\t-0.2669\n+-2.1210\t-3.7767\t-1.4218\t-1.0966\t-0.4062\t0.3971\t-0.5842\n+-1.0161\t-5.3783\t-5.2064\t-4.0163\t-0.1682\t2.9910\t0.1047\n+-1.0107\t-5.1148\t-0.6561\t0.8090\t-1.0136\t0.7724\t2.2620\n+0.3727\t-5.6904\t3.2177\t3.2668\t1.1544\t0.6645\t2.2839\n+1.8337\t-4.6784\t1.9878\t1.5645\t0.4593\t-0.2257\t1.9580\n+-0.1967\t-4.0023\t-0.2283\t-1.0764\t0.7112\t-0.1925\t-1.3203\n+0.2941\t-5.0048\t-2.0272\t2.6678\t-0.5445\t2.7349\t1.9569\n+2.5823\t-7.5935\t2.0663\t0.1052\t1.8888\t3.5327\t2.8774\n+-2.5822\t-2.1414\t-2.1563\t3.1156\t-2.0672\t-0.7239\t-2.1677\n+-1.2405\t-3.6815\t-2.5594\t1.7052\t-0.7032\t1.3623\t1.5195\n+-2.8369\t-5.1205\t-4.5254\t-0.1156\t-0.6598\t4.1633\t0.1587\n+0.3149\t-6.2694\t-2.0587\t-0.3757\t0.1556\t3.2380\t2.4606\n+2.0357\t-3.8799\t4.1492\t4.3973\t-0.1287\t-0.6609\t-2.9551\n+1.4539\t-5.0928\t-1.1201\t1.5114\t1.4226\t3.0672\t1.1131\n+0.0900\t-4.8785\t0.4034\t1.7989\t1.0459\t2.5805\t-0.6281\n+0.0903\t-5.5698\t2.3322\t-1.8054\t-0.3871\t-1.4925\t1.4869\n+-3.2787\t-0.9351\t-3.4811\t2.8220\t-2.2307\t-1.9613\t-0.5324\n+-0.8901\t-3.3226\t-2.1998\t-3.3552\t-0.2246\t-0.9037\t0.3637\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pca_incremental_output.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_incremental_output.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,300 @@\n+-4.5041\t1.2787\t-0.9244\t-0.6417\t2.4742\t-0.6790\t-2.2376\n+-2.3058\t0.3190\t2.0467\t-1.5089\t-1.7879\t-2.8580\t2.3471\n+-3.6298\t1.9778\t0.5695\t3.2844\t-2.1532\t1.0649\t0.2268\n+-4.9371\t1.9948\t-1.1517\t-0.7883\t0.1443\t0.3281\t-1.6340\n+-5.1251\t2.1148\t1.0578\t-1.2113\t2.7850\t-0.4718\t-0.5876\n+-5.2311\t3.1236\t2.4521\t-0.1645\t2.0950\t1.3657\t-1.2221\n+-5.2736\t3.9401\t3.1613\t-0.4221\t1.4421\t0.6351\t1.4170\n+-5.8843\t2.3108\t-0.9970\t-2.5925\t4.5634\t-1.7216\t-0.6943\n+-3.0847\t0.9348\t1.2297\t-0.1673\t-1.5983\t-0.3054\t-0.8598\n+-5.0726\t2.7358\t2.1948\t0.6672\t2.8109\t0.2199\t0.2954\n+-3.0489\t-1.4621\t-1.4039\t2.2051\t-2.1125\t-3.5712\t1.8233\n+-3.9828\t0.4789\t-0.4798\t3.1414\t-0.2706\t1.0323\t-3.0673\n+-4.7769\t0.5637\t-0.6189\t2.4042\t1.8522\t-0.8892\t0.1666\n+-2.8177\t-0.2411\t1.1106\t-1.6952\t-0.7266\t-2.7600\t-0.0377\n+-3.3395\t0.6116\t-2.4104\t0.4482\t-1.2702\t-2.2185\t1.2082\n+-3.7895\t1.3834\t1.9512\t0.4855\t1.2103\t0.2934\t-1.2998\n+-5.6850\t2.9597\t-0.6627\t-1.3921\t1.9849\t-0.8803\t0.8756\n+-2.3684\t1.9853\t2.1848\t1.9063\t-1.9759\t2.9765\t-2.6155\n+-2.7658\t1.1415\t-1.5911\t-2.5974\t-4.2491\t-2.4650\t1.0467\n+-3.4687\t2.6724\t0.7399\t-2.5064\t-1.2477\t1.3807\t-2.8392\n+-4.1960\t1.4928\t1.5341\t3.2301\t1.6231\t0.5496\t-0.1931\n+-6.8974\t5.6555\t-0.4728\t-0.2712\t1.6418\t2.8355\t-1.2524\n+-5.4610\t5.7033\t1.6135\t-2.4726\t-0.3671\t0.4024\t2.5259\n+-3.6199\t0.4197\t-3.3176\t0.7440\t-0.9565\t-3.1444\t1.3050\n+-6.9755\t6.9895\t0.8969\t-1.2372\t2.7908\t1.0709\t2.8530\n+-2.4744\t-2.4406\t-1.2029\t1.7749\t-1.8143\t-2.5760\t-1.4119\n+-2.2670\t1.4244\t0.3150\t-0.3939\t-4.8261\t0.5653\t-1.5405\n+-1.2997\t1.8646\t1.6713\t0.9233\t-3.9371\t1.9971\t-2.1376\n+-2.9030\t2.7832\t2.8769\t-2.1591\t-0.9938\t-1.4089\t2.2805\n+-4.0193\t-0.7911\t-2.3511\t-2.6334\t0.8198\t-3.1752\t-1.7134\n+-4.6438\t5.1543\t2.9044\t-3.5749\t1.3863\t1.1820\t0.2145\n+-3.1379\t-0.3608\t-3.4466\t2.6176\t-2.5651\t-3.1252\t2.2431\n+-3.3334\t0.1605\t-2.3825\t-0.8492\t-0.2283\t-0.9548\t-3.0823\n+-1.8428\t-0.3384\t-0.2287\t1.0854\t-2.4548\t0.2911\t-3.1773\n+-5.7377\t2.9593\t-1.3065\t2.8244\t2.0424\t0.2718\t0.8933\n+-7.5818\t1.9024\t-2.0175\t1.7637\t5.6534\t-0.4313\t-0.7429\n+-6.7842\t3.2537\t-0.5172\t1.7928\t4.5232\t1.0479\t-0.2527\n+-2.6542\t4.2982\t-0.6507\t-0.7068\t-6.0759\t0.4216\t2.2052\n+-4.7066\t-0.0723\t0.5610\t2.7188\t3.2642\t-1.3372\t-0.2390\n+-3.1046\t-0.4176\t-2.2528\t3.1483\t-2.1742\t-0.7089\t-0.9199\n+-5.1411\t2.5961\t1.4787\t2.5253\t1.9749\t2.6935\t-2.1514\n+-3.0358\t-0.4673\t-3.0178\t-0.1183\t-2.0737\t-1.9430\t-1.7089\n+-4.8013\t3.3868\t2.2209\t-2.8917\t1.9785\t1.7562\t-2.4000\n+-4.8966\t1.5345\t-1.9107\t1.6206\t1.0586\t-2.4528\t2.2876\n+-6.4597\t2.5302\t-0.0772\t3.0385\t5.9297\t0.5882\t-0.3289\n+-5.9218\t3.7935\t-0.2556\t3.3005\t2.0236\t1.1245\t1.1257\n+-2.6684\t3.1373\t-0.3467\t-3.5191\t-5.6729\t-0.1143\t-0.0563\n+-6.6356\t5.9313\t2.3230\t-4.2617\t4.5651\t2.7706\t-2.2094\n+-5.6003\t4.9145\t-0.0899\t0.9846\t1.1772\t1.7796\t0.3640\n+-7.1334\t4.7398\t-0.3982\t-1.1846\t4.2620\t0.3247\t0.9009\n+-3.7706\t-0.4166\t-2.7645\t3.4809\t0.1927\t-2.6134\t1.7933\n+-2.5829\t3.6848\t2.0472\t0.9575\t-3.0212\t3.5198\t-2.3061\n+-5.9636\t4.7293\t1.7384\t-3.3395\t2.2166\t2.2397\t-1.9459\n+-5.6726\t0.3054\t0.5239\t-1.8098\t5.0980\t-2.8714\t0.6939\n+-1.6695\t0.2400\t-0.7679\t2.4374\t-3.4909\t1.2909\t-3.0332\n+-5.3286\t2.8034\t2.9264\t-1.9200\t3.2485\t-0.4062\t0.7878\n+-4.1365\t1.8401\t2.8304\t-2.6400\t1.8543\t-0.6305\t-0.9748\n+-7.3517\t2.9113\t-0.3355\t-1.6157\t4.9779\t-1.2069\t1.0193\n+-4.1967\t-0
.9651\t-1.0766\t-0.8147\t1.4782\t-2.8124\t-0.7958\n+-3.3859\t1.1867\t0.7010\t1.8196\t0.6206\t1.2032\t-2.4485\n+-4.8094\t3.7086\t-1.5026\t-1.1737\t-0.4332\t-1.3418\t2.0650\n+-3.6897\t4.9698\t1.8855\t1.2100\t-2.0871\t1.2348\t3.0630\n+-4.6351\t1.0548\t0.6174\t-1.1733\t1.8384\t-1.7480\t-0.2812\n+-2.5884\t2.4750\t0.7398\t1.9273\t-3.9507\t1.3361\t0.0575\n+-4.9906\t4.0057\t3.2665\t-2.0939\t2.9379\t0.7270\t0.5404\n+-3.2473\t1.8445\t0.2434\t-0.5658\t-1.9457\t-2.2797\t2.6466\n+-2.3835\t3.6974\t0.5424\t0.1868\t-5.9273\t1.4366\t0.1764\n+-4.4944\t4.1873\t2.5387\t2.3093\t0.7909\t2.4807\t0.3451\n+-4.0672\t3.1357\t0.7866\t0.0835\t-2.6209\t-0.6464\t2.6246\n+-7.7257\t2.4335\t-0.7824\t4.0495\t6.3356\t-0.7156\t2.2593\n+-7.7345\t3.3520\t-3.0476\t3.4232\t5.3875\t-0.3854\t2.2324\n+-2.5709\t1.6372\t2.0053\t0.0627\t-1.1963\t-1.7860\t2.6362\n+-1.4913\t0.2742\t2.0702\t-1.8860\t-3.6079\t-3.4077\t2.9305\n+-3.8738\t0.4747\t-0.3085\t0.2335\t-0.4071\t0.0756\t-3.0254\n+-4.1544\t0.8081'..b'.6621\t-2.0543\n+2.4622\t-4.3923\t4.6318\t3.2286\t0.8290\t-1.3138\t-0.7092\n+-0.3768\t-3.3689\t0.9185\t3.4446\t-0.7548\t0.1917\t-2.8933\n+-1.5571\t-1.2636\t-4.4107\t-0.4740\t-0.5614\t-1.1182\t0.8865\n+0.7017\t-2.2599\t-3.0353\t-3.5220\t0.7490\t0.2524\t-1.4167\n+-2.0500\t-5.2108\t-0.5936\t2.8702\t0.0865\t1.7500\t1.8663\n+3.0137\t-5.0641\t3.4237\t3.1012\t0.4598\t0.6318\t2.2810\n+-1.5219\t-1.6394\t-4.8442\t2.7113\t-0.5852\t1.1910\t-1.9068\n+-2.3981\t-6.0576\t-2.8749\t-2.3686\t-1.0002\t2.5079\t0.2015\n+1.8921\t-6.3274\t-0.7450\t-2.7750\t0.8666\t2.2501\t-0.2021\n+1.5814\t-6.5836\t3.2901\t0.9566\t1.9993\t3.1253\t1.3281\n+-0.9830\t-5.8270\t-1.5955\t-1.7203\t0.1380\t2.4005\t-0.5153\n+0.2115\t-3.6162\t0.4305\t1.7197\t0.5603\t-0.5445\t-0.9444\n+-0.9981\t-5.0750\t-2.8310\t-3.8982\t-0.8326\t0.7471\t2.2110\n+1.8743\t-7.4837\t2.1129\t1.0955\t1.0676\t3.9861\t1.5601\n+-0.2173\t-3.4647\t3.5740\t-0.5041\t0.5984\t-1.8010\t-0.3018\n+-0.8708\t-3.0747\t-5.4424\t-0.2924\t-0.2948\t1.1786\t1.4711\n+-2.1813\t-4.1987\t-0.6857\t-2.5649\t-0.1683\t-1.1085\t0.0401\n+1.0261\t-4.8342\t-2.5095\t-3.2447\t0.3524\t0.5019\t2.4819\n+1.3312\t-4.7451\t1.9789\t-1.0770\t0.2929\t0.0568\t-1.0795\n+-0.4084\t-3.3231\t1.7444\t-2.2754\t0.1706\t-2.6516\t-2.3484\n+0.1271\t-3.6357\t5.1002\t-0.8028\t0.8378\t-1.9041\t-2.1182\n+-0.3108\t-2.5467\t0.0172\t0.5997\t-0.8420\t-2.4078\t0.2985\n+2.3229\t-7.7318\t0.5990\t1.5900\t0.6791\t4.9678\t0.7111\n+2.2490\t-5.2254\t2.0891\t-0.4591\t1.4099\t1.2195\t0.1653\n+0.8997\t-5.9620\t2.1464\t-3.1994\t0.2412\t0.6705\t-0.2757\n+1.6351\t-2.0544\t-3.3725\t2.7118\t-0.5164\t0.9859\t-0.7674\n+-0.3884\t-3.7377\t-2.0037\t-3.7865\t-0.8058\t-0.1159\t0.5169\n+-1.2854\t-0.6815\t-3.7704\t-1.2835\t-0.8277\t-2.8829\t0.1127\n+2.7532\t-3.6849\t1.9801\t-1.1850\t1.8530\t-1.4601\t0.8846\n+1.3367\t-3.5217\t-4.2259\t-1.7081\t0.0284\t0.6012\t-0.8891\n+0.2084\t-6.3242\t3.0772\t-2.9331\t0.7925\t1.0186\t-0.2763\n+0.4991\t-4.0107\t-3.9909\t-4.2000\t0.7601\t2.0399\t0.2795\n+2.0422\t-3.9227\t1.1896\t0.2482\t0.1053\t-0.3852\t-1.3664\n+1.3943\t-5.9124\t0.3596\t-2.3189\t1.3754\t1.5829\t1.1913\n+0.0278\t-3.4528\t-0.0648\t-0.0516\t0.5044\t-0.1313\t0.8862\n+-1.1295\t-4.1677\t-2.8210\t1.7549\t0.2425\t1.7050\t-0.0711\n+-0.8205\t-2.8645\t1.3685\t1.3576\t-0.0320\t-2.0074\t0.6672\n+-2.3925\t-5.9955\t-2.2835\t-4.1743\t-0.4680\t2.2192\t0.7221\n+2.4329\t-4.5049\t-0.5699\t-3.2655\t1.5086\t0.7033\t-0.3623\n+0.0935\t-5.5951\t-2.6666\t-0.0202\t0.4672\t3.1178\t0.7036\n+2.0608\t-3.6542\t2.5894\t0.5949\t0.1042\t-1.9610\t-2.1251\n+1.3668\t-4.3150\t1.5058\t-0.8470\t0.3603\t0.1046\t-1.2965\n+-0.7108\t-5.7729\t
0.8471\t-2.1552\t-0.1474\t0.4989\t0.8110\n+2.0865\t-7.5741\t2.4725\t-3.2358\t1.9803\t3.5518\t1.0200\n+-0.7413\t-6.5902\t-3.3497\t-4.0118\t-0.7169\t2.8734\t-0.0154\n+1.9231\t-4.1669\t0.6324\t3.6388\t1.1516\t1.4119\t1.9045\n+0.9972\t-3.3835\t-3.8478\t0.4535\t-0.1122\t0.9351\t-0.6707\n+-1.4850\t-3.7556\t-0.8861\t2.0163\t-1.0464\t-0.5987\t0.6890\n+-0.6859\t-4.0281\t0.3683\t3.6759\t-1.1975\t0.3557\t-1.6469\n+-1.8927\t-1.7090\t-2.2045\t-2.1649\t-0.6862\t-2.7007\t-2.6949\n+-2.6846\t-3.5555\t-3.8040\t-2.2653\t-1.5920\t-1.2491\t0.6841\n+0.3020\t-5.6926\t2.2922\t-1.7143\t1.0921\t0.3534\t-0.3102\n+0.6378\t0.0965\t-3.4219\t1.3310\t0.4472\t-2.1190\t-0.2407\n+-1.9094\t-4.6694\t-5.0032\t-0.7066\t-0.5111\t2.2129\t0.1366\n+-0.5069\t-7.3869\t2.1287\t-3.7626\t-0.0722\t1.8540\t3.3739\n+-0.8111\t-2.1449\t-2.0660\t1.3755\t-1.4613\t-1.8106\t-0.2669\n+-2.1210\t-3.7767\t-1.4218\t-1.0966\t-0.4062\t0.3971\t-0.5842\n+-1.0161\t-5.3783\t-5.2064\t-4.0163\t-0.1682\t2.9910\t0.1047\n+-1.0107\t-5.1148\t-0.6561\t0.8090\t-1.0136\t0.7724\t2.2620\n+0.3727\t-5.6904\t3.2177\t3.2668\t1.1544\t0.6645\t2.2839\n+1.8337\t-4.6784\t1.9878\t1.5645\t0.4593\t-0.2257\t1.9580\n+-0.1967\t-4.0023\t-0.2283\t-1.0764\t0.7112\t-0.1925\t-1.3203\n+0.2941\t-5.0048\t-2.0272\t2.6678\t-0.5445\t2.7349\t1.9569\n+2.5823\t-7.5935\t2.0663\t0.1052\t1.8888\t3.5327\t2.8774\n+-2.5822\t-2.1414\t-2.1563\t3.1156\t-2.0672\t-0.7239\t-2.1677\n+-1.2405\t-3.6815\t-2.5594\t1.7052\t-0.7032\t1.3623\t1.5195\n+-2.8369\t-5.1205\t-4.5254\t-0.1156\t-0.6598\t4.1633\t0.1587\n+0.3149\t-6.2694\t-2.0587\t-0.3757\t0.1556\t3.2380\t2.4606\n+2.0357\t-3.8799\t4.1492\t4.3973\t-0.1287\t-0.6609\t-2.9551\n+1.4539\t-5.0928\t-1.1201\t1.5114\t1.4226\t3.0672\t1.1131\n+0.0900\t-4.8785\t0.4034\t1.7989\t1.0459\t2.5805\t-0.6281\n+0.0903\t-5.5698\t2.3322\t-1.8054\t-0.3871\t-1.4925\t1.4869\n+-3.2787\t-0.9351\t-3.4811\t2.8220\t-2.2307\t-1.9613\t-0.5324\n+-0.8901\t-3.3226\t-2.1998\t-3.3552\t-0.2246\t-0.9037\t0.3637\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pca_input.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_input.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,300 @@\n+9.579515262220434924e-01\t3.726315038026500881e-02\t1.192922318396479886e+00\t2.208390853256675612e+00\t1.545465138214503487e+00\t6.587709093437858598e+00\t2.676346016926679283e+00\t2.002623182927643519e+00\t1.960111203222609788e+00\t1.244000891424809074e-01\n+5.456295681254983432e-01\t3.130646354641937745e-01\t6.007098680445335681e+00\t3.644816120915847169e+00\t1.043692024581632793e+00\t1.970255599020762194e+00\t7.768759398827200791e-01\t6.435887304171391543e-01\t4.933655623778216537e+00\t5.543893932919085055e-01\n+9.341619128268041639e-02\t2.558875163847860179e-01\t3.752555601842988420e+00\t5.559464534602225783e+00\t1.010576057771932490e+00\t3.077425253345768663e+00\t6.807691255479467962e+00\t2.571623094724099445e+00\t3.153422029415700933e+00\t3.550137612924495478e-01\n+1.471186662367064413e-01\t4.878675188546680030e-01\t1.721359340060558907e+00\t4.167324499252111458e+00\t1.985766726491912326e+00\t5.316541920919662267e+00\t2.962330081397085202e+00\t6.305674360296442682e-02\t1.008632361074792705e+00\t1.954028793571292999e-01\n+4.517113041288302044e-01\t2.082857695479499172e-01\t2.670306166999409037e+00\t3.583860843014433861e+00\t3.162463432948562581e+00\t7.681858694197329029e+00\t1.942922465406477395e+00\t1.098113345375552274e+00\t3.412374063138972069e+00\t4.177369511025994520e-01\n+7.303789721527453871e-02\t1.033872090825091461e-01\t1.795276419298532655e+00\t5.719768891948289635e+00\t2.977579113876869243e+00\t7.844204854679616368e+00\t3.384744044988120759e+00\t2.998214211517472805e+00\t4.270749911114063657e+00\t4.257092154152972707e-01\n+6.359175475607969918e-02\t1.242103944058722265e-01\t4.586547825302377923e+00\t6.614372405184473891e+00\t1.923730124171568079e+00\t7.681919225138223339e+00\t2.793831391806054665e+00\t1.406817959154060160e+00\t4.716223047352296582e+00\t6.211888780251716424e-01\n+9.886982415066914998e-01\t4.584180816802013725e-02\t2.793304094637673707e+00\t1.871823089629541581e+00\t2.607924236602056745e+00\t9.206004712207825236e+00\t4.623226894276893928e-01\t2.966776892612695615e+00\t1.485603429563761679e+00\t4.654181765444357355e-01\n+1.741783097973695904e-01\t2.446343910749277373e-01\t2.534641383481000876e+00\t4.584056834431557093e+00\t2.890133078107056441e+00\t2.823965747627366518e+00\t3.034991597984873835e+00\t6.600338090755860643e-01\t3.752675891901568583e+00\t2.970984388064664694e-01\n+4.690204907875814566e-01\t2.929759132721354575e-02\t3.548495454913231484e+00\t4.482718753445549709e+00\t3.203674397180960920e+00\t8.065801814409903514e+00\t3.773297073513870004e+00\t2.113193009064737282e+00\t4.579511767735440664e+00\t4.024397631591818403e-01\n+6.941135206286030979e-03\t2.133769823942091026e-01\t6.135694255460425239e+00\t5.395845311332483352e-01\t1.504596129252289138e-01\t9.253872174123642935e-01\t4.483882842463830620e+00\t2.321382684831085008e+00\t2.576937740611303873e+00\t1.109827593740932983e-01\n+2.746007390787119640e-03\t6.638140727681796083e-02\t4.108407001279307247e-01\t3.212637467080699416e+00\t4.920639575099698959e-01\t4.018583101004429281e+00\t6.796866753550522056e+00\t1.316040638035593568e+00\t2.620935479341985896e+00\t2.976887894006831070e-01\n+3.557722260573822348e-01\t3.727028444011896702e-01\t3.811343797909652054e+00\t1.715026071489426762e+00\t5.294113011251582179e-01\t5.980858755297242979e+00\t5.404241675653790544e+00\t1.825392885196229997e+00\t2.835734218533411788e+00\t3.200816860194626301e-01\n+3.642510923301112147e-01\t7.309428690756680780e-03\t3.666945761684001326e+00\t2.430979500086832612e+00\t3.031996394197797429e+00\t2.70809390
0045675184e+00\t7.623775896209878944e-01\t7.865319376558289610e-01\t4.100162854521766320e+00\t8.307551984431076342e-01\n+9.927215581748555229e-01\t4.537144790675278760e-01\t5.145060290158723681e+00\t2.151991198713361086e+00\t4.862387339994040936e-01\t2.589672936803951053e+00\t3.398039583724480561e+00\t2.809787771375323651e+00\t8.864381880832911120e-01\t3.331137683083518208e-01\n+5.057807499542814611e-01\t1.402717130943404999e-01\t1.883175574051066725e+00\t4.122193241585520695e+00\t1.035989381539633492e+00\t5.670773882751129591e+00\t3.687442345139384958e+00\t1.684532121504235480e+00\t4.6421085696735'..b'1774e+00\t5.577399746499002831e+00\t3.951771200670569417e+00\t5.006730198987506819e-01\t3.596816929922009187e+00\t4.994925384211443831e-01\t4.413151350909869208e+00\t1.418601174288871114e+00\t4.839712146933547565e+00\t9.760601828982439221e-01\n+1.878213562249231749e+00\t4.684555617152866169e+00\t3.889299363513806895e+00\t2.336972704101780707e-02\t4.196904367361877775e+00\t1.636484945202322683e+00\t6.120210824336769662e+00\t1.961635110777701918e+00\t9.569470113865142125e+00\t2.306199899979174406e-01\n+3.431803129291879362e+00\t4.012460279071989433e+00\t3.786995576138590280e+00\t3.401138113223747261e-01\t3.171776225702972241e+00\t5.533791211625698203e-01\t4.380475363051796300e+00\t2.758179724465671612e+00\t8.238428864941681695e+00\t1.611340397700082860e-01\n+2.349694539920080150e+00\t3.425362999949447929e+00\t7.467879539582895632e-01\t1.309946362139290388e-01\t5.805916871698890702e-01\t1.657140208444111584e+00\t2.281415527558649625e+00\t1.158152241249966297e+00\t4.945420862933350925e+00\t7.787419149878671565e-02\n+2.871323242818877297e+00\t6.534521291113140506e+00\t3.096514924644943711e+00\t6.667111185354958058e-01\t4.581752374807475547e+00\t4.565652293776831794e-01\t6.933935392164753964e+00\t7.018283743141698894e-01\t4.265168364923495936e+00\t6.901545552296244335e-01\n+4.035690704229409675e+00\t8.932606707872219332e+00\t2.440117375879147676e+00\t1.749555891357814641e-01\t3.388836934377787546e+00\t1.096540733110390020e+00\t4.167898589768252116e+00\t1.510982178698893286e+00\t9.444730460473605760e+00\t1.685190174932924556e-01\n+1.895368854776446899e-01\t8.111771833719306768e-01\t1.496918109897968030e+00\t8.739513189132178672e-01\t3.271337369168346054e+00\t7.030233822128264531e-01\t6.401423581208871560e+00\t1.029249503237427366e+00\t2.009554833626514103e+00\t6.565754492988090663e-01\n+1.951751457247066224e+00\t5.044712160044664273e+00\t3.421432883480826970e+00\t8.109581787816579901e-01\t1.128731846702503816e+00\t1.209060606591956688e+00\t5.696978360178965417e+00\t1.330520244258292406e+00\t2.811362781860978100e+00\t4.875439923021647193e-01\n+7.946496095974958651e-01\t7.959850102243462011e+00\t8.886633323027690601e-01\t8.912678301211978127e-01\t1.506339073727302580e+00\t1.772468056111744072e+00\t5.209003007333696367e+00\t1.910008906957938679e+00\t6.469615179120823401e-01\t4.738444478905338153e-01\n+2.923037361500433029e+00\t8.570876002749072242e+00\t2.708321601975356430e+00\t3.282691449572501252e-01\t1.618103487581618039e+00\t7.910655433987572316e-01\t4.227704764846127006e+00\t1.475926172111246215e+00\t4.356475937759435091e+00\t6.465232347181339989e-01\n+2.522665440307211071e+00\t1.963912550509897548e-01\t1.722541801969912356e-02\t7.082969529076817983e-01\t4.976037482905154796e+00\t2.468844590379837278e-02\t6.706997326725380404e+00\t1.309674497144936556e+00\t9.860787708680341268e+00\t8.357747879146570913e-02\n+4.255818572678702338e+00\t6.744878797873571585e+00\t1.760033311394217925e+00\t3.6760557269
83046279e-01\t4.092141947481376718e+00\t1.612470654985766583e+00\t5.778498425542084149e+00\t1.079514954339773070e+00\t5.568696898336217060e+00\t8.545812171197980645e-01\n+2.446430878144840282e+00\t5.234567351584125561e+00\t5.715867278950135244e-01\t7.003532329827356628e-01\t4.568505639054738054e+00\t1.943174193938479233e+00\t5.764774523003604934e+00\t1.937713945979894881e+00\t6.298501537697791086e+00\t9.026276578818063223e-01\n+1.273163398740040364e+00\t4.162415703791141119e+00\t3.455865592048522394e+00\t3.656764736071405064e-02\t1.954721520949571190e+00\t2.734474567634836983e-01\t8.281588142887611470e-01\t4.484584054747960957e-01\t7.652386767723299954e+00\t6.708336181638869800e-01\n+3.272573882698126546e-01\t2.771858331744477821e-01\t3.650874191969583293e+00\t8.313793690203402642e-01\t4.721314279438468375e+00\t1.073305358961462286e+00\t5.882774132732964567e+00\t2.199287336285580263e+00\t3.798152586004966969e-01\t5.333129895506141249e-01\n+2.307814819672060480e+00\t4.104095493932964800e+00\t2.489576320635186413e+00\t4.496836765063628727e-01\t4.578631912903595946e+00\t1.329946843468508222e+00\t1.683934112573501896e-01\t7.002919109156724442e-02\t2.471193668340454508e+00\t5.351430311502515247e-01\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pca_input_with_headers.dat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pca_input_with_headers.dat Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,301 @@\n+col_1\tcol_2\tcol_3\tcol_4\tcol_5\tcol_6\tcol_7\tcol_8\tcol_9\tcol_10\n+9.579515262220434924e-01\t3.726315038026500881e-02\t1.192922318396479886e+00\t2.208390853256675612e+00\t1.545465138214503487e+00\t6.587709093437858598e+00\t2.676346016926679283e+00\t2.002623182927643519e+00\t1.960111203222609788e+00\t1.244000891424809074e-01\n+5.456295681254983432e-01\t3.130646354641937745e-01\t6.007098680445335681e+00\t3.644816120915847169e+00\t1.043692024581632793e+00\t1.970255599020762194e+00\t7.768759398827200791e-01\t6.435887304171391543e-01\t4.933655623778216537e+00\t5.543893932919085055e-01\n+9.341619128268041639e-02\t2.558875163847860179e-01\t3.752555601842988420e+00\t5.559464534602225783e+00\t1.010576057771932490e+00\t3.077425253345768663e+00\t6.807691255479467962e+00\t2.571623094724099445e+00\t3.153422029415700933e+00\t3.550137612924495478e-01\n+1.471186662367064413e-01\t4.878675188546680030e-01\t1.721359340060558907e+00\t4.167324499252111458e+00\t1.985766726491912326e+00\t5.316541920919662267e+00\t2.962330081397085202e+00\t6.305674360296442682e-02\t1.008632361074792705e+00\t1.954028793571292999e-01\n+4.517113041288302044e-01\t2.082857695479499172e-01\t2.670306166999409037e+00\t3.583860843014433861e+00\t3.162463432948562581e+00\t7.681858694197329029e+00\t1.942922465406477395e+00\t1.098113345375552274e+00\t3.412374063138972069e+00\t4.177369511025994520e-01\n+7.303789721527453871e-02\t1.033872090825091461e-01\t1.795276419298532655e+00\t5.719768891948289635e+00\t2.977579113876869243e+00\t7.844204854679616368e+00\t3.384744044988120759e+00\t2.998214211517472805e+00\t4.270749911114063657e+00\t4.257092154152972707e-01\n+6.359175475607969918e-02\t1.242103944058722265e-01\t4.586547825302377923e+00\t6.614372405184473891e+00\t1.923730124171568079e+00\t7.681919225138223339e+00\t2.793831391806054665e+00\t1.406817959154060160e+00\t4.716223047352296582e+00\t6.211888780251716424e-01\n+9.886982415066914998e-01\t4.584180816802013725e-02\t2.793304094637673707e+00\t1.871823089629541581e+00\t2.607924236602056745e+00\t9.206004712207825236e+00\t4.623226894276893928e-01\t2.966776892612695615e+00\t1.485603429563761679e+00\t4.654181765444357355e-01\n+1.741783097973695904e-01\t2.446343910749277373e-01\t2.534641383481000876e+00\t4.584056834431557093e+00\t2.890133078107056441e+00\t2.823965747627366518e+00\t3.034991597984873835e+00\t6.600338090755860643e-01\t3.752675891901568583e+00\t2.970984388064664694e-01\n+4.690204907875814566e-01\t2.929759132721354575e-02\t3.548495454913231484e+00\t4.482718753445549709e+00\t3.203674397180960920e+00\t8.065801814409903514e+00\t3.773297073513870004e+00\t2.113193009064737282e+00\t4.579511767735440664e+00\t4.024397631591818403e-01\n+6.941135206286030979e-03\t2.133769823942091026e-01\t6.135694255460425239e+00\t5.395845311332483352e-01\t1.504596129252289138e-01\t9.253872174123642935e-01\t4.483882842463830620e+00\t2.321382684831085008e+00\t2.576937740611303873e+00\t1.109827593740932983e-01\n+2.746007390787119640e-03\t6.638140727681796083e-02\t4.108407001279307247e-01\t3.212637467080699416e+00\t4.920639575099698959e-01\t4.018583101004429281e+00\t6.796866753550522056e+00\t1.316040638035593568e+00\t2.620935479341985896e+00\t2.976887894006831070e-01\n+3.557722260573822348e-01\t3.727028444011896702e-01\t3.811343797909652054e+00\t1.715026071489426762e+00\t5.294113011251582179e-01\t5.980858755297242979e+00\t5.404241675653790544e+00\t1.825392885196229997e+00\t2.835734218533411788e+00\t3.200816860194626301e-01\n+3.642510923301112147e-01\t7.309428690756680780e-03\t3.66694576168400
1326e+00\t2.430979500086832612e+00\t3.031996394197797429e+00\t2.708093900045675184e+00\t7.623775896209878944e-01\t7.865319376558289610e-01\t4.100162854521766320e+00\t8.307551984431076342e-01\n+9.927215581748555229e-01\t4.537144790675278760e-01\t5.145060290158723681e+00\t2.151991198713361086e+00\t4.862387339994040936e-01\t2.589672936803951053e+00\t3.398039583724480561e+00\t2.809787771375323651e+00\t8.864381880832911120e-01\t3.331137683083518208e-01\n+5.057807499542814611e-01\t1.402717130943404999e-01\t1.883175574051066725e+00\t4.122193241585520695e+00\t1.035989381539633492e+00\t5.670773882751129591e+00\t3.6'..b'1774e+00\t5.577399746499002831e+00\t3.951771200670569417e+00\t5.006730198987506819e-01\t3.596816929922009187e+00\t4.994925384211443831e-01\t4.413151350909869208e+00\t1.418601174288871114e+00\t4.839712146933547565e+00\t9.760601828982439221e-01\n+1.878213562249231749e+00\t4.684555617152866169e+00\t3.889299363513806895e+00\t2.336972704101780707e-02\t4.196904367361877775e+00\t1.636484945202322683e+00\t6.120210824336769662e+00\t1.961635110777701918e+00\t9.569470113865142125e+00\t2.306199899979174406e-01\n+3.431803129291879362e+00\t4.012460279071989433e+00\t3.786995576138590280e+00\t3.401138113223747261e-01\t3.171776225702972241e+00\t5.533791211625698203e-01\t4.380475363051796300e+00\t2.758179724465671612e+00\t8.238428864941681695e+00\t1.611340397700082860e-01\n+2.349694539920080150e+00\t3.425362999949447929e+00\t7.467879539582895632e-01\t1.309946362139290388e-01\t5.805916871698890702e-01\t1.657140208444111584e+00\t2.281415527558649625e+00\t1.158152241249966297e+00\t4.945420862933350925e+00\t7.787419149878671565e-02\n+2.871323242818877297e+00\t6.534521291113140506e+00\t3.096514924644943711e+00\t6.667111185354958058e-01\t4.581752374807475547e+00\t4.565652293776831794e-01\t6.933935392164753964e+00\t7.018283743141698894e-01\t4.265168364923495936e+00\t6.901545552296244335e-01\n+4.035690704229409675e+00\t8.932606707872219332e+00\t2.440117375879147676e+00\t1.749555891357814641e-01\t3.388836934377787546e+00\t1.096540733110390020e+00\t4.167898589768252116e+00\t1.510982178698893286e+00\t9.444730460473605760e+00\t1.685190174932924556e-01\n+1.895368854776446899e-01\t8.111771833719306768e-01\t1.496918109897968030e+00\t8.739513189132178672e-01\t3.271337369168346054e+00\t7.030233822128264531e-01\t6.401423581208871560e+00\t1.029249503237427366e+00\t2.009554833626514103e+00\t6.565754492988090663e-01\n+1.951751457247066224e+00\t5.044712160044664273e+00\t3.421432883480826970e+00\t8.109581787816579901e-01\t1.128731846702503816e+00\t1.209060606591956688e+00\t5.696978360178965417e+00\t1.330520244258292406e+00\t2.811362781860978100e+00\t4.875439923021647193e-01\n+7.946496095974958651e-01\t7.959850102243462011e+00\t8.886633323027690601e-01\t8.912678301211978127e-01\t1.506339073727302580e+00\t1.772468056111744072e+00\t5.209003007333696367e+00\t1.910008906957938679e+00\t6.469615179120823401e-01\t4.738444478905338153e-01\n+2.923037361500433029e+00\t8.570876002749072242e+00\t2.708321601975356430e+00\t3.282691449572501252e-01\t1.618103487581618039e+00\t7.910655433987572316e-01\t4.227704764846127006e+00\t1.475926172111246215e+00\t4.356475937759435091e+00\t6.465232347181339989e-01\n+2.522665440307211071e+00\t1.963912550509897548e-01\t1.722541801969912356e-02\t7.082969529076817983e-01\t4.976037482905154796e+00\t2.468844590379837278e-02\t6.706997326725380404e+00\t1.309674497144936556e+00\t9.860787708680341268e+00\t8.357747879146570913e-02\n+4.255818572678702338e+00\t6.744878797873571585e+00\t1.760033311394217925e+00\t3.67
6055726983046279e-01\t4.092141947481376718e+00\t1.612470654985766583e+00\t5.778498425542084149e+00\t1.079514954339773070e+00\t5.568696898336217060e+00\t8.545812171197980645e-01\n+2.446430878144840282e+00\t5.234567351584125561e+00\t5.715867278950135244e-01\t7.003532329827356628e-01\t4.568505639054738054e+00\t1.943174193938479233e+00\t5.764774523003604934e+00\t1.937713945979894881e+00\t6.298501537697791086e+00\t9.026276578818063223e-01\n+1.273163398740040364e+00\t4.162415703791141119e+00\t3.455865592048522394e+00\t3.656764736071405064e-02\t1.954721520949571190e+00\t2.734474567634836983e-01\t8.281588142887611470e-01\t4.484584054747960957e-01\t7.652386767723299954e+00\t6.708336181638869800e-01\n+3.272573882698126546e-01\t2.771858331744477821e-01\t3.650874191969583293e+00\t8.313793690203402642e-01\t4.721314279438468375e+00\t1.073305358961462286e+00\t5.882774132732964567e+00\t2.199287336285580263e+00\t3.798152586004966969e-01\t5.333129895506141249e-01\n+2.307814819672060480e+00\t4.104095493932964800e+00\t2.489576320635186413e+00\t4.496836765063628727e-01\t4.578631912903595946e+00\t1.329946843468508222e+00\t1.683934112573501896e-01\t7.002919109156724442e-02\t2.471193668340454508e+00\t5.351430311502515247e-01\n'
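Note: the pca_* fixtures exercise pca.py. pca_input.dat and pca_input_with_headers.dat are 300 x 10 feature matrices (the latter with a col_1..col_10 header row); the *_output.dat files hold the transformed coordinates, five columns for the classical solver and seven for the incremental one. A minimal sketch under those assumed component counts (the tool's exact parameters are not part of this diff):

    import numpy as np
    from sklearn.decomposition import PCA, IncrementalPCA

    X = np.loadtxt("test-data/pca_input.dat")  # 300 samples x 10 features

    # Classical full-SVD PCA, five components as in pca_classical_output.dat.
    classical = PCA(n_components=5).fit_transform(X)

    # Incremental PCA fits in mini-batches; seven components assumed from the
    # column count of pca_incremental_output.dat.
    incremental = IncrementalPCA(n_components=7, batch_size=100).fit_transform(X)

    np.savetxt("classical.dat", classical, fmt="%.4f", delimiter="\t")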
diff -r 000000000000 -r af2624d5ab32 test-data/pickle_blacklist
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pickle_blacklist Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+cos
+system
+(S'ls ~'
+tR.
\ No newline at end of file
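Note: pickle_blacklist is not tabular data but a raw protocol-0 pickle payload: its opcodes resolve the global os.system ("cos\nsystem") and the string argument 'ls ~', and R (REDUCE) would invoke the call at load time. The fixture exists to check that model loading refuses such payloads instead of executing them. A minimal sketch of the usual defence, a restricted Unpickler with an illustrative whitelist (not the tool's actual list):

    import io
    import pickle

    class RestrictedUnpickler(pickle.Unpickler):
        # Resolve only explicitly trusted globals; refuse everything else.
        ALLOWED = {("numpy", "ndarray"), ("collections", "OrderedDict")}

        def find_class(self, module, name):
            if (module, name) in self.ALLOWED:
                return super().find_class(module, name)
            raise pickle.UnpicklingError(f"forbidden global: {module}.{name}")

    payload = b"cos\nsystem\n(S'ls ~'\ntR."
    try:
        RestrictedUnpickler(io.BytesIO(payload)).load()
    except pickle.UnpicklingError as exc:
        print(exc)  # forbidden global: os.system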
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline01
Binary file test-data/pipeline01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline02
Binary file test-data/pipeline02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline03
Binary file test-data/pipeline03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline04
Binary file test-data/pipeline04 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline05
Binary file test-data/pipeline05 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline06
Binary file test-data/pipeline06 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline07
Binary file test-data/pipeline07 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline08
Binary file test-data/pipeline08 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline09
Binary file test-data/pipeline09 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline10
Binary file test-data/pipeline10 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline11
Binary file test-data/pipeline11 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline12
Binary file test-data/pipeline12 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline14
Binary file test-data/pipeline14 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline15
Binary file test-data/pipeline15 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline16
Binary file test-data/pipeline16 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline17
Binary file test-data/pipeline17 has changed
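Note: the pipeline01-pipeline17 fixtures are pickled scikit-learn estimators, so Mercurial can only record them as binary changes. A minimal sketch of how such a fixture might be created (the estimators actually inside each file are not recoverable from this diff; the names below are illustrative):

    import pickle
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler

    # Illustrative pipeline only; pipeline01..17 hold the tool's own fits.
    pipe = Pipeline([
        ("scale", StandardScaler()),
        ("model", RandomForestRegressor(n_estimators=100, random_state=42)),
    ])

    with open("test-data/pipeline_demo", "wb") as fh:
        pickle.dump(pipe, fh)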
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline_params05.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pipeline_params05.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,18 @@
+ Parameter Value
+@ bootstrap bootstrap: True
+@ criterion criterion: 'mse'
+@ max_depth max_depth: None
+@ max_features max_features: 'auto'
+@ max_leaf_nodes max_leaf_nodes: None
+@ min_impurity_decrease min_impurity_decrease: 0.0
+@ min_impurity_split min_impurity_split: None
+@ min_samples_leaf min_samples_leaf: 1
+@ min_samples_split min_samples_split: 2
+@ min_weight_fraction_leaf min_weight_fraction_leaf: 0.0
+@ n_estimators n_estimators: 100
+* n_jobs n_jobs: 1
+@ oob_score oob_score: False
+@ random_state random_state: 42
+@ verbose verbose: 0
+@ warm_start warm_start: False
+ Note: @, params eligible for search in searchcv tool.
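Note: pipeline_params05.tabular is a flattened view of RandomForestRegressor.get_params(); rows flagged @ are eligible for tuning by the searchcv tool, while * marks parameters it leaves alone (here n_jobs). A minimal sketch of generating such a listing; the eligibility rule and exclusion set below are simplifications, and the printed values depend on the scikit-learn release in use:

    from sklearn.ensemble import RandomForestRegressor

    est = RandomForestRegressor(n_estimators=100, random_state=42, n_jobs=1)

    # Assumed, simplified rule: resource-only knobs are not searchable.
    NOT_SEARCHABLE = {"n_jobs"}

    print("\tParameter\tValue")
    for key, value in sorted(est.get_params().items()):
        flag = "*" if key in NOT_SEARCHABLE else "@"
        print(f"{flag}\t{key}\t{key}: {value!r}")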
diff -r 000000000000 -r af2624d5ab32 test-data/pipeline_params18
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pipeline_params18 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,89 @@
+ Parameter Value
+* memory memory: None
+@ powertransformer powertransformer: PowerTransformer(copy=True, method='yeo-johnson', standardize=True)
+* steps "steps: [('powertransformer', PowerTransformer(copy=True, method='yeo-johnson', standardize=True)), ('transformedtargetregressor', TransformedTargetRegressor(check_inverse=True, func=None, inverse_func=None,
+                           regressor=RandomForestRegressor(bootstrap=True,
+                                                           criterion='mse',
+                                                           max_depth=None,
+                                                           max_features='auto',
+                                                           max_leaf_nodes=None,
+                                                           min_impurity_decrease=0.0,
+                                                           min_impurity_split=None,
+                                                           min_samples_leaf=1,
+                                                           min_samples_split=2,
+                                                           min_weight_fraction_leaf=0.0,
+                                                           n_estimators='warn',
+                                                           n_jobs=1,
+                                                           oob_score=False,
+                                                           random_state=10,
+                                                           verbose=0,
+                                                           warm_start=False),
+                           transformer=QuantileTransformer(copy=True,
+                                                           ignore_implicit_zeros=False,
+                                                           n_quantiles=1000,
+                                                           output_distribution='uniform',
+                                                           random_state=10,
+                                                           subsample=100000)))]"
+@ transformedtargetregressor "transformedtargetregressor: TransformedTargetRegressor(check_inverse=True, func=None, inverse_func=None,
+                           regressor=RandomForestRegressor(bootstrap=True,
+                                                           criterion='mse',
+                                                           max_depth=None,
+                                                           max_features='auto',
+                                                           max_leaf_nodes=None,
+                                                           min_impurity_decrease=0.0,
+                                                           min_impurity_split=None,
+                                                           min_samples_leaf=1,
+                                                           min_samples_split=2,
+                                                           min_weight_fraction_leaf=0.0,
+                                                           n_estimators='warn',
+                                                           n_jobs=1,
+                                                           oob_score=False,
+                                                           random_state=10,
+                                                           verbose=0,
+                                                           warm_start=False),
+                           transformer=QuantileTransformer(copy=True,
+                                                           ignore_implicit_zeros=False,
+                                                           n_quantiles=1000,
+                                                           output_distribution='uniform',
+                                                           random_state=10,
+                                                           subsample=100000))"
+@ verbose verbose: False
+@ powertransformer__copy powertransformer__copy: True
+@ powertransformer__method powertransformer__method: 'yeo-johnson'
+@ powertransformer__standardize powertransformer__standardize: True
+@ transformedtargetregressor__check_inverse transformedtargetregressor__check_inverse: True
+@ transformedtargetregressor__func transformedtargetregressor__func: None
+@ transformedtargetregressor__inverse_func transformedtargetregressor__inverse_func: None
+@ transformedtargetregressor__regressor "transformedtargetregressor__regressor: RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
+                      max_features='auto', max_leaf_nodes=None,
+                      min_impurity_decrease=0.0, min_impurity_split=None,
+                      min_samples_leaf=1, min_samples_split=2,
+                      min_weight_fraction_leaf=0.0, n_estimators='warn',
+                      n_jobs=1, oob_score=False, random_state=10, verbose=0,
+                      warm_start=False)"
+@ transformedtargetregressor__regressor__bootstrap transformedtargetregressor__regressor__bootstrap: True
+@ transformedtargetregressor__regressor__criterion transformedtargetregressor__regressor__criterion: 'mse'
+@ transformedtargetregressor__regressor__max_depth transformedtargetregressor__regressor__max_depth: None
+@ transformedtargetregressor__regressor__max_features transformedtargetregressor__regressor__max_features: 'auto'
+@ transformedtargetregressor__regressor__max_leaf_nodes transformedtargetregressor__regressor__max_leaf_nodes: None
+@ transformedtargetregressor__regressor__min_impurity_decrease transformedtargetregressor__regressor__min_impurity_decrease: 0.0
+@ transformedtargetregressor__regressor__min_impurity_split transformedtargetregressor__regressor__min_impurity_split: None
+@ transformedtargetregressor__regressor__min_samples_leaf transformedtargetregressor__regressor__min_samples_leaf: 1
+@ transformedtargetregressor__regressor__min_samples_split transformedtargetregressor__regressor__min_samples_split: 2
+@ transformedtargetregressor__regressor__min_weight_fraction_leaf transformedtargetregressor__regressor__min_weight_fraction_leaf: 0.0
+@ transformedtargetregressor__regressor__n_estimators transformedtargetregressor__regressor__n_estimators: 'warn'
+* transformedtargetregressor__regressor__n_jobs transformedtargetregressor__regressor__n_jobs: 1
+@ transformedtargetregressor__regressor__oob_score transformedtargetregressor__regressor__oob_score: False
+@ transformedtargetregressor__regressor__random_state transformedtargetregressor__regressor__random_state: 10
+@ transformedtargetregressor__regressor__verbose transformedtargetregressor__regressor__verbose: 0
+@ transformedtargetregressor__regressor__warm_start transformedtargetregressor__regressor__warm_start: False
+@ transformedtargetregressor__transformer "transformedtargetregressor__transformer: QuantileTransformer(copy=True, ignore_implicit_zeros=False, n_quantiles=1000,
+                    output_distribution='uniform', random_state=10,
+                    subsample=100000)"
+@ transformedtargetregressor__transformer__copy transformedtargetregressor__transformer__copy: True
+@ transformedtargetregressor__transformer__ignore_implicit_zeros transformedtargetregressor__transformer__ignore_implicit_zeros: False
+@ transformedtargetregressor__transformer__n_quantiles transformedtargetregressor__transformer__n_quantiles: 1000
+@ transformedtargetregressor__transformer__output_distribution transformedtargetregressor__transformer__output_distribution: 'uniform'
+@ transformedtargetregressor__transformer__random_state transformedtargetregressor__transformer__random_state: 10
+@ transformedtargetregressor__transformer__subsample transformedtargetregressor__transformer__subsample: 100000
+ Note: @, params eligible for search in searchcv tool.
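This longer dump is the same `get_params()` rendering applied to a two-step pipeline; the double-underscore keys are scikit-learn's nested-parameter convention. A sketch of the estimator it describes (defaults such as criterion='mse' and n_estimators='warn' date the dump to scikit-learn ~0.20):

    from sklearn.compose import TransformedTargetRegressor
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import PowerTransformer, QuantileTransformer

    pipe = make_pipeline(
        PowerTransformer(method="yeo-johnson", standardize=True),
        TransformedTargetRegressor(
            regressor=RandomForestRegressor(random_state=10),
            transformer=QuantileTransformer(
                n_quantiles=1000, output_distribution="uniform",
                random_state=10),
        ),
    )
    # Nested keys such as transformedtargetregressor__regressor__max_depth
    # come straight from get_params(deep=True):
    print("transformedtargetregressor__regressor__max_depth" in pipe.get_params())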
diff -r 000000000000 -r af2624d5ab32 test-data/precision_recall_curve.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/precision_recall_curve.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+precision_recall_curve : 
+(array([1., 1.]), array([1., 0.]), array([1]))
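For reference, one minimal input that reproduces this exact triple (the test's real inputs live in the tool's test suite):

    from sklearn.metrics import precision_recall_curve

    precision, recall, thresholds = precision_recall_curve([1, 1], [1, 1])
    print((precision, recall, thresholds))
    # (array([1., 1.]), array([1., 0.]), array([1]))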
diff -r 000000000000 -r af2624d5ab32 test-data/precision_recall_fscore_support.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/precision_recall_fscore_support.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+precision_recall_fscore_support : 
+(0.8461538461538461, 0.8461538461538461, 0.8461538461538461, None)
diff -r 000000000000 -r af2624d5ab32 test-data/precision_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/precision_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+precision_score : 
+0.8461538461538461
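The value 0.8461538461538461 (11/13) recurs across precision_recall_fscore_support, precision_score, and recall_score above, and the trailing None in the fscore-support fixture is what that function returns for support whenever an `average` mode is set. A sketch of the three calls (the label vectors are illustrative, not the fixture data):

    from sklearn.metrics import (precision_recall_fscore_support,
                                 precision_score, recall_score)

    y_true = [0, 1, 1, 0, 1]  # hypothetical labels
    y_pred = [0, 1, 0, 0, 1]

    print(precision_score(y_true, y_pred))
    print(recall_score(y_true, y_pred))
    # With average set, support is returned as None, as in the fixture:
    print(precision_recall_fscore_support(y_true, y_pred, average="micro"))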
diff -r 000000000000 -r af2624d5ab32 test-data/predicted_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/predicted_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,35 @@
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model01
Binary file test-data/prp_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model02
Binary file test-data/prp_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model03
Binary file test-data/prp_model03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model04
Binary file test-data/prp_model04 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model05
Binary file test-data/prp_model05 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model06
Binary file test-data/prp_model06 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model07
Binary file test-data/prp_model07 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model08
Binary file test-data/prp_model08 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_model09
Binary file test-data/prp_model09 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+0.34079224150348947 -0.3921227933085925 0.2490507280911941 -0.7698156258582727 -0.1701382209728926
+-0.8620757555306106 -0.19048588419219253 0.24710543975009408 0.7422931346186274 -0.6790706051909926
+-0.44857543757211044 0.19920312300180737 -0.812112096739406 0.2785593090771274 0.04069143168750737
+1.3342816328356895 1.6641608262566074 -3.000113357933606 -0.6701123839490727 -0.07045038775469255
+0.7615267260378895 0.9176274108888074 -1.954493327131406 -0.5675301168878727 0.10063563654750733
+0.3517077819346894 0.6351202511326074 -1.518915029366606 -0.30971697444707263 0.09957030020130735
+-1.1546995581165105 -0.5289323469785927 0.7279548225941941 0.8261855855227276 -0.6127421735668926
+-0.17683671467671042 -1.5830256329757926 1.8352445249339941 -1.0553955128494728 0.23777966502290743
+-0.04589044764567053 0.4089694362054475 -1.1558632189207658 -0.02446696726223259 0.07501752707814739
+-2.322599763463111 -1.5464662131621925 2.233148890877594 1.4052188634961276 -0.5115354482934926
+0.3359621667503495 -0.16218071845273258 -0.03556840603494589 -0.5958346262657126 -0.28461208654203257
+0.09817425011268949 -0.29803272230839256 0.18230400872239416 -0.42567750847007263 -0.2990016986016926
+0.6939725287059254 -0.046625817910626616 -0.25306728129413986 -0.9172273915573068 -0.2192857084889266
+-1.8560091420543106 -0.8903352997473926 0.8320084501263939 1.0765172991949272 0.09558502193530742
+0.7235684795430894 -0.41357463008399253 0.19661484068979412 -1.2196980959976726 -0.029144264696292624
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result02
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result02 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+0.5507843815769634 0.3509713593582132 0.5024983733118504 0.21181277111109376 0.0
+0.3861806964013367 0.5069547456108511 0.6109599535763046 0.8290928000628373 0.0
+0.5507375738755746 0.6540163740150353 0.4443100403766963 0.7730482551190299 1.0
+1.0 1.0 0.0 0.517244227590485 1.0
+0.8235586181451755 0.7985651943678985 0.18709221814790866 0.4963213476362478 1.0
+0.7266009913523925 0.7367833962232062 0.2805049676108317 0.5753897601225946 1.0
+0.30103611027291544 0.41809900797558924 0.6996399175984136 0.8344573213929083 0.0
+0.3312417925943893 0.0 0.7545711939364796 0.0 0.0
+0.6381134490835961 0.6925288168071413 0.36342661590035497 0.6700118165314028 1.0
+0.0 0.17251430929709788 1.0 0.9803983325686505 0.0
+0.5767922296995018 0.42657716609772306 0.4660985815769355 0.29991460317209145 0.0
+0.5238014571892052 0.39991387603944323 0.5157872357238816 0.3562801111416092 0.0
+0.6462177807916674 0.4376032758632245 0.4055927537907609 0.18180023195970593 0.0
+0.2038689924106734 0.40279192286335813 0.7842991022590049 1.0 1.0
+0.6081358906411253 0.3153114383337088 0.4611172283355056 0.03134330438976468 0.0
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result03
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result03 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+1.0 -0.409899987374 -0.649450145317 0.510268556953 -0.229110484125 0.0 0.16801799964920539 0.26620960636548074 -0.2091590750523839 0.09391238455008853 -0.0 0.4217854912522724 -0.33139398846382173 0.14879583720862946 -0.0 0.260374000214897 -0.11690787611726697 0.0 0.05249161393599188 -0.0 0.0
+1.0 -1.10383560019 0.0611191480175 1.0172556528299999 1.79193066057 0.0 1.2184530322468177 -0.06746549143499857 -1.1228830040882731 -1.9779968562091492 -0.0 0.003735550254385074 0.06217379881695535 0.10952127528047438 0.0 1.0348090632145892 1.8228515939442282 0.0 3.2110154922908367 0.0 0.0
+1.0 -0.41009731910999997 0.7310461183329999 0.238276079462 1.60843479815 1.0 0.16817981114120914 -0.29980005327413506 -0.09771638139540752 -0.659614798684549 -0.41009731910999997 0.5344284271297466 0.17419080298230055 1.1758400157792797 0.7310461183329999 0.05677549004378134 0.38325153777343535 0.238276079462 2.5870624998998313 1.60843479815 1.0
+1.0 1.48390157074 2.30714564103 -1.83858336229 0.7709049245659999 1.0 2.2019638716446392 3.423577040650361 -2.7282767392385616 1.1439470284546884 1.48390157074 5.322921008923729 -4.241879589977655 1.7785899363610076 2.30714564103 3.3803887800896018 -1.417372968214475 -1.83858336229 0.59429440272011 0.7709049245659999 1.0
+1.0 0.74006063964 1.38952620136 -0.96404935579 0.702401167325 1.0 0.547689750344366 1.028333649375021 -0.7134549828904773 0.5198194571744222 0.74006063964 1.9307830642659514 -1.3395718392744338 0.9760048258639371 1.38952620136 0.929391160399114 -0.6771493928658102 -0.96404935579 0.49336739985952266 0.702401167325 1.0
+1.0 0.331307031883 1.10808437795 -0.527405721679 0.961279646112 1.0 0.10976434937512317 0.3671161463345349 -0.17473322424758106 0.3184787063629073 0.331307031883 1.2278509886568385 -0.5844100410339456 1.0651789586980116 1.10808437795 0.27815679525974685 -0.5069843854930332 -0.527405721679 0.924058558029212 0.961279646112 1.0
+1.0 -1.4627878344 -0.34365574639300006 1.43177660405 1.8094946798500002 0.0 2.139748248468642 0.5026954450453321 -2.0943853979828857 -2.646906804096103 -0.0 0.11809927202892997 -0.49203825753283764 -0.6218432447980146 -0.0 2.0499842439049503 2.5907921477621754 0.0 3.274270996405455 0.0 0.0
+1.0 -1.33544682955 -2.24827087098 1.6885444678000001 -0.922608257112 0.0 1.7834182345551466 3.0024462066198576 -2.254961356077702 1.2320942718768715 -0.0 5.054721909297167 -3.7963053413091665 2.074273269790536 -0.0 2.851182419737986 -1.5578650684930677 0.0 0.8512059960912424 -0.0 0.0
+1.0 -0.041738424574199996 0.906486336146 -0.13980113811 1.27108242642 1.0 0.001742096085936182 -0.03783531156877273 0.005835079258391552 -0.05305297798272229 -0.041738424574199996 0.821717477619399 -0.12672782147437484 1.1522188516650336 0.906486336146 0.019544358216851295 -0.17769876984513633 -0.13980113811 1.6156505347537549 1.27108242642 1.0
+1.0 -2.7318947650200003 -1.46239633785 2.83576394706 2.28732123255 0.0 7.463249007143682 3.9951128997568346 -7.747008681805666 -6.24872090112244 -0.0 2.1386030489570915 -4.147010811187605 -3.344970193967668 -0.0 8.04155716344531 6.486303086610132 0.0 5.231838420874052 0.0 0.0
+1.0 -0.300256196558 -0.305034204892 0.340123288396 0.0593443810367 0.0 0.09015378357147634 0.0915884101809656 -0.1021241249345827 -0.017818518137168244 -0.0 0.09304586615409464 -0.10374923684112626 -0.01810206608433767 -0.0 0.11568385130930857 0.020184406026027626 0.0 0.0035217555606290385 0.0 0.0
+1.0 -0.523654501136 -0.42649659668799994 0.5723853152130001 0.24389111089200002 0.0 0.274214036559993 0.22333686257485638 -0.29973214669543563 -0.1277146780056551 -0.0 0.18189934698644647 -0.2441203889325326 -0.1040187287578936 -0.0 0.3276249490714854 0.1395996903855662 0.0 0.05948287397213385 0.0 0.0
+1.0 -0.007572212655529999 -0.254805682403 0.0572980350837 -0.327374762308 0.0 5.733840450056868e-05 0.0019294428129929542 -0.00043387290639779506 0.002478951318249763 -0.0 0.0649259357848585 -0.014599864929853214 0.08341694971140987 -0.0 0.003283064824452916 -0.018757930616241734 0.0 0.1071742349962195 -0.0 0.0
+1.0 -1.87242461384 -0.413385894664 1.8275030360799998 2.35149919802 1.0 3.5059739345138734 0.7740339241831431 -3.421861666623521 -4.403004977797668 -1.87242461384 0.1708878979071557 -0.755463977571107 -0.9720765997751761 -0.413385894664 3.339767346881617 4.297371923721235 1.8275030360799998 5.529548478288703 2.35149919802 1.0
+1.0 -0.16811770561099998 -0.811895938369 0.316838713275 -0.819986910541 0.0 0.028263562939906853 0.13649408235348612 -0.053266197524534487 0.1378543180312052 -0.0 0.659175014740079 -0.25724006442603264 0.6657440421839824 -0.0 0.10038677022975767 -0.25980359763815297 0.0 0.672378533458574 -0.0 0.0
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result04
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result04 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+0.0 -0.25385559680817016 0.13442061387070464 -0.5602120769938709 0.0
+-0.5807061112525813 0.2698773982744695 0.5349578561360192 0.571982134735025 0.0
+-0.00016513310878258202 0.7636545174678359 -0.0804627978317235 0.4691871204655464 1.0
+1.584789882498885 1.9253361878040125 -1.7212531850763018 0.0 1.0
+0.9623215057330502 1.248994581161877 -1.0303412425843197 -0.038376040801309956 1.0
+0.6202642404230927 1.0415547572084232 -0.6853777543973235 0.1066485748494791 1.0
+-0.881088095119412 -0.028466436412001278 0.862443663986116 0.5818215588435884 0.0
+-0.7745253270992509 -1.4322841823191093 1.0652991072215634 -0.9487119185155306 0.0
+0.30808862594408043 0.8929646798898123 -0.37915680271103425 0.2802011596461483 1.0
+-1.9431147973567746 -0.8530466232854528 1.97164195151228 0.8495016397748227 0.0
+0.09175320910447847 0.0 0.0 -0.3986186678055577 0.0
+-0.0951931852237634 -0.08952520583418162 0.18349498924288923 -0.2952349539785941 0.0
+0.33667993570408733 0.03702149075186114 -0.22344167716683067 -0.6152600641516485 0.0
+-1.223884424953702 -0.07986181719203675 1.1750811552867684 0.8854543571237001 1.0
+0.20233065722424093 -0.37358807403702804 -0.01839561515890641 -0.8912230866367292 0.0
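The dense prp_result fixtures (01-04 above, 07 and 10 below) line up with standard scikit-learn preprocessors: prp_result02 is bounded in [0, 1] (min-max scaling), prp_result07 shows the ±0.816/1.225 pattern of a standardized binary column, prp_result10 is bounded in [-1, 1], and prp_result03 expands 5 input columns into the 21 terms of a degree-2 polynomial basis. A sketch of those transforms (the pairing with each fixture is inferred from value ranges, not taken from the tests):

    import numpy as np
    from sklearn.preprocessing import (MinMaxScaler, PolynomialFeatures,
                                       StandardScaler)

    X = np.random.RandomState(0).rand(15, 5)              # placeholder input
    MinMaxScaler().fit_transform(X)                       # [0, 1], cf. prp_result02
    MinMaxScaler(feature_range=(-1, 1)).fit_transform(X)  # cf. prp_result10
    StandardScaler().fit_transform(X)                     # zero mean, unit variance
    print(PolynomialFeatures(degree=2).fit_transform(X).shape)  # (15, 21)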
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result05
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result05 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,7 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 4
+1 1 1.000000000000000e+00
+2 3 1.000000000000000e+00
+3 1 1.000000000000000e+00
+3 3 1.000000000000000e+00
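prp_result05/06/08/09 are sparse matrices serialized in MatrixMarket coordinate form: a header, a `rows cols nnz` line, then one 1-indexed `row col value` triple per line. scipy round-trips the format directly (a sketch):

    import scipy.io
    import scipy.sparse

    m = scipy.io.mmread("test-data/prp_result05")  # -> COO sparse matrix
    print(m.shape, m.nnz)                          # (3, 3) 4
    scipy.io.mmwrite("/tmp/prp_copy.mtx", scipy.sparse.csr_matrix(m))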
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result06
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result06 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 6
+1 1 1.000000000000000e+00
+3 1 4.000000000000000e-02
+3 2 -5.000000000000000e+00
+1 3 -2.000000000000000e-01
+2 3 1.100000000000000e+01
+3 3 2.600000000000000e+00
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result07
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result07 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+0.10866513901130055 -0.5565683482001781 0.01697338750768846 -0.9589623671667038 -0.816496580927726
+-0.5710995365177258 0.07926796585473102 0.46542360804755395 0.9797654572160418 -0.816496580927726
+0.10847183700890116 0.6787364476581768 -0.2236147606088382 0.803743046654752 1.224744871391589
+1.9637931622636124 2.0890722453009336 -2.0606794162148767 0.00032423752699077795 1.224744871391589
+1.2351422905746392 1.2679619500798842 -1.2871193566513779 -0.06538948660301952 1.224744871391589
+0.8347358862676002 1.0161203164819261 -0.9008907216292501 0.18294534382616373 1.224744871391589
+-0.922721566734639 -0.28293538193724904 0.8320838514832234 0.9966141260199964 -0.816496580927726
+-0.7979810068833711 -1.9872356829362412 1.059205224122999 -1.6242152405020795 -0.816496580927726
+0.4693084330819043 0.8357250235474191 -0.5580390743243027 0.48013042183945476 1.224744871391589
+-2.1659119218220786 -1.284014236214121 2.073966413639728 1.4549796745789692 -0.816496580927726
+0.2160698816290759 -0.2483757987671466 -0.1335268595966537 -0.6822557426452339 -0.816496580927726
+-0.0027663810163240663 -0.35706357942460004 0.07191812706310458 -0.5052252645629531 -0.816496580927726
+0.5027769329398427 -0.20342998011241972 -0.3836970281346616 -1.053224520491157 -0.816496580927726
+-1.3239931073762934 -0.34533177433843787 1.182119596299028 1.5165437885484256 1.224744871391589
+0.3455099575735564 -0.7019291669926769 -0.15412299100336474 -1.5257734742396478 -0.816496580927726
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result08
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result08 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 6
+1 1 1.000000000000000e+00
+1 3 -1.818181818181818e-02
+2 3 1.000000000000000e+00
+3 1 4.000000000000000e-02
+3 2 -1.000000000000000e+00
+3 3 2.363636363636364e-01
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result09
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result09 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+%%MatrixMarket matrix coordinate real general
+%
+3 3 6
+1 1 8.333333333333334e-01
+1 3 -1.666666666666667e-01
+2 3 1.000000000000000e+00
+3 1 5.235602094240837e-03
+3 2 -6.544502617801047e-01
+3 3 3.403141361256544e-01
diff -r 000000000000 -r af2624d5ab32 test-data/prp_result10
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/prp_result10 Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,262 @@\n+year\tmonth\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\tweek_Fri\tweek_Mon\tweek_Sat\tweek_Sun\tweek_Thurs\tweek_Tues\tweek_Wed\n+-1.0\t0.4545454545454546\t0.19999999999999996\t0.22222222222222188\t-0.17073170731707288\t0.5232198142414863\t0.33333333333333304\t0.6000000000000001\t0.5428571428571427\t0.791044776119403\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.4545454545454546\t-0.1333333333333333\t-0.07407407407407396\t-0.41463414634146334\t-0.195046439628483\t-0.11111111111111116\t-0.02857142857142847\t-0.20000000000000018\t0.13432835820895517\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t0.09090909090909083\t0.9333333333333333\t0.8518518518518516\t0.29268292682926855\t0.9938080495356032\t0.8888888888888884\t0.8857142857142857\t0.8857142857142852\t0.25373134328358193\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.2727272727272727\t-0.06666666666666665\t0.7407407407407405\t-0.26829268292682906\t0.21362229102167207\t0.22222222222222232\t0.31428571428571406\t0.1428571428571428\t-0.10447761194029859\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-1.0\t0.1333333333333333\t-0.2962962962962963\t-0.6341463414634145\t-0.8513931888544892\t-0.8333333333333335\t-0.8857142857142857\t-0.7142857142857144\t-0.10447761194029859\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-1.0\t0.6000000000000001\t-0.5185185185185186\t-0.6097560975609755\t-0.8080495356037152\t-0.7777777777777777\t-0.7142857142857144\t-0.7142857142857144\t0.04477611940298498\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.8181818181818181\t0.6000000000000001\t-0.4814814814814816\t-0.5853658536585364\t-0.7832817337461302\t-0.7777777777777777\t-0.657142857142857\t-0.8285714285714287\t-0.6119402985074627\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.09090909090909083\t0.2666666666666666\t0.40740740740740744\t0.048780487804878314\t0.956656346749226\t0.8888888888888884\t0.8285714285714287\t0.8857142857142852\t0.13432835820895517\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.9999999999999998\t0.06666666666666665\t-0.8518518518518519\t-0.9999999999999999\t-0.9938080495356036\t-0.8888888888888888\t-0.9428571428571431\t-0.8857142857142857\t-0.7014925373134329\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.9999999999999998\t-0.5333333333333333\t-0.7407407407407409\t-0.8780487804878048\t-0.9380804953560373\t-0.7777777777777777\t-0.7142857142857144\t-0.8285714285714287\t-0.7611940298507462\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t0.9999999999999998\t0.8\t-0.7407407407407409\t-0.7073170731707314\t-0.9876160990712077\t-1.0\t-0.8285714285714287\t-1.0\t-0.10447761194029859\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.09090909090909083\t0.06666666666666665\t0.5185185185185186\t-0.09756097560975596\t0.9318885448916405\t0.9444444444444446\t0.8285714285714287\t0.8857142857142852\t0.791044776119403\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.09090909090909083\t-0.6\t0.2592592592592591\t2.220446049250313e-16\t0.8142414860681115\t0.7777777777777777\t0.7714285714285709\t0.7142857142857144\t0.31343283582089554\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t0.9999999999999998\t-0.06666666666666665\t-0.8148148148148149\t-0.9024390243902437\t-0.9876160990712077\t-0.7777777777777777\t-0.8285714285714287\t-0.8285714285714287\t-0.4626865671641791\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t-0.09090909090909105\t0.7333333333333334\t0.33333333333333304\t0.048780487804878314\t0.6780185758513935\t0.6111111111111112\t0.6000000000000001\t0.599999999
9999996\t0.6716417910447763\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.2727272727272727\t0.9999999999999998\t0.07407407407407396\t-0.12195121951219501\t0.3746130030959747\t0.22222222222222232\t0.4857142857142853\t0.37142857142857144\t0.7014925373134326\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\n+-1.0\t-1.0\t0.2666666666666666\t-0.2962962962962963\t-0.6829268292682924\t-0.8390092879256965\t-0.8333333333333335\t-0.657142857142857\t-0.7142857142857144\t-0.014925373134328401\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.2727272727272727\t-0.4\t0.40740740740740744\t-0.09756097560975596\t0.9752321981424141\t1.0\t0.8285714285714287\t0.8857142857142852\t0.19402985074626877\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t-0.6363636363636365\t0.4666666666666'..b'444444444442\t0.024390243902439268\t0.888544891640866\t0.833333333333333\t0.8285714285714287\t0.8285714285714283\t-0.16417910447761197\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.8181818181818181\t-0.6\t-0.14814814814814836\t-0.36585365853658525\t-0.51702786377709\t-0.44444444444444464\t-0.4285714285714288\t-0.6000000000000001\t-0.791044776119403\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.6363636363636365\t-1.0\t0.14814814814814792\t-0.21951219512195097\t0.2507739938080493\t0.2777777777777777\t0.37142857142857144\t0.1428571428571428\t-0.22388059701492535\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t0.8181818181818181\t0.3999999999999999\t-0.2592592592592595\t-0.5365853658536583\t-0.7399380804953566\t-0.7222222222222223\t-0.5428571428571431\t-0.7142857142857144\t-0.10447761194029859\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\n+-1.0\t-0.09090909090909105\t-1.0\t0.33333333333333304\t0.07317073170731736\t0.3808049535603719\t0.33333333333333304\t0.31428571428571406\t0.2571428571428571\t-0.10447761194029859\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t-0.2727272727272727\t-0.19999999999999996\t0.7037037037037037\t0.024390243902439268\t0.1888544891640862\t0.22222222222222232\t0.19999999999999973\t0.2571428571428571\t0.16417910447761197\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.09090909090909105\t-0.8666666666666667\t0.4814814814814814\t-0.12195121951219501\t0.3993808049535601\t0.2777777777777777\t0.4285714285714284\t0.2571428571428571\t-0.19402985074626855\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.4545454545454546\t-0.2666666666666666\t-0.11111111111111116\t-0.43902439024390216\t-0.21981424148606798\t-0.2777777777777777\t-0.2571428571428571\t-0.2571428571428571\t-0.014925373134328401\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\n+-1.0\t-0.6363636363636365\t0.9999999999999998\t0.07407407407407396\t-0.19512195121951192\t-0.3312693498452015\t-0.22222222222222232\t-0.2571428571428571\t-0.3142857142857145\t-0.16417910447761197\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t0.9999999999999998\t-0.1333333333333333\t-0.7037037037037037\t-0.8780487804878048\t-0.9814241486068114\t-0.7777777777777777\t-0.8857142857142857\t-0.9428571428571431\t-0.3731343283582089\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.2727272727272727\t-0.7333333333333334\t0.4814814814814814\t0.09756097560975618\t0.9938080495356032\t0.8888888888888884\t0.9999999999999996\t0.9428571428571431\t0.28358208955223874\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.2727272727272727\t-0.8\t0.9259259259259256\t-0.04878048780487787\t0.06501547987616041\t0.0\t0.08571428571428541\t0.1428571428571428\t-0.014925373134328401\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\n+-1.0\t0.9999999999999998\t0.9999999999999998\t-0.5185185185185186\t-0.46341
46341463412\t-0.975232198142415\t-0.9444444444444446\t-0.8857142857142857\t-0.8285714285714287\t-0.13432835820895517\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-1.0\t0.33333333333333326\t-0.5185185185185186\t-0.5853658536585364\t-0.8328173374613006\t-0.8888888888888888\t-0.7142857142857144\t-0.8857142857142857\t-0.13432835820895517\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\n+-1.0\t0.09090909090909083\t-0.4\t0.4444444444444442\t-0.12195121951219501\t0.8575851393188851\t0.6666666666666665\t0.7714285714285709\t0.8285714285714283\t0.9999999999999998\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.6363636363636365\t-0.06666666666666665\t-0.2962962962962963\t-0.6585365853658536\t-0.47368421052631593\t-0.5555555555555558\t-0.3142857142857145\t-0.5428571428571427\t0.25373134328358193\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\n+-1.0\t-0.4545454545454546\t0.19999999999999996\t0.5555555555555554\t0.3170731707317076\t-0.13931888544891669\t0.0\t-0.02857142857142847\t-0.1428571428571428\t-0.014925373134328401\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t1.0\t-1.0\n+-1.0\t0.6363636363636365\t-0.1333333333333333\t0.14814814814814792\t-0.3902439024390243\t-0.06501547987616085\t-0.16666666666666696\t0.02857142857142847\t-0.08571428571428585\t0.4925373134328359\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n+-1.0\t-0.4545454545454546\t-0.06666666666666665\t-0.11111111111111116\t-0.41463414634146334\t-0.18266253869969074\t-0.0555555555555558\t-0.1428571428571428\t-0.08571428571428585\t-0.6417910447761195\t1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\t-1.0\n'
diff -r 000000000000 -r af2624d5ab32 test-data/pw_metric01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pw_metric01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+3.0431466614809506e-10 1.0 0.0014178061201292206 0.04636609716221582 0.012120163495312785 0.012120163495312785 0.03966478547536481 4.837152686522704e-11
+0.00827235898637926 0.0014178061201292193 1.0 0.5030530725911153 0.005949415154775898 0.005949415154775898 0.001821364614043494 1.4472984886595985e-15
+0.0001805433897597471 0.04636609716221582 0.5030530725911155 1.0 0.05154646069476933 0.05154646069476933 0.032127855194777344 6.217339473667583e-13
+1.9087117205849074e-06 0.012120163495312775 0.005949415154775898 0.05154646069476933 1.0 1.0 0.6882765785347926 7.171478371468866e-07
diff -r 000000000000 -r af2624d5ab32 test-data/pw_metric02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pw_metric02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0.0 6.991989327202 4.700302055636 5.583279679695999
+6.991989327202 0.0 2.2916872715660004 5.558713150412
+4.700302055636 2.2916872715660004 0.0 4.078323200938
+5.583279679695999 5.558713150412 4.078323200938 0.0
diff -r 000000000000 -r af2624d5ab32 test-data/pw_metric03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pw_metric03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,4 @@
+0.0 0.7801459919993865 0.69641542739614 0.649889281728111
+0.7801459919993865 0.0 0.7727193167666271 0.7669511761085644
+0.69641542739614 0.7727193167666271 0.0 0.6761972684325525
+0.649889281728111 0.7669511761085644 0.6761972684325525 0.0
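The pw_metric tables are pairwise-comparison matrices: pw_metric02 and pw_metric03 are symmetric with zero diagonals (distance matrices), while pw_metric01 is rectangular (similarities between two different sample sets). A sketch of the API family that produces such tables (the metric choices here are illustrative, not the fixtures' exact settings):

    import numpy as np
    from sklearn.metrics import pairwise

    rng = np.random.RandomState(0)
    X, Y = rng.rand(4, 3), rng.rand(8, 3)
    D = pairwise.pairwise_distances(X, metric="euclidean")  # 4x4, zero diagonal
    C = pairwise.pairwise_distances(X, metric="cosine")     # bounded, cf. pw_metric03
    K = pairwise.rbf_kernel(X, Y)                           # 4x8 similarity table
    print(D.shape, C.shape, K.shape)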
diff -r 000000000000 -r af2624d5ab32 test-data/qda_model01
Binary file test-data/qda_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/qda_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/qda_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 0
diff -r 000000000000 -r af2624d5ab32 test-data/ranking_.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/ranking_.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,18 @@
+ranking_
+17
+7
+4
+5
+2
+1
+9
+6
+8
+3
+10
+15
+14
+11
+13
+12
+16
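A `ranking_` column of this shape is what a recursive feature-elimination selector exposes after fitting: rank 1 marks the surviving feature(s) and larger ranks were eliminated earlier. A sketch producing such a vector for 17 features (the fixture's actual training data is not shown here):

    from sklearn.datasets import make_regression
    from sklearn.feature_selection import RFE
    from sklearn.linear_model import LinearRegression

    X, y = make_regression(n_samples=50, n_features=17, random_state=0)
    selector = RFE(LinearRegression(), n_features_to_select=1).fit(X, y)
    print(selector.ranking_)  # 17 ranks; 1 = kept, higher = dropped sooner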
diff -r 000000000000 -r af2624d5ab32 test-data/recall_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/recall_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+recall_score : 
+0.8461538461538461
diff -r 000000000000 -r af2624d5ab32 test-data/regression.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression.txt Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,101 @@\n+0\t1\t2\t3\t4\t5\t6\t7\t8\t9\t10\t11\t12\t13\t14\t15\t16\t17\t18\t19\t20\t21\t22\t23\t24\t25\t26\t27\t28\t29\t30\t31\t32\t33\t34\t35\t36\t37\t38\t39\t40\t41\t42\t43\t44\t45\t46\t47\t48\t49\t50\t51\t52\t53\t54\t55\t56\t57\t58\t59\t60\t61\t62\t63\t64\t65\t66\t67\t68\t69\t70\t71\t72\t73\t74\t75\t76\t77\t78\t79\t80\t81\t82\t83\t84\t85\t86\t87\t88\t89\t90\t91\t92\t93\t94\t95\t96\t97\t98\t99\t0\n+0.04046644371002\t-0.32221869168671\t-1.94327028847757\t0.14089076168331\t0.50797298196983\t-1.18817519027507\t-0.55837864669810\t-1.47620399282446\t-0.24067909072797\t-0.06358029107758\t0.80929510932574\t0.35754611398417\t-0.35463528505326\t-0.01380073878907\t-0.08714823571983\t-0.79453654989307\t0.04252844430409\t1.18480579060701\t-0.34427022696108\t1.81291648390238\t1.93725134258407\t-0.23132382095439\t0.47212052875794\t-1.08604982624795\t-0.22514835388271\t-0.26881282015314\t-2.61290454860335\t-1.40689902631265\t-0.68522413053158\t0.30338447890179\t0.23198841547816\t1.07743876868911\t0.16518761240476\t-1.00475952205489\t-0.44900112900871\t0.06253225964953\t0.50965618717347\t0.34198041660066\t0.20063772499325\t-0.88958780218396\t0.10327990808768\t-0.40064406812253\t0.99727968716504\t0.30650582998706\t1.82568154541772\t-0.41737003120446\t0.44026752537403\t-0.28046691405830\t0.29390729185835\t-0.25861302105802\t1.61354083357410\t-0.21018097777042\t0.73272699046602\t0.33128542535218\t0.28400681429313\t-1.08781339197842\t-1.56515214507716\t0.11108254244934\t-1.05834110708324\t2.71590566117543\t-0.46709306413711\t-0.34890072684346\t-1.25513636804015\t-0.38706157159995\t-2.08983065701298\t-0.05398563142234\t-1.69582718701414\t0.11871279626400\t0.11536367037547\t0.14893574125345\t-0.75025315028281\t2.96755797814605\t0.14570961888674\t-1.16629293291132\t-1.11326773952565\t0.31364953071627\t0.10971268259129\t-1.08261212881083\t1.51674154694999\t1.74111073387411\t1.65143588667220\t0.59294661121932\t1.83067100732278\t-1.17600259748250\t0.50010668566359\t-0.65702939966286\t-0.00009313425995\t0.06338028324231\t2.21679138485467\t-0.04565557453581\t0.95729262004143\t1.62993492561400\t0.37185201934035\t-0.25222024644962\t0.23738912982157\t0.81546373244439\t-0.85286175333815\t0.96037198372300\t-0.30907239676257\t-0.56066669259864\t-6.00832134693808\n+-0.71424406804035\t0.28458137387342\t-0.30625448987067\t-0.17448841397393\t1.12578361592423\t-1.80584080194791\t-1.06785276227400\t0.81398817572329\t0.73661869553748\t-1.49629927138158\t-1.56717360361501\t-1.12018456782121\t0.10177233182574\t-2.45504661423104\t-0.40554207157953\t-1.05963908925750\t-0.37210757142254\t0.40749776579757\t0.68553648625535\t-0.39884590002055\t-0.94459829604073\t-2.10119710498413\t0.38679160339170\t-1.41816962334346\t-0.51923835176096\t0.97898426074191\t0.40508582774259\t-0.77652811271980\t1.10175760640748\t-0.79228035470404\t0.61001162860313\t-0.33062637671512\t-0.32042678881724\t-0.72672493847467\t-0.18435061102909\t-2.64629329629355\t0.23852982942963\t-1.26888176101983\t1.45228797948679\t-1.44631600120499\t-0.03944246509191\t0.83664527833708\t-0.32080614910167\t1.73265246953955\t-0.43674374808587\t0.00064259510634\t-0.54908314046260\t0.44138872930676\t-0.43306138863878\t-0.50788915323440\t-0.71562529742484\t0.24730882154570\t2.34264998959277\t1.39731257913356\t0.18452040005385\t1.28537324525823\t1.97667702755659\t-1.00696330920959\t1.11581898903754\t-0.49351437757812\t1.03520399433437\t2.01005968546991\t-0.07145737231421\t-0.05824150571174\t-0.05500426148539\t-0.18496077328512\t0.63825358857687\t-0.79762195021188\t1.9065
9647973074\t-2.18232577234497\t-1.22200333720390\t0.59956850953643\t0.53073647163920\t0.92313325091088\t0.54634108574228\t1.23753050621481\t-0.80531409504059\t1.40053754921282\t-0.82885164729651\t-1.41231683902822\t-0.77506619672441\t0.78710407214462\t-0.11413232110539\t-1.58966521342893\t1.82079148764931\t0.88747900665656\t-1.14489712094299\t-0.67489966013144\t-0.40151386976975\t1.26230720670704\t-1.04186780413216\t1.44434627688843\t-1.39500110713296\t-0.96634203108665\t-0.02576081685939\t0.42214550402037\t0.02827639221332\t0.67481973806360\t-0.06302357453907\t-0.90665121506621\t-119.53727328767538\n+0.10887198989791\t0.23784462192362\t1.54160517451341\t-0.18949583082318\t1.02973268513335\t-0.54443916167246\t-1.44321699522534\t0.50780959049232\t-0'..b'317523741\t0.24107892253465\t0.24350121915887\t-0.27274824801804\t0.54381104462477\t0.51820246808255\t-0.86009434942919\t-1.21423044904284\t0.36792051934856\t1.94082103916430\t2.11497218011132\t-0.43432765000649\t0.84999093815312\t1.83997148290589\t-2.18970055774918\t0.83007009623657\t-0.36828110384226\t0.32488842221473\t-0.40800572079705\t1.32393049920405\t0.32899969445643\t-0.28744255330986\t-0.60348124002864\t-2.04249975403047\t-0.12214358859249\t-1.61254032348568\t334.59256835042021\n+0.59156531213735\t-0.11033751258127\t-0.51236076329509\t-0.02454130061915\t0.27734607491290\t0.58725946809788\t-0.70971120747053\t-0.60061928108883\t-0.45186439475169\t0.36767347630475\t-0.28352190645161\t2.22550996123369\t-1.19436828147964\t1.89224039000434\t1.01762849591326\t1.00772158295772\t1.64615065381323\t-1.52145268077060\t-0.03805465490844\t0.64006527712100\t1.11018655266838\t1.72123119765181\t-0.96688703561691\t0.50951459531196\t-0.62580619481323\t1.65406935887170\t-0.27590718483285\t-0.59168211787820\t1.04792553943564\t-1.44913283833913\t-1.71554047709632\t0.92937971799566\t0.45187539696618\t1.56973338225729\t0.09924302883050\t-1.43045835132095\t-1.77900521980859\t0.97878159675935\t0.45962084160257\t0.00203998931280\t0.67057138105928\t0.13284630812698\t0.47422403034583\t-0.35687701161565\t0.90670081209711\t-1.35109741007581\t1.35258835724488\t0.72577510861552\t-0.09572608603917\t1.02184266769816\t-0.88361389539080\t-0.94127337729540\t1.59477698865504\t1.02092398022724\t0.09230308711426\t-0.04862094515233\t0.21076381406530\t1.64185343672488\t-0.24434612500460\t0.35034932531788\t0.75172685734187\t0.41889930681095\t-1.19270821234849\t0.56363994003780\t1.05623566225127\t1.09453422287246\t-1.03407151900200\t-0.04100745586156\t-1.49164829714866\t-0.58664552154587\t0.42604107028870\t-1.86180235120416\t-0.49850298818036\t0.14073031758719\t-1.18336380112792\t-0.71721011226228\t-0.01462447874399\t-2.21756850024936\t0.68942651065361\t1.51410447510684\t-1.25650641629701\t0.67624979937259\t1.64272679099395\t-0.59274406249763\t2.66282719183090\t-1.58227678600233\t-0.05242307406752\t0.96243709558340\t-0.33997777557902\t0.57406693541952\t-1.36830195639258\t0.11957236699383\t0.90590645568978\t1.21752159269684\t1.02860799311460\t0.89057952995516\t0.02075411792998\t1.76027271828188\t0.98122091065469\t0.03053194235190\t-58.56926535911965\n+0.11442909702019\t-0.60013542716991\t-1.17498506051978\t0.37789812105231\t0.80426397769103\t-0.25412543454522\t0.19100846967688\t0.05793637913866\t-0.57265676903169\t0.67137704493643\t1.00486767335942\t1.08035585383206\t-0.58595322636766\t0.31773863810775\t-1.48177365409568\t0.86636864807494\t-0.68610025158762\t0.98118972532235\t0.01499274526023\t-0.96048079613058\t-1.42376242684708\t-1.41447921097575\t-1.07641241372230\t-0.5347192
1590327\t0.63968104716280\t0.00003821688661\t-1.64789645160895\t-0.47946793783441\t-0.58027590555339\t-0.35626565190656\t-0.35395058607792\t1.22971874563225\t-1.11134507414587\t-1.94996110855379\t-0.18462590495313\t-1.08253549941625\t0.28175986241297\t1.43139435246322\t-0.21681273301629\t2.03318107641510\t-0.85554039248279\t0.80865738804815\t-0.81274796855477\t0.96225703674330\t0.83971775809643\t0.16367264651409\t0.37612382180038\t-1.20540534803405\t-0.39646871176150\t0.50440678609316\t-2.12269357911018\t-1.73337919698402\t0.66146222578848\t-1.25318872693810\t-1.73345228013854\t-0.63842960648510\t-0.52108483212612\t-1.07578377352847\t-0.17170592241337\t-1.58621109170536\t0.90224730254889\t0.10062624722110\t0.73537091959573\t-0.47506469682613\t-0.64652941101725\t0.95479548044025\t-2.06126245571583\t-0.89892744374809\t-0.64543661765593\t1.56589257557317\t0.05620965608259\t0.18979697580970\t0.21927974168967\t1.08315275138023\t1.43153004847297\t-0.27009563032084\t-1.13690656369851\t1.80239042546146\t-0.76721517469843\t-1.13280035299273\t0.20345737220567\t1.40956378493415\t-0.31306670625260\t0.01704629098668\t1.72791506643712\t-1.12339337581082\t-2.57092069804582\t-0.82500883083613\t1.87542985267408\t0.31904409765191\t-0.74511306613439\t0.73266290512627\t1.27807444106703\t0.97324132419371\t0.97757166179023\t0.33609045136699\t-0.42708309660138\t0.82163782590776\t-0.64757790004240\t0.73025258429468\t-40.12207652891863\n'
diff -r 000000000000 -r af2624d5ab32 test-data/regression_X.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_X.tabular Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,262 @@\n+year\tmonth\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\tweek_Fri\tweek_Mon\tweek_Sat\tweek_Sun\tweek_Thurs\tweek_Tues\tweek_Wed\n+2016\t9\t19\t68\t69\t69.7\t65\t74\t71\t88\t0\t1\t0\t0\t0\t0\t0\n+2016\t4\t14\t60\t59\t58.1\t57\t63\t58\t66\t0\t0\t0\t0\t1\t0\t0\n+2016\t7\t30\t85\t88\t77.3\t75\t79\t77\t70\t0\t0\t1\t0\t0\t0\t0\n+2016\t5\t15\t82\t65\t64.7\t63\t69\t64\t58\t0\t0\t0\t1\t0\t0\t0\n+2016\t1\t18\t54\t50\t47.5\t44\t48\t49\t58\t0\t1\t0\t0\t0\t0\t0\n+2016\t1\t25\t48\t51\t48.2\t45\t51\t49\t63\t0\t1\t0\t0\t0\t0\t0\n+2016\t11\t25\t49\t52\t48.6\t45\t52\t47\t41\t1\t0\t0\t0\t0\t0\t0\n+2016\t7\t20\t73\t78\t76.7\t75\t78\t77\t66\t0\t0\t0\t0\t0\t0\t1\n+2016\t12\t17\t39\t35\t45.2\t43\t47\t46\t38\t0\t0\t1\t0\t0\t0\t0\n+2016\t12\t8\t42\t40\t46.1\t45\t51\t47\t36\t0\t0\t0\t0\t1\t0\t0\n+2016\t12\t28\t42\t47\t45.3\t41\t49\t44\t58\t0\t0\t0\t0\t0\t0\t1\n+2016\t7\t17\t76\t72\t76.3\t76\t78\t77\t88\t0\t0\t0\t1\t0\t0\t0\n+2016\t7\t7\t69\t76\t74.4\t73\t77\t74\t72\t0\t0\t0\t0\t1\t0\t0\n+2016\t12\t15\t40\t39\t45.3\t45\t49\t47\t46\t0\t0\t0\t0\t1\t0\t0\n+2016\t6\t27\t71\t78\t72.2\t70\t74\t72\t84\t0\t1\t0\t0\t0\t0\t0\n+2016\t5\t31\t64\t71\t67.3\t63\t72\t68\t85\t0\t0\t0\t0\t0\t1\t0\n+2016\t1\t20\t54\t48\t47.7\t44\t52\t49\t61\t0\t0\t0\t0\t0\t0\t1\n+2016\t8\t10\t73\t72\t77.0\t77\t78\t77\t68\t0\t0\t0\t0\t0\t0\t1\n+2016\t3\t23\t56\t57\t54.7\t50\t58\t55\t70\t0\t0\t0\t0\t0\t0\t1\n+2016\t12\t24\t45\t40\t45.1\t44\t47\t46\t39\t0\t0\t1\t0\t0\t0\t0\n+2016\t1\t19\t50\t54\t47.6\t47\t49\t48\t53\t0\t0\t0\t0\t0\t1\t0\n+2016\t11\t6\t65\t58\t53.2\t52\t57\t55\t71\t0\t0\t0\t1\t0\t0\t0\n+2016\t4\t17\t60\t68\t58.6\t58\t62\t59\t54\t0\t0\t0\t1\t0\t0\t0\n+2016\t10\t29\t60\t65\t55.3\t55\t59\t55\t65\t0\t0\t1\t0\t0\t0\t0\n+2016\t2\t1\t48\t47\t48.8\t46\t49\t49\t51\t0\t1\t0\t0\t0\t0\t0\n+2016\t12\t12\t44\t44\t45.6\t43\t50\t45\t42\t0\t1\t0\t0\t0\t0\t0\n+2016\t5\t30\t64\t64\t67.1\t64\t70\t66\t69\t0\t1\t0\t0\t0\t0\t0\n+2016\t10\t23\t59\t62\t57.1\t57\t58\t59\t67\t0\t0\t0\t1\t0\t0\t0\n+2016\t9\t30\t68\t66\t65.7\t64\t67\t65\t74\t1\t0\t0\t0\t0\t0\t0\n+2016\t9\t12\t77\t70\t71.8\t67\t73\t73\t90\t0\t1\t0\t0\t0\t0\t0\n+2016\t11\t2\t59\t57\t54.2\t54\t58\t55\t70\t0\t0\t0\t0\t0\t0\t1\n+2016\t11\t17\t55\t50\t50.5\t46\t51\t50\t57\t0\t0\t0\t0\t1\t0\t0\n+2016\t3\t3\t58\t55\t51.8\t49\t54\t50\t71\t0\t0\t0\t0\t1\t0\t0\n+2016\t11\t21\t57\t55\t49.5\t46\t51\t49\t67\t0\t1\t0\t0\t0\t0\t0\n+2016\t12\t27\t42\t42\t45.2\t41\t50\t47\t47\t0\t0\t0\t0\t0\t1\t0\n+2016\t4\t24\t64\t65\t60.1\t57\t61\t60\t41\t0\t0\t0\t1\t0\t0\t0\n+2016\t5\t20\t64\t63\t65.6\t63\t70\t64\t73\t1\t0\t0\t0\t0\t0\t0\n+2016\t1\t16\t49\t48\t47.3\t45\t52\t46\t28\t0\t0\t1\t0\t0\t0\t0\n+2016\t12\t7\t40\t42\t46.3\t44\t51\t46\t62\t0\t0\t0\t0\t0\t0\t1\n+2016\t1\t7\t44\t51\t46.2\t45\t49\t46\t38\t0\t0\t0\t0\t1\t0\t0\n+2016\t9\t24\t67\t64\t68.0\t65\t71\t66\t64\t0\t0\t1\t0\t0\t0\t0\n+2016\t8\t30\t79\t75\t74.6\t74\t76\t75\t63\t0\t0\t0\t0\t0\t1\t0\n+2016\t1\t11\t50\t52\t46.7\t42\t48\t48\t39\t0\t1\t0\t0\t0\t0\t0\n+2016\t6\t9\t85\t67\t68.6\t66\t73\t69\t80\t0\t0\t0\t0\t1\t0\t0\n+2016\t9\t22\t67\t68\t68.7\t65\t70\t69\t56\t0\t0\t0\t0\t1\t0\t0\n+2016\t3\t25\t53\t54\t55.0\t53\t57\t57\t42\t1\t0\t0\t0\t0\t0\t0\n+2016\t10\t24\t62\t62\t56.8\t52\t61\t57\t70\t0\t1\t0\t0\t0\t0\t0\n+2016\t7\t16\t77\t76\t76.1\t76\t78\t75\t61\t0\t0\t1\t0\t0\t0\t0\n+2016\t7\t1\t74\t73\t73.1\t71\t75\t72\t93\t1\t0\t0\t0\t0\t0\t0\n+2016\t11\t18\t50\t52\t50.3\t50\t53\t50\t35\t1\t0\t0\t0\t0\t0\t0\n+2016\t9\t3\t75\t70\t73.9\t71\t75\t73\t68\t0\t0\t1\t0\t0\t0\t0\n+2016\t8\t2\t73\t77\t77.4\t75\t80\t79\t62\t
0\t0\t0\t0\t0\t1\t0\n+2016\t4\t5\t69\t60\t56.6\t52\t58\t56\t72\t0\t0\t0\t0\t0\t1\t0\n+2016\t3\t13\t55\t52\t53.3\t50\t55\t53\t54\t0\t0\t0\t1\t0\t0\t0\n+2016\t8\t28\t81\t79\t75.0\t71\t77\t76\t85\t0\t0\t0\t1\t0\t0\t0\n+2016\t4\t9\t77\t76\t57.2\t53\t61\t57\t74\t0\t0\t1\t0\t0\t0\t0\n+2016\t5\t26\t66\t66\t66.5\t64\t70\t65\t85\t0\t0\t0\t0\t1\t0\t0\n+2016\t10\t10\t68\t57\t61.8\t58\t64\t61\t62\t0\t1\t0\t0\t0\t0\t0\n+2016\t4\t10\t76\t66\t57.4\t57\t60\t57\t60\t0\t0\t0\t1\t0\t0\t0\n+2016\t10\t19\t60\t61\t58.4\t58\t60\t57\t41\t0\t0\t0\t0\t0\t0\t1\n+2016\t3\t12\t56\t55\t53.1\t52\t58\t53\t65\t0\t0\t1\t0\t0\t0\t0\n+2016\t1\t24\t57\t48\t48.1\t46\t50\t48\t54\t0\t0\t0\t1\t0\t0\t0\n+2016\t2\t7\t53\t49\t49.2\t46\t51\t48\t63\t0\t0\t0\t1\t0\t0\t0\n+2016\t5\t27\t66\t65\t66.7\t64\t67\t68\t73\t1\t0\t0\t0\t0\t0\t0\n+2016\t5\t5\t74\t60\t62.5\t58\t66\t62\t56\t0\t0\t0\t0\t1\t0\t0\n+2016\t3\t11\t55\t56\t53.0\t53\t53\t51\t36\t1\t0\t0\t0\t0\t0\t0\n+2016\t10\t22\t62\t59\t57.4\t56\t59\t58\t44\t0\t0\t1\t0\t0\t0\t0\n+2016\t12\t11\t36\t44\t45.7\t41\t46\t47\t35\t0\t0\t0\t1\t0\t0\t0\n+2016\t5\t8\t77\t82\t63.2\t62\t65\t63\t83\t0\t0\t0\t1\t0\t0\t0\n+2016\t5\t29\t64\t64\t67.0\t65\t71\t65\t76\t0\t0\t0\t1\t0\t0\t0\n+2016\t12\t13\t44\t43\t45.5\t41\t47\t46\t46\t0\t0\t0\t0\t0\t1\t0\n+2016\t3\t30\t56\t64\t55.7\t51\t57\t56\t57\t0\t0\t0\t0\t0\t0\t1\n+2016\t11\t8\t61\t63\t52.7\t49\t57\t52\t49\t0\t0\t0\t0\t0\t1\t0\n+2016\t6\t20\t65\t70\t70.6\t67\t71\t70\t79\t0\t1\t0\t0\t0\t0\t0\n+2016\t11\t9\t63\t71\t52.4\t48\t56\t52\t42\t0\t0\t0\t0\t0\t0\t1\n+2016\t7\t3\t76\t76\t73.5\t69\t76\t75\t85\t0\t0\t0\t1\t0\t0\t0\n+2016\t10\t9\t64\t68\t62.1\t58\t65\t63\t55\t0\t0\t0\t1\t0\t0\t0\n+2016\t12\t16\t39\t39\t45.3\t44\t49\t44\t39\t1\t0\t0\t0\t0\t0\t0\n+2016\t9\t16\t79\t71\t70.7\t70\t74\t71\t52\t1\t0\t0\t0\t0\t0\t0\n+2016\t6\t25\t68\t69\t71.7\t68\t73\t73\t'..b'85\t0\t0\t0\t0\t0\t1\t0\n+2016\t11\t11\t65\t64\t51.9\t50\t53\t52\t55\t1\t0\t0\t0\t0\t0\t0\n+2016\t5\t21\t63\t66\t65.7\t62\t67\t65\t49\t0\t0\t1\t0\t0\t0\t0\n+2016\t3\t6\t57\t64\t52.2\t52\t53\t51\t49\t0\t0\t0\t1\t0\t0\t0\n+2016\t5\t18\t60\t71\t65.2\t61\t68\t65\t56\t0\t0\t0\t0\t0\t0\t1\n+2016\t5\t11\t67\t75\t63.8\t62\t68\t63\t60\t0\t0\t0\t0\t0\t0\t1\n+2016\t1\t9\t45\t48\t46.4\t46\t50\t45\t47\t0\t0\t1\t0\t0\t0\t0\n+2016\t3\t8\t60\t53\t52.5\t48\t56\t51\t70\t0\t0\t0\t0\t0\t1\t0\n+2016\t1\t15\t55\t49\t47.1\t46\t51\t46\t65\t1\t0\t0\t0\t0\t0\t0\n+2016\t6\t8\t86\t85\t68.5\t67\t70\t69\t81\t0\t0\t0\t0\t0\t0\t1\n+2016\t2\t10\t57\t62\t49.4\t48\t50\t49\t30\t0\t0\t0\t0\t0\t0\t1\n+2016\t12\t3\t46\t50\t47.0\t42\t52\t47\t58\t0\t0\t1\t0\t0\t0\t0\n+2016\t10\t27\t65\t58\t55.9\t51\t60\t55\t39\t0\t0\t0\t0\t1\t0\t0\n+2016\t8\t7\t79\t72\t77.2\t74\t78\t77\t95\t0\t0\t0\t1\t0\t0\t0\n+2016\t11\t16\t57\t55\t50.7\t50\t51\t49\t34\t0\t0\t0\t0\t0\t0\t1\n+2016\t9\t10\t72\t74\t72.3\t70\t77\t74\t91\t0\t0\t1\t0\t0\t0\t0\n+2016\t7\t29\t83\t85\t77.3\t77\t80\t79\t77\t1\t0\t0\t0\t0\t0\t0\n+2016\t8\t3\t77\t73\t77.3\t77\t81\t77\t93\t0\t0\t0\t0\t0\t0\t1\n+2016\t12\t1\t52\t52\t47.4\t44\t48\t49\t39\t0\t0\t0\t0\t1\t0\t0\n+2016\t9\t25\t64\t67\t67.6\t64\t72\t67\t62\t0\t0\t0\t1\t0\t0\t0\n+2016\t12\t23\t49\t45\t45.1\t45\t49\t44\t35\t1\t0\t0\t0\t0\t0\t0\n+2016\t12\t2\t52\t46\t47.2\t46\t51\t49\t41\t1\t0\t0\t0\t0\t0\t0\n+2016\t10\t13\t62\t66\t60.6\t60\t62\t60\t57\t0\t0\t0\t0\t1\t0\t0\n+2016\t7\t23\t81\t71\t77.0\t75\t81\t76\t86\t0\t0\t1\t0\t0\t0\t0\n+2016\t6\t13\t65\t70\t69.3\t66\t72\t69\t79\t0\t1\t0\t0\t0\t0\t0\n+2016\t2\t15\t55\t58\t49.9\t46\t52\t49\t53\t0\t1\t0\t0\t0\t0\t0\n+2016\t8\t8\t72\t72\t77.1\t76\t78\t77\t65\t0\t1\t0\t0\t0\t0\t0\n+2016\t7\t12\t74\t74\
t75.4\t74\t77\t77\t71\t0\t0\t0\t0\t0\t1\t0\n+2016\t10\t3\t63\t65\t64.5\t63\t68\t65\t49\t0\t1\t0\t0\t0\t0\t0\n+2016\t4\t18\t68\t77\t58.8\t55\t59\t57\t39\t0\t1\t0\t0\t0\t0\t0\n+2016\t2\t25\t60\t59\t50.9\t49\t51\t49\t35\t0\t0\t0\t0\t1\t0\t0\n+2016\t1\t2\t44\t45\t45.7\t41\t50\t44\t61\t0\t0\t1\t0\t0\t0\t0\n+2016\t2\t21\t51\t53\t50.5\t49\t54\t52\t46\t0\t0\t0\t1\t0\t0\t0\n+2016\t3\t24\t57\t53\t54.9\t54\t56\t56\t72\t0\t0\t0\t0\t1\t0\t0\n+2016\t7\t27\t85\t79\t77.3\t73\t78\t79\t79\t0\t0\t0\t0\t0\t0\t1\n+2016\t2\t4\t51\t49\t49.0\t44\t54\t51\t44\t0\t0\t0\t0\t1\t0\t0\n+2016\t10\t7\t66\t63\t62.9\t62\t67\t64\t78\t1\t0\t0\t0\t0\t0\t0\n+2016\t4\t4\t63\t69\t56.5\t54\t59\t56\t45\t0\t1\t0\t0\t0\t0\t0\n+2016\t2\t24\t51\t60\t50.8\t47\t53\t50\t46\t0\t0\t0\t0\t0\t0\t1\n+2016\t10\t8\t63\t64\t62.5\t60\t65\t61\t73\t0\t0\t1\t0\t0\t0\t0\n+2016\t9\t15\t75\t79\t71.0\t66\t76\t69\t64\t0\t0\t0\t0\t1\t0\t0\n+2016\t1\t14\t49\t55\t47.0\t43\t47\t46\t58\t0\t0\t0\t0\t1\t0\t0\n+2016\t4\t1\t68\t73\t56.0\t54\t59\t55\t41\t1\t0\t0\t0\t0\t0\t0\n+2016\t10\t17\t62\t60\t59.1\t57\t63\t59\t62\t0\t1\t0\t0\t0\t0\t0\n+2016\t6\t18\t71\t67\t70.2\t67\t75\t69\t77\t0\t0\t1\t0\t0\t0\t0\n+2016\t12\t26\t41\t42\t45.2\t45\t48\t46\t58\t0\t1\t0\t0\t0\t0\t0\n+2016\t5\t17\t57\t60\t65.0\t62\t65\t65\t55\t0\t0\t0\t0\t0\t1\t0\n+2016\t11\t20\t55\t57\t49.8\t47\t54\t48\t30\t0\t0\t0\t1\t0\t0\t0\n+2016\t12\t18\t35\t35\t45.2\t44\t46\t46\t36\t0\t0\t0\t1\t0\t0\t0\n+2016\t9\t17\t71\t75\t70.3\t66\t73\t70\t84\t0\t0\t1\t0\t0\t0\t0\n+2016\t2\t26\t59\t61\t51.1\t48\t56\t53\t65\t1\t0\t0\t0\t0\t0\t0\n+2016\t2\t22\t53\t51\t50.6\t46\t51\t50\t59\t0\t1\t0\t0\t0\t0\t0\n+2016\t6\t26\t69\t71\t71.9\t67\t74\t72\t70\t0\t0\t0\t1\t0\t0\t0\n+2016\t7\t11\t71\t74\t75.3\t74\t79\t75\t71\t0\t1\t0\t0\t0\t0\t0\n+2016\t12\t30\t48\t48\t45.4\t44\t46\t44\t42\t1\t0\t0\t0\t0\t0\t0\n+2016\t7\t9\t68\t74\t74.9\t70\t79\t76\t60\t0\t0\t1\t0\t0\t0\t0\n+2016\t6\t21\t70\t76\t70.8\t68\t75\t71\t57\t0\t0\t0\t0\t0\t1\t0\n+2016\t3\t2\t54\t58\t51.6\t47\t54\t52\t37\t0\t0\t0\t0\t0\t0\t1\n+2016\t2\t20\t53\t51\t50.4\t48\t55\t51\t43\t0\t0\t1\t0\t0\t0\t0\n+2016\t9\t9\t67\t72\t72.6\t68\t77\t71\t78\t1\t0\t0\t0\t0\t0\t0\n+2016\t9\t26\t67\t76\t67.2\t64\t69\t69\t74\t0\t1\t0\t0\t0\t0\t0\n+2016\t1\t22\t52\t52\t47.9\t47\t48\t48\t60\t1\t0\t0\t0\t0\t0\t0\n+2016\t11\t27\t52\t53\t48.2\t48\t49\t49\t53\t0\t0\t0\t1\t0\t0\t0\n+2016\t6\t12\t67\t65\t69.1\t65\t73\t70\t83\t0\t0\t0\t1\t0\t0\t0\n+2016\t10\t20\t61\t58\t58.1\t58\t59\t58\t43\t0\t0\t0\t0\t1\t0\t0\n+2016\t7\t13\t74\t77\t75.6\t74\t78\t76\t56\t0\t0\t0\t0\t0\t0\t1\n+2016\t11\t7\t58\t61\t52.9\t51\t56\t51\t35\t0\t1\t0\t0\t0\t0\t0\n+2016\t10\t1\t66\t67\t65.3\t64\t70\t64\t54\t0\t0\t1\t0\t0\t0\t0\n+2016\t11\t22\t55\t54\t49.3\t46\t54\t49\t58\t0\t0\t0\t0\t0\t1\t0\n+2016\t6\t1\t71\t79\t67.4\t65\t69\t66\t58\t0\t0\t0\t0\t0\t0\t1\n+2016\t5\t13\t81\t77\t64.3\t63\t67\t66\t67\t1\t0\t0\t0\t0\t0\t0\n+2016\t6\t3\t75\t71\t67.7\t64\t71\t66\t55\t1\t0\t0\t0\t0\t0\t0\n+2016\t4\t12\t59\t58\t57.7\t54\t59\t57\t61\t0\t0\t0\t0\t0\t1\t0\n+2016\t3\t31\t64\t68\t55.9\t55\t59\t56\t56\t0\t0\t0\t0\t1\t0\t0\n+2016\t12\t14\t43\t40\t45.4\t45\t48\t45\t49\t0\t0\t0\t0\t0\t0\t1\n+2016\t8\t5\t75\t80\t77.3\t75\t81\t78\t71\t1\t0\t0\t0\t0\t0\t0\n+2016\t5\t4\t87\t74\t62.3\t59\t65\t64\t61\t0\t0\t0\t0\t0\t0\t1\n+2016\t12\t31\t48\t57\t45.5\t42\t48\t47\t57\t0\t0\t1\t0\t0\t0\t0\n+2016\t1\t21\t48\t52\t47.8\t43\t51\t46\t57\t0\t0\t0\t0\t1\t0\t0\n+2016\t7\t10\t74\t71\t75.1\t71\t77\t76\t95\t0\t0\t0\t1\t0\t0\t0\n+2016\t3\t15\t54\t49\t53.6\t49\t58\t52\t70\t0\t0\t0\t0\t0\t1\t0\n+2016\t4\t19\t77\t89\t59.0\t59\t63\t59\t61\t0\t0\t0\t0\t0\t1\t0\n+2
016\t10\t14\t66\t60\t60.2\t56\t64\t60\t78\t1\t0\t0\t0\t0\t0\t0\n+2016\t4\t15\t59\t59\t58.3\t58\t61\t60\t40\t1\t0\t0\t0\t0\t0\t0\n'
diff -r 000000000000 -r af2624d5ab32 test-data/regression_groups.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_groups.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+groups
+train
+train
+train
+train
+test
+test
+train
+train
+validation
+train
+train
+train
+train
+train
+validation
+validation
+train
+train
+train
+test
+test
+validation
+train
+validation
+test
+validation
+train
+train
+train
+test
+test
+test
+train
+test
+train
+train
+train
+test
+train
+train
+train
+train
+test
+train
+train
+train
+train
+train
+train
+train
+train
+train
+test
+test
+validation
+train
+validation
+train
+train
+train
+train
+test
+train
+train
+validation
+validation
+train
+train
+train
+train
+validation
+test
+test
+train
+train
+train
+train
+train
+train
+train
+validation
+train
+train
+train
+train
+test
+train
+validation
+train
+test
+test
+test
+train
+train
+train
+test
+train
+train
+train
+train
+train
+train
+train
+train
+train
+train
+validation
+train
+train
+train
+train
+validation
+train
+validation
+train
+validation
+validation
+train
+validation
+train
+test
+train
+train
+train
+train
+test
+validation
+test
+train
+train
+train
+train
+test
+train
+train
+train
+test
+validation
+train
+train
+train
+train
+train
+validation
+test
+train
+train
+test
+train
+train
+validation
+train
+train
+train
+train
+train
+test
+test
+validation
+train
+test
+train
+validation
+train
+train
+train
+test
+train
+train
+train
+train
+train
+train
+validation
+train
+train
+train
+train
+validation
+test
+train
+train
+train
+validation
+train
+test
+test
+validation
+train
+validation
+validation
+test
+test
+test
+train
+train
+test
+train
+train
+validation
+test
+test
+train
+train
+train
+test
+test
+train
+train
+train
+train
+train
+test
+train
+train
+test
+validation
+test
+train
+train
+test
+train
+train
+train
+validation
+train
+validation
+train
+validation
+train
+train
+train
+validation
+validation
+test
+validation
+train
+test
+train
+validation
+train
+train
+test
+train
+train
+test
+test
+train
+validation
+train
+train
+train
+train
+train
+train
+train
+train
+validation
+train
+test
+train
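This groups column assigns each of the 261 rows in regression_X.tabular to a train/test/validation fold, so consumers index the feature table by it rather than splitting randomly. A pandas sketch of that usage (illustrative, not the tool's own loader):

    import pandas as pd

    X = pd.read_csv("test-data/regression_X.tabular", sep="\t")
    groups = pd.read_csv("test-data/regression_groups.tabular",
                         sep="\t")["groups"]

    X_train = X[groups == "train"]
    X_val = X[groups == "validation"]
    X_test = X[groups == "test"]
    print(groups.value_counts())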
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+explained_variance_score : 
+0.8260
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result02
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result02 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+mean_absolute_error : 
+3.8706
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result03
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result03 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+mean_squared_error : 
+26.0153
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result04
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result04 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+mean_squared_log_error : 
+0.0061
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result05
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result05 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+median_absolute_error : 
+3.0090
diff -r 000000000000 -r af2624d5ab32 test-data/regression_metrics_result06
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_metrics_result06 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+r2_score : 
+0.8129
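The six regression_metrics_result files above each pin an sklearn regression metric to a rounded expected value. A minimal sketch of how such fixtures could be regenerated (the y_true/y_pred arrays here are placeholders, not the tool's actual test inputs):

    import numpy as np
    from sklearn import metrics

    y_true = np.array([3.0, 5.0, 7.5, 9.0])   # placeholder ground truth
    y_pred = np.array([2.8, 5.4, 7.1, 9.6])   # placeholder predictions

    for name in ("explained_variance_score", "mean_absolute_error",
                 "mean_squared_error", "mean_squared_log_error",
                 "median_absolute_error", "r2_score"):
        value = getattr(metrics, name)(y_true, y_pred)
        # Same two-line layout as the fixtures: "<name> : " then the value.
        print("%s : " % name)
        print("%.4f" % value)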
diff -r 000000000000 -r af2624d5ab32 test-data/regression_test.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_test.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738
diff -r 000000000000 -r af2624d5ab32 test-data/regression_test_X.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_test_X.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,88 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+2016 9 29 69 68 66.1 63 71 68 57 0 0 0 0 1 0 0
+2016 4 27 59 60 60.7 59 65 60 50 0 0 0 0 0 0 1
+2016 11 28 53 48 48.0 46 48 49 44 0 1 0 0 0 0 0
+2016 10 12 60 62 61.0 60 63 63 52 0 0 0 0 0 0 1
+2016 6 19 67 65 70.4 69 73 70 58 0 0 0 1 0 0 0
+2016 5 7 68 77 63.0 61 65 63 83 0 0 1 0 0 0 0
+2016 7 25 75 80 77.1 75 82 76 81 0 1 0 0 0 0 0
+2016 8 15 90 83 76.6 76 79 75 70 0 1 0 0 0 0 0
+2016 10 28 58 60 55.6 52 56 55 52 1 0 0 0 0 0 0
+2016 6 5 80 81 68.0 64 70 66 54 0 0 0 1 0 0 0
+2016 3 19 58 63 54.2 54 59 54 62 0 0 1 0 0 0 0
+2016 6 7 92 86 68.3 67 69 70 58 0 0 0 0 0 1 0
+2016 12 10 41 36 45.9 44 48 44 65 0 0 1 0 0 0 0
+2016 4 23 73 64 59.9 56 63 59 57 0 0 1 0 0 0 0
+2016 6 24 75 68 71.5 67 73 73 65 1 0 0 0 0 0 0
+2016 2 9 51 57 49.4 45 52 49 57 0 0 0 0 0 1 0
+2016 11 10 71 65 52.2 52 54 51 38 0 0 0 0 1 0 0
+2016 3 21 61 55 54.5 52 56 55 52 0 1 0 0 0 0 0
+2016 2 28 60 57 51.3 48 56 53 66 0 0 0 1 0 0 0
+2016 6 28 78 85 72.4 72 76 74 67 0 0 0 0 0 1 0
+2016 10 6 63 66 63.3 62 67 63 55 0 0 0 0 1 0 0
+2016 2 17 55 56 50.0 45 51 49 46 0 0 0 0 0 0 1
+2016 6 15 66 60 69.7 65 73 71 69 0 0 0 0 0 0 1
+2016 10 15 60 60 59.9 59 62 59 46 0 0 1 0 0 0 0
+2016 3 26 54 57 55.2 53 57 55 54 0 0 1 0 0 0 0
+2016 1 26 51 54 48.3 44 53 50 61 0 0 0 0 0 1 0
+2016 5 23 59 66 66.1 63 68 68 66 0 1 0 0 0 0 0
+2016 1 10 48 50 46.5 45 48 48 49 0 0 0 1 0 0 0
+2016 5 22 66 59 65.9 62 66 65 80 0 0 0 1 0 0 0
+2016 7 15 75 77 76.0 74 80 78 75 1 0 0 0 0 0 0
+2016 4 22 81 73 59.7 59 64 60 59 1 0 0 0 0 0 0
+2016 4 29 61 64 61.2 61 65 61 49 1 0 0 0 0 0 0
+2016 1 23 52 57 48.0 45 49 50 37 0 0 1 0 0 0 0
+2016 8 16 83 84 76.5 72 78 78 90 0 0 0 0 0 1 0
+2016 8 1 76 73 77.4 76 78 79 65 0 1 0 0 0 0 0
+2016 2 27 61 60 51.2 51 53 53 61 0 0 1 0 0 0 0
+2016 2 12 56 55 49.6 49 52 48 33 1 0 0 0 0 0 0
+2016 1 31 52 48 48.7 47 52 49 61 0 0 0 1 0 0 0
+2016 9 5 67 68 73.5 71 75 73 54 0 1 0 0 0 0 0
+2016 12 20 39 46 45.1 45 49 45 62 0 0 0 0 0 1 0
+2016 5 1 61 68 61.6 60 65 60 75 0 0 0 1 0 0 0
+2016 3 28 59 51 55.5 55 57 55 47 0 1 0 0 0 0 0
+2016 4 21 81 81 59.4 55 61 59 55 0 0 0 0 1 0 0
+2016 1 6 40 44 46.1 43 49 48 40 0 0 0 0 0 0 1
+2016 10 21 58 62 57.8 56 60 59 44 1 0 0 0 0 0 0
+2016 5 2 68 77 61.9 60 66 61 59 0 1 0 0 0 0 0
+2016 3 1 53 54 51.5 48 56 50 53 0 0 0 0 0 1 0
+2016 7 21 78 82 76.8 73 81 78 84 0 0 0 0 1 0 0
+2016 3 17 51 53 53.9 49 58 52 62 0 0 0 0 1 0 0
+2016 12 6 46 40 46.4 44 50 45 56 0 0 0 0 0 1 0
+2016 12 21 46 51 45.1 44 50 46 39 0 0 0 0 0 0 1
+2016 1 4 44 41 45.9 44 48 46 53 0 1 0 0 0 0 0
+2016 10 2 67 63 64.9 62 69 66 82 0 0 0 1 0 0 0
+2016 5 28 65 64 66.8 64 69 65 64 0 0 1 0 0 0 0
+2016 9 11 74 77 72.1 69 75 71 70 0 0 0 1 0 0 0
+2016 10 25 62 61 56.5 53 60 55 70 0 0 0 0 0 1 0
+2016 2 18 56 57 50.1 47 55 49 34 0 0 0 0 1 0 0
+2016 11 1 117 59 54.5 51 59 55 61 0 0 0 0 0 1 0
+2016 3 16 49 51 53.7 52 54 55 65 0 0 0 0 0 0 1
+2016 4 26 55 59 60.5 56 61 62 75 0 0 0 0 0 1 0
+2016 6 10 67 65 68.8 67 71 67 73 1 0 0 0 0 0 0
+2016 2 3 46 51 48.9 48 49 50 40 0 0 0 0 0 0 1
+2016 3 7 64 60 52.4 49 57 53 71 0 1 0 0 0 0 0
+2016 9 18 75 68 70.0 66 73 71 90 0 0 0 1 0 0 0
+2016 3 20 63 61 54.3 51 56 55 50 0 0 0 1 0 0 0
+2016 4 6 60 57 56.8 53 59 57 64 0 0 0 0 0 0 1
+2016 7 2 73 76 73.3 70 77 73 84 0 0 1 0 0 0 0
+2016 7 5 71 68 74.0 72 77 74 62 0 0 0 0 0 1 0
+2016 7 19 80 73 76.6 76 78 77 90 0 0 0 0 0 1 0
+2016 12 9 40 41 46.0 43 51 44 54 1 0 0 0 0 0 0
+2016 6 29 85 79 72.6 68 76 74 81 0 0 0 0 0 0 1
+2016 3 22 55 56 54.6 51 55 54 64 0 0 0 0 0 1 0
+2016 4 3 71 63 56.3 54 61 56 64 0 0 0 1 0 0 0
+2016 1 17 48 54 47.4 45 51 46 47 0 0 0 1 0 0 0
+2016 3 10 54 55 52.8 49 55 53 50 0 0 0 0 1 0 0
+2016 5 9 82 63 63.4 59 66 62 64 0 1 0 0 0 0 0
+2016 1 8 51 45 46.3 43 47 46 34 1 0 0 0 0 0 0
+2016 8 11 72 76 76.9 74 81 75 80 0 0 0 0 1 0 0
+2016 12 29 47 48 45.3 43 50 45 65 0 0 0 0 1 0 0
+2016 11 23 54 54 49.1 48 52 49 38 0 0 0 0 0 0 1
+2016 11 19 52 55 50.0 50 54 49 56 0 0 1 0 0 0 0
+2016 4 7 57 68 56.9 52 61 55 38 0 0 0 0 1 0 0
+2016 6 4 71 80 67.9 63 72 66 76 0 0 1 0 0 0 0
+2016 6 17 67 71 70.0 66 74 69 54 1 0 0 0 0 0 0
+2016 10 5 61 63 63.7 61 66 65 48 0 0 0 0 0 0 1
+2016 3 4 55 59 51.9 47 56 53 45 1 0 0 0 0 0 0
+2016 12 22 51 49 45.1 42 47 46 38 0 0 0 0 1 0 0
diff -r 000000000000 -r af2624d5ab32 test-data/regression_test_y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_test_y.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,88 @@
+actual prediction
+66 69.857
+61 61.319
+52 51.891
+66 61.321
+70 66.463
+82 70.162
+85 78.848
+84 75.786
+65 62.121
+92 74.078
+61 63.647
+85 72.176
+44 38.458
+65 62.433
+69 71.594
+62 56.013
+64 60.943
+56 56.995
+53 56.748
+79 76.113
+63 63.758
+57 54.401
+67 66.493
+62 62.465
+59 58.786
+56 53.032
+65 66.769
+52 46.448
+66 62.122
+76 80.176
+64 73.833
+61 64.313
+48 55.188
+81 81.972
+77 74.178
+57 61.695
+58 53.636
+47 51.424
+68 68.929
+51 42.452
+77 70.385
+56 57.373
+73 76.172
+51 42.396
+59 61.098
+87 74.08
+58 52.745
+81 81.381
+58 53.324
+42 42.471
+49 46.507
+40 42.106
+65 64.17
+64 65.703
+70 74.13
+65 61.339
+53 55.177
+57 59.945
+53 54.651
+60 59.664
+67 65.474
+49 50.061
+53 60.849
+69 70.188
+55 60.062
+68 59.236
+76 71.868
+69 69.796
+78 76.83
+36 41.32
+74 76.868
+57 56.783
+69 60.378
+50 50.468
+56 54.426
+67 63.991
+48 43.711
+80 74.354
+48 47.306
+49 52.326
+57 53.526
+77 68.21
+81 73.607
+67 72.658
+66 63.243
+57 57.126
+45 46.04
diff -r 000000000000 -r af2624d5ab32 test-data/regression_train.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_train.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,10 @@
+143.762620712 -0.330941870584 -1.17964571928 0.47944415578 -0.0486946279099 1.57951239219
+-88.5787166225 1.08055532812 -2.57109184022 -0.92512305494 0.317511276982 -1.202358944
+-82.8452345578 0.272541389247 -0.168636324107 0.923988150154 -0.467750945768 -0.719169535969
+72.4951388149 -0.268686605278 0.991068834926 0.731619322189 1.17038734294 0.323842059244
+11.805182128 1.03604670966 -0.709685560786 -1.54916691211 -0.614757954242 0.24176665894
+-63.9354970901 -0.101485840571 0.984112210822 -2.01704822953 0.282058758309 -0.776448499847
+126.325840796 -0.359998340179 0.353534448839 -1.23256828198 0.563632964937 1.15031170568
+23.0341392692 0.518540465136 1.03188231893 -2.53173026594 -0.0419267228327 0.193734455015
+67.6714937696 -0.115688051547 -0.821437865172 -0.368962397052 -0.526743874023 0.94315222831
+47.3927584881 -0.785096541368 -0.0942409319417 0.224267378731 -1.63317786831 1.26458811586
diff -r 000000000000 -r af2624d5ab32 test-data/regression_y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_y.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,262 @@
+actual
+71
+59
+76
+57
+54
+54
+52
+82
+35
+41
+48
+80
+68
+39
+85
+79
+52
+76
+53
+41
+48
+61
+77
+68
+46
+43
+71
+62
+67
+74
+57
+52
+59
+54
+47
+55
+66
+54
+40
+45
+67
+70
+45
+65
+67
+57
+61
+72
+76
+55
+67
+73
+57
+54
+75
+66
+65
+60
+59
+58
+52
+51
+51
+64
+68
+55
+62
+44
+63
+64
+40
+68
+71
+76
+65
+71
+57
+35
+75
+71
+75
+77
+57
+49
+90
+68
+59
+87
+68
+68
+40
+46
+64
+52
+71
+79
+68
+86
+72
+41
+64
+58
+67
+74
+59
+73
+55
+75
+63
+58
+48
+51
+65
+81
+80
+73
+60
+76
+69
+56
+46
+55
+57
+64
+74
+49
+65
+55
+53
+52
+75
+66
+68
+65
+83
+60
+76
+62
+73
+79
+77
+55
+63
+60
+85
+63
+57
+42
+66
+65
+44
+45
+53
+59
+52
+59
+79
+77
+55
+72
+80
+68
+68
+58
+49
+72
+64
+71
+67
+51
+51
+71
+52
+56
+61
+68
+63
+60
+63
+59
+60
+64
+81
+50
+54
+48
+67
+56
+49
+60
+72
+50
+77
+88
+75
+46
+76
+40
+50
+60
+75
+66
+55
+73
+77
+61
+89
+61
+44
+51
+54
+83
+49
+64
+60
+59
+68
+71
+49
+71
+60
+65
+42
+71
+55
+39
+68
+60
+51
+78
+74
+57
+71
+73
+55
+53
+74
+77
+57
+48
+70
+62
+75
+63
+63
+54
+75
+82
+80
+60
+73
+39
+79
+60
+40
+52
+74
+51
+81
+60
+60
diff -r 000000000000 -r af2624d5ab32 test-data/regression_y_split_test01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_y_split_test01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,67 @@
+actual
+57
+71
+75
+49
+66
+59
+68
+48
+46
+45
+67
+75
+79
+74
+60
+48
+77
+71
+85
+41
+75
+61
+76
+52
+46
+77
+88
+60
+68
+40
+89
+46
+49
+68
+57
+50
+68
+55
+64
+51
+77
+79
+42
+76
+54
+54
+59
+80
+55
+54
+54
+54
+54
+71
+56
+66
+61
+40
+71
+63
+78
+53
+75
+50
+72
+68
diff -r 000000000000 -r af2624d5ab32 test-data/rfc_model01
Binary file test-data/rfc_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/rfc_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/rfc_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 predicted
+3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
+0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
+2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
+1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/rfc_result02
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/rfc_result02 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 0
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 000000000000 -r af2624d5ab32 test-data/rfr_model01
Binary file test-data/rfr_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/rfr_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/rfr_result01 Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,6 @@
+0 1 2 3 4 predicted
+86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6686209127804698
+91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 1.0374491367850487
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.16198314840411981
+61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 1.1603837128651284
+-206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -0.6710618307873705
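The rfc_result* and rfr_result* fixtures above are prediction tables: the input features with one appended `predicted` column. A rough sketch of producing that shape with a random forest (the data here is a synthetic stand-in; the fixtures come from models trained through the tool wrappers):

    import numpy as np
    import pandas as pd
    from sklearn.ensemble import RandomForestRegressor

    rng = np.random.RandomState(10)
    X_train = rng.normal(size=(20, 5))   # stand-in training features
    y_train = rng.normal(size=20)        # stand-in training target
    X_new = rng.normal(size=(5, 5))      # stand-in rows to score

    model = RandomForestRegressor(n_estimators=100, random_state=10)
    model.fit(X_train, y_train)

    out = pd.DataFrame(X_new)
    out["predicted"] = model.predict(X_new)   # appended prediction column
    out.to_csv("rfr_result_sketch.tabular", sep="\t", index=False)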
diff -r 000000000000 -r af2624d5ab32 test-data/roc_auc_score.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/roc_auc_score.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+roc_auc_score : 
+1.0
diff -r 000000000000 -r af2624d5ab32 test-data/roc_curve.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/roc_curve.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+roc_curve : 
+(array([0., 0., 1.]), array([0., 1., 1.]), array([2, 1, 0]))
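roc_auc_score.txt and roc_curve.txt above store, respectively, a scalar AUC and the repr of the (fpr, tpr, thresholds) tuple returned by sklearn.metrics.roc_curve. A minimal sketch with placeholder labels and scores that separate perfectly, matching the 1.0 AUC (exact threshold values, such as the leading 2, depend on the scikit-learn release):

    from sklearn.metrics import roc_auc_score, roc_curve

    y_true = [0, 1]    # placeholder labels
    y_score = [0, 1]   # placeholder decision scores

    print("roc_auc_score : ")
    print(roc_auc_score(y_true, y_score))   # 1.0 for a perfect ranking

    print("roc_curve : ")
    print(roc_curve(y_true, y_score))       # (fpr, tpr, thresholds) tuple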
diff -r 000000000000 -r af2624d5ab32 test-data/scurve.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/scurve.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 2 0
+0.39776718705997 1.55657843089970 -0.08251361596022 0.40908193877997
+-0.86879981115888 1.55919679722150 1.49516349636287 -2.08881933479976
+-0.65289858569518 1.22065630641879 0.24255466415243 -0.71140494713335
+-0.10763690701725 0.61800069704880 -1.99419027165214 3.24943849328794
+0.99901118702158 1.39546981502591 1.04445951198273 -4.66791480855597
+0.41235208162789 1.71923659145813 1.91102456650583 -3.56662701011788
+0.99925989449517 1.25064751551362 -1.03846639122004 1.60927221055449
+0.07044439596581 1.96481565921910 -1.99751570768435 3.07108986477003
+0.27862976083519 1.95300025403171 1.96039859244843 -3.42395972683000
+0.65011665703012 0.33338826239772 -0.24016558892613 0.70773795618674
+-0.51972572286454 0.04635627295681 -1.85433317446587 3.68812253056743
+-0.39023537550648 0.32148909701416 1.92071513059323 -2.74070543050842
+-0.17497560644179 1.84699365051817 1.98457276884460 -2.96571166126051
+0.52214849802422 1.90709969975907 1.85285458667410 -3.69096085168046
+-0.47925331233802 0.42195683743689 1.87767662758731 -2.64178889385637
+-0.97977425898513 0.72105050162922 -1.20010597549835 4.51092289761005
+0.20263517733109 1.09875052325534 -1.97925430042865 2.93754447428025
+-0.04968907072476 0.54366169835394 1.99876473518567 -3.09188311303017
+0.16054679698371 0.92120324214970 -1.98702822957516 2.98034804244113
+-0.84804646283126 1.39232312964677 1.52992187809091 -2.12930476964060
+-0.60013742781021 1.00071179334973 0.20010308930378 -0.64367290462456
+-0.84447848269654 1.43214198112867 -1.53558948109774 4.14718334605828
+0.14728007148812 1.05191187245956 -1.98909482889279 2.99377486434698
+-0.99965711795058 0.00279804623809 0.97381514692169 -1.54460848053403
+-0.08231500933733 0.78940057337967 1.99660636122684 -3.05918440186536
+-0.93150531410199 0.98433393980230 0.63627228618133 -1.19852972744287
+0.99856317168399 0.80576066275828 1.05358723874584 -4.65877606165848
+-0.72309049986373 0.70859660021264 1.69075330546210 -2.33332664772038
+0.34762708311665 1.00122863888591 -1.93763287649474 2.78655349368483
+0.98968220719363 0.89035325766228 1.14327989658126 -4.56861426359002
+0.80256165657373 0.18086557639287 -0.40343081926936 0.93157688044368
+0.82956862745286 0.54712584005488 -0.44159522535532 0.97833473179891
+0.54785892441872 1.88695419548545 1.83657073755587 -3.72139539228723
+-0.89687690788692 0.05308928266788 0.55771975841194 -1.11265681822453
+0.94148777871374 0.07999737928130 1.33704712212488 -4.36861024611527
+-0.51237630504464 0.56628071943964 -1.85876103895601 3.67954229875839
+-0.98387649482367 1.16468834043354 -1.17884921843132 4.53257229404396
+0.84462075857746 1.98178560584965 1.53536508494674 -4.14744904524777
+-0.51346352650877 1.98528447480594 -1.85811141872439 3.68080881254506
+0.66297042486611 1.98623474496209 -0.25135441245350 0.72477955669555
+0.75540725891007 0.22009666193313 -1.65525557852335 2.28531722974483
+0.94146657753588 1.32896289192788 -0.66289366160977 1.22695469533933
+0.69709983371402 1.04797366897663 -0.28302592666409 0.77134449118105
+0.98150332100560 0.34629981961746 1.19144511185972 -4.51975469932009
+-0.39737776530432 1.88592048983005 1.91765511584786 -2.73293511995769
+0.40880572260440 0.48372017195250 -0.08737856634534 0.42114505503778
+0.56896103935759 1.99786453768642 -1.82236447861829 2.53635073354523
+-0.71172480125230 1.16538763029980 1.70245840252814 -2.34964210993111
+-0.90170936131617 0.36655800126115 1.43234272020560 -2.01788557835913
+-0.17868797583124 0.77369084383558 -1.98390579187915 3.32124545737013
+-0.97238420561973 0.37934705782430 -1.23338585360148 4.47683073216571
+-0.46670740559088 0.82154134605062 -1.88441178054492 3.62715683862448
+-0.96972627581563 1.18936013780341 0.75580550785746 -1.32410737612446
+0.80264918483869 1.43317218625668 -0.40354858866980 0.93172361431463
+-0.97961162574302 0.97378296473825 0.79909937105844 -1.36851911905928
+-0.99791255777848 0.61917963553341 0.93542038225568 -1.50617173609004
+-0.10737008100010 1.15488274565569 1.99421912358697 -3.03401519525838
+-0.62053033447500 0.88341563913749 1.78418244305541 -2.47217384154222
+0.91193663680462 0.71935620520107 1.41033104982866 -4.28957192937856
+0.05116766471514 0.64266386401763 -0.00130992290521 0.05119001832686
+-0.91929803801872 0.41641448039205 0.60643791176621 -1.16629314143751
+0.76733212242118 0.90251724812367 -0.35875011586699 0.87467029775790
+0.94069506387414 0.98368582052811 -0.66074670701256 1.22467332521808
+0.22488014225528 1.79815262958742 1.97438643341297 -3.36841267959257
+-0.81182171546475 1.45872092205888 -1.58390538814081 4.08885791768265
+-0.87500135199695 1.54017954583939 -1.48412047467908 4.20703126278048
+0.82154938436974 0.75087849512398 -0.42986264019825 0.96412328524223
+-0.87126700078957 0.68747907047077 0.50919065480052 -1.05777788763274
+-0.96067491767947 1.31007041199864 0.72232446535292 -1.28942268363907
+-0.34769588110474 1.42207598642100 1.93760736679209 -2.78648011856577
+-0.86121371561088 0.22707515043735 1.50824298917320 -2.10393972942553
+-0.68364804192970 0.26605737874715 1.72981186258219 -2.38884304067862
+-0.06537388681621 0.91207811521225 1.99786083945736 -3.07617211177039
+-0.95089045248252 0.31947246031702 -1.30952761973303 4.39769276488546
+-0.91904458599080 1.92328380754929 -1.39415358549809 4.30724228359896
+0.79764487680537 1.67523148972362 -0.39687260839343 0.92338022079859
+0.82012784273065 1.04032137407585 -1.57218032260641 2.17995823974852
+-0.99779218141586 0.43654451545631 0.93358642679862 -1.50433383401485
+0.64665631513320 0.26983744506480 1.76278149564495 -3.84478535850956
+-0.33727714800813 1.95814069096774 0.05859460091228 -0.34402306664559
+0.08189332342924 1.41408699137829 -0.00335889931344 0.08198513727442
+0.67212785139840 1.71995111389133 1.74043510949615 -3.87867149146120
+0.26116197153766 0.77434525565728 -0.03470500642417 0.26422575485318
+-0.99726999031677 0.50166803966345 -1.07384149520151 4.63848021571996
+-0.83562138750324 0.59887603788940 0.45069416829314 -0.98926303703773
+-0.99977253923164 1.71379105681003 0.97867232318507 -1.54946703276290
+0.25977496592172 0.94596798113644 -1.96566918097264 2.87880349232616
+0.67753788404984 1.32655409403226 -1.73548787595532 2.39718279848362
+-0.98181280555323 1.61145721487357 1.18985156004547 -1.76180728144481
+0.95308405257346 0.50596100929945 -0.69729422085108 1.26326597551748
+0.37166713582053 0.15914687794065 -0.07163394065112 0.38080415080844
+-0.94128581409549 1.46552121003143 1.33761074654488 -1.91517377819665
+0.50263574695009 1.92279495500721 1.86449829721517 -3.66823761100048
+-0.98104238623297 1.90760946833533 1.19379328268628 -1.76582359131701
+-0.39444347866163 0.98099810376780 0.08107979555278 -0.40546214696481
+0.99734968930361 1.26438412886551 -0.92724288869125 1.49797487095100
+-0.73490346461986 1.46599003967598 1.67817173170940 -2.31606833009633
+0.97111212627401 1.80481900649593 -0.76137636704726 1.32984803323919
+-0.31012473730097 0.32449383749640 1.95069587530083 -2.82626841764302
+0.99637118283100 0.81176264473512 -0.91488557100025 1.48557879322013
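scurve.txt above matches the layout of sklearn's S-curve generator: three manifold coordinates plus the univariate position along the curve as a fourth column. A sketch of generating a file with the same shape (sample count and random_state are assumptions):

    import numpy as np
    from sklearn.datasets import make_s_curve

    # X has shape (n, 3); t is the position along the curve, often used
    # as a coloring or regression target.
    X, t = make_s_curve(n_samples=100, noise=0.0, random_state=42)
    np.savetxt("scurve_sketch.txt", np.column_stack([X, t]),
               delimiter="\t", fmt="%.14f")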
diff -r 000000000000 -r af2624d5ab32 test-data/searchCV01
Binary file test-data/searchCV01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/searchCV02
Binary file test-data/searchCV02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/sparse.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sparse.mtx Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,8741 @@\n+%%MatrixMarket matrix coordinate real general\n+%\n+4 1048577 8738\n+1 271 0.02083333333333341\n+1 1038 0.02461995616119806\n+1 1665 0.01253924656438802\n+1 2794 0.0250470072492813\n+1 2897 0.02083333333333341\n+1 3377 0.02083333333333341\n+1 4053 0.05769913639656241\n+1 4959 0.007693218186208322\n+1 5733 0.01641330410746537\n+1 5985 0.01294450932696249\n+1 6146 0.02461995616119806\n+1 6551 0.02083333333333341\n+1 6812 0.01252350362464065\n+1 7663 0.01252350362464065\n+1 8132 0.01941676399044373\n+1 8260 0.01294450932696249\n+1 8398 0.02083333333333341\n+1 8495 0.01253924656438802\n+1 8846 0.02083333333333341\n+1 8955 0.01641330410746537\n+1 9442 0.01941676399044373\n+1 9811 0.02461995616119806\n+1 10010 0.01252350362464065\n+1 10205 0.0194183909345155\n+1 10495 0.03816237987288836\n+1 12091 0.0980885318741561\n+1 12255 0.01641330410746537\n+1 12330 0.01294450932696249\n+1 12841 0.01941676399044373\n+1 13130 0.00970919546725775\n+1 13234 0.00763247597457767\n+1 13369 0.01252350362464065\n+1 13424 0.02461995616119806\n+1 13929 0.01252350362464065\n+1 14370 0.01252350362464065\n+1 14667 0.01641330410746537\n+1 15146 0.01253924656438802\n+1 15784 0.009940534656094338\n+1 15880 0.02083333333333341\n+1 17369 0.01252350362464065\n+1 17674 0.03236127331740622\n+1 18464 0.009940534656094338\n+1 19202 0.00970919546725775\n+1 19526 0.01252350362464065\n+1 19723 0.01253924656438802\n+1 19745 0.02083333333333341\n+1 20407 0.01641330410746537\n+1 20582 0.01252350362464065\n+1 20843 0.00970919546725775\n+1 20975 0.0692389636758749\n+1 21671 0.0152711805445382\n+1 21829 0.0250470072492813\n+1 22178 0.01538643637241664\n+1 22277 0.02083333333333341\n+1 22856 0.01641330410746537\n+1 23053 0.01641330410746537\n+1 23225 0.01294450932696249\n+1 23728 0.02083333333333341\n+1 24382 0.01294450932696249\n+1 24672 0.00970919546725775\n+1 25245 0.01252350362464065\n+1 26569 0.03054236108907641\n+1 27748 0.01252350362464065\n+1 27941 0.01252350362464065\n+1 28962 0.01252350362464065\n+1 29320 0.01252350362464065\n+1 29735 0.07635590272269102\n+1 29839 0.00970919546725775\n+1 30063 0.02083333333333341\n+1 30646 0.0250470072492813\n+1 31588 0.03130875906160163\n+1 32319 0.01294450932696249\n+1 32433 0.01294450932696249\n+1 32797 0.009940534656094338\n+1 32800 0.00970919546725775\n+1 32837 0.02083333333333341\n+1 33008 0.00970919546725775\n+1 33979 0.01880886984658204\n+1 35441 0.0194183909345155\n+1 36189 0.01641330410746537\n+1 37457 0.0152711805445382\n+1 38049 0.01294450932696249\n+1 38464 0.00970919546725775\n+1 39762 0.0194183909345155\n+1 40007 0.01514765184153846\n+1 40018 0.02461995616119806\n+1 40091 0.01294450932696249\n+1 40157 0.01880886984658204\n+1 40920 0.007693218186208322\n+1 41305 0.02083333333333341\n+1 41617 0.01294450932696249\n+1 41628 0.0250470072492813\n+1 41645 0.0152711805445382\n+1 41800 0.03713053286162541\n+1 41970 0.01294450932696249\n+1 42308 0.02083333333333341\n+1 43264 0.02083333333333341\n+1 43550 0.01252350362464065\n+1 43781 0.01526495194915534\n+1 43902 0.0250470072492813\n+1 44084 0.00970919546725775\n+1 44116 0.0250470072492813\n+1 44133 0.01294450932696249\n+1 44135 0.01641330410746537\n+1 44195 0.01294450932696249\n+1 44513 0.02083333333333341\n+1 44990 0.009940534656094338\n+1 45201 0.02083333333333341\n+1 45447 0.01880886984658204\n+1 45548 0.0152711805445382\n+1 46543 0.01252350362464065\n+1 46563 0.0152711805445382\n+1 46627 0.01009843456102564\n+1 46930 0.009940534656094338\n+1 47084 0.01253924656438802\n+1 48208 0.01252350362464065\n+1 48783 
0.0152711805445382\n+1 48993 0.01641330410746537\n+1 50742 0.02500295910517705\n+1 52051 0.01880886984658204\n+1 52833 0.002524608640256409\n+1 53918 0.01294450932696249\n+1 54190 0.01252350362464065\n+1 54267 0.00970919546725775\n+1 54837 0.009940534656094338\n+1 55562 0.02588901865392498\n+1 55759 0.02083333333333341\n+1 55865 0.009940534656094338\n+1 56669 0.01294450932696249\n+1 57379 0.00970919546725775\n+1 57633 0.0194183909345155\n+1 58567 0.01641330410746537\n+1 58964 0.007693218186208322\n+1 59338 0.01641330410746537\n+1 60239 0.02083333333333341\n+1 60904 0.0152711805445382\n+'..b'4 983313 0.01343038273375637\n+4 983688 0.02083333333333338\n+4 983770 0.01362848167001797\n+4 984175 0.01662975263094352\n+4 984900 0.02686076546751275\n+4 985526 0.01343038273375637\n+4 985593 0.02531848417709173\n+4 985753 0.01343038273375637\n+4 985859 0.01641330410746536\n+4 986055 0.02083333333333338\n+4 986185 0.01253136767792717\n+4 987191 0.01056442818410648\n+4 987694 0.0221186977601905\n+4 989433 0.04423739552038099\n+4 989840 0.01253136767792717\n+4 990517 0.01641330410746536\n+4 990522 0.01362848167001797\n+4 991282 0.01253136767792717\n+4 991559 0.01265924208854587\n+4 991935 0.01641330410746536\n+4 992416 0.01327797629320365\n+4 993308 0.02506273535585434\n+4 993319 0.02054987341316971\n+4 994759 0.01641330410746536\n+4 995303 0.01679782851708494\n+4 996150 0.01343038273375637\n+4 996559 0.01343038273375637\n+4 997115 0.02083333333333338\n+4 997142 0.01056442818410648\n+4 997713 0.02083333333333338\n+4 999660 0.01662975263094352\n+4 1000382 0.02054987341316971\n+4 1000967 0.03132841919481793\n+4 1001151 0.01679782851708494\n+4 1001447 0.02014557410063456\n+4 1002865 0.01265924208854587\n+4 1002928 0.02054987341316971\n+4 1003223 0.03027609162416319\n+4 1003297 0.02054987341316971\n+4 1003745 0.01679782851708494\n+4 1004821 0.02519674277562741\n+4 1004897 0.02083333333333338\n+4 1005440 0.02054987341316971\n+4 1006479 0.01343038273375637\n+4 1007088 0.02506273535585434\n+4 1008466 0.01327797629320365\n+4 1008887 0.01056442818410648\n+4 1009081 0.01343038273375637\n+4 1009095 0.01679782851708494\n+4 1009375 0.01662975263094352\n+4 1009826 0.01679782851708494\n+4 1009939 0.01105934888009525\n+4 1011571 0.01343038273375637\n+4 1011599 0.01362848167001797\n+4 1011879 0.01327797629320365\n+4 1012444 0.02044272250502696\n+4 1012825 0.02054987341316971\n+4 1012961 0.01641330410746536\n+4 1014776 0.01056442818410648\n+4 1014805 0.01679782851708494\n+4 1015053 0.02083333333333338\n+4 1015289 0.02054987341316971\n+4 1015354 0.02494462894641529\n+4 1017199 0.02054987341316971\n+4 1017321 0.01253136767792717\n+4 1017519 0.02083333333333338\n+4 1019441 0.01253136767792717\n+4 1020330 0.01253136767792717\n+4 1020740 0.05068391965620721\n+4 1021555 0.01105934888009525\n+4 1021677 0.01362848167001797\n+4 1021933 0.01056442818410648\n+4 1021999 0.01265924208854587\n+4 1022039 0.04109974682633943\n+4 1022303 0.01105934888009525\n+4 1022561 0.01362848167001797\n+4 1022916 0.01879705151689075\n+4 1023951 0.01343038273375637\n+4 1024145 0.02083333333333338\n+4 1024308 0.01343038273375637\n+4 1024517 0.01343038273375637\n+4 1024933 0.01641330410746536\n+4 1025625 0.02083333333333338\n+4 1026107 0.03759410303378151\n+4 1026881 0.01327797629320365\n+4 1027598 0.01679782851708494\n+4 1027994 0.02083333333333338\n+4 1028315 0.01641330410746536\n+4 1028744 0.01662975263094352\n+4 1029411 0.01679782851708494\n+4 1029532 0.01343038273375637\n+4 1031669 0.01327797629320365\n+4 1032010 0.02686076546751275\n+4 1032743 
0.01056442818410648\n+4 1033350 0.01343038273375637\n+4 1033833 0.01056442818410648\n+4 1034007 0.02083333333333338\n+4 1034211 0.01641330410746536\n+4 1035601 0.01327797629320365\n+4 1036032 0.01265924208854587\n+4 1036554 0.02494462894641529\n+4 1037044 0.01662975263094352\n+4 1037251 0.01641330410746536\n+4 1037360 0.005433498852303906\n+4 1037802 0.02014557410063456\n+4 1038132 0.01641330410746536\n+4 1038217 0.01362848167001797\n+4 1038423 0.01679782851708494\n+4 1038643 0.01327797629320365\n+4 1038911 0.01343038273375637\n+4 1040201 0.004223659971350601\n+4 1040407 0.01253136767792717\n+4 1040538 0.01056442818410648\n+4 1040866 0.01265924208854587\n+4 1041961 0.01327797629320365\n+4 1042120 0.01105934888009525\n+4 1042128 0.01327797629320365\n+4 1042396 0.02083333333333338\n+4 1042483 0.01991696443980548\n+4 1042647 0.01362848167001797\n+4 1043095 0.03319494073300912\n+4 1044911 0.01105934888009525\n+4 1045283 0.02494462894641529\n+4 1045305 0.01056442818410648\n+4 1045987 0.01265924208854587\n+4 1046201 0.01105934888009525\n+4 1046367 0.02686076546751275\n+4 1046859 0.01679782851708494\n+4 1047379 0.02519674277562741\n+4 1047477 0.01105934888009525\n'
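sparse.mtx above is a MatrixMarket coordinate file; its size line declares a 4 x 1048577 matrix with 8738 stored entries, and the listing is truncated in this view. A sketch of loading it (path relative to a checkout of this repository):

    from scipy.io import mmread

    # mmread returns a COO matrix; convert to CSR for efficient row slicing.
    m = mmread("test-data/sparse.mtx").tocsr()
    print(m.shape, m.nnz)   # expected: (4, 1048577) and 8738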
diff -r 000000000000 -r af2624d5ab32 test-data/sparse_u.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sparse_u.txt Sat May 01 01:24:32 2021 +0000
b'@@ -0,0 +1,101 @@\n+0\t1\t2\t3\t4\t5\t6\t7\t8\t9\t0\n+-1.74976547305470\t0.34268040332750\t1.15303580256364\t-0.25243603652139\t0.98132078695123\t0.51421884139438\t0.22117966922140\t-1.07004333056829\t-0.18949583082318\t0.25500144427338\t-3.20268957071762\n+-0.45802698550262\t0.43516348812289\t-0.58359505032266\t0.81684707168578\t0.67272080570966\t-0.10441114339063\t-0.53128037685191\t1.02973268513335\t-0.43813562270442\t-1.11831824625544\t-0.32269568196899\n+1.61898166067526\t1.54160517451341\t-0.25187913921321\t-0.84243573825130\t0.18451869056394\t0.93708220110895\t0.73100034383481\t1.36155612514533\t-0.32623805920230\t0.05567601485478\t6.60472167175290\n+0.22239960855530\t-1.44321699522534\t-0.75635230559444\t0.81645401101929\t0.75044476153418\t-0.45594692746800\t1.18962226802913\t-1.69061682638360\t-1.35639904886131\t-1.23243451391493\t-2.54026382137261\n+-0.54443916167246\t-0.66817173681343\t0.00731456322890\t-0.61293873547816\t1.29974807475531\t-1.73309562365328\t-0.98331009912963\t0.35750775316737\t-1.61357850282218\t1.47071386661213\t0.12371686928073\n+-1.18801759731772\t-0.54974619353549\t-0.94004616154477\t-0.82793236436587\t0.10886346783368\t0.50780959049232\t-0.86222734651048\t1.24946974272698\t-0.07961124591740\t-0.88973148126503\t0.24874422979447\n+-0.88179838948302\t0.01863894948806\t0.23784462192362\t0.01354854862861\t-1.63552939938082\t-1.04420987770932\t0.61303888168755\t0.73620521332382\t1.02692143939979\t-1.43219061105893\t-0.30787880264154\n+-1.84118830018672\t0.36609322616730\t-0.33177713505281\t-0.68921797808975\t2.03460756150493\t-0.55071441191459\t0.75045333032684\t-1.30699233908082\t0.58057333579427\t-1.10452309266229\t-0.19844513286689\n+0.69012147022471\t0.68689006613840\t-1.56668752957839\t0.90497412146668\t0.77882239932307\t0.42823287059674\t0.10887198989791\t0.02828363482307\t-0.57882582479099\t-1.19945119919393\t5.45755744088132\n+-1.70595200573817\t0.36916395710701\t1.87657342696217\t-0.37690335016897\t1.83193608182554\t0.00301743403121\t-0.07602346572462\t0.00395759398760\t-0.18501411089711\t-2.48715153522277\t-2.38855416852243\n+-1.70465120576096\t-1.13626100682736\t-2.97331547405089\t0.03331727813886\t-0.24888866705811\t-0.45017643501165\t0.13242780114877\t0.02221392803939\t0.31736797594107\t-0.75241417772504\t1.94619608927940\n+-1.29639180715015\t0.09513944356545\t-0.42371509994342\t-1.18598356492917\t-0.36546199267663\t-1.27102304084666\t1.58617093842324\t0.69339065851659\t-1.95808123420787\t-0.13480131198999\t0.82641742518663\n+-1.54061602455261\t2.04671396848214\t-1.39699934495328\t-1.09717198463982\t-0.23871286931468\t-1.42906689844829\t0.94900477650526\t-0.01939758596247\t0.89459770576001\t0.75969311985021\t8.96789675053551\n+-1.49772038108317\t-1.19388597679194\t1.29626258639906\t0.95227562608189\t-1.21725413064101\t-0.15726516737514\t-1.50758516026439\t0.10788413080661\t0.74705565509915\t0.42967643586261\t-9.12681793825348\n+-1.41504292085253\t-0.64075992301057\t0.77962630366370\t-0.43812091634884\t2.07479316794657\t-0.34329768218247\t-0.61662937168319\t0.76318364605999\t0.19291719182331\t-0.34845893065237\t-4.73013238453105\n+2.29865394071368\t-0.16520955264073\t0.46629936835719\t0.26998723863109\t-0.31983104711809\t-1.14774159987659\t1.70362398812070\t-0.72215077005575\t1.09368664965872\t-0.22951775323996\t3.15772616155389\n+-0.00889866329211\t-0.54319800840717\t0.75306218769198\t-1.60943889617295\t1.94326226343400\t-1.44743611231959\t0.13024845535270\t0.94936086466099\t-2.01518871712253\t-0.07954058693411\t-0.92407555288350\n+0.30104946378807
\t-1.68489996168518\t0.22239080944545\t-0.68492173524723\t-0.12620118371358\t1.99027364975409\t0.52299780452075\t-0.01634540275749\t-0.41581633584065\t-1.35850293675980\t-2.23623579455562\n+-0.51442989136879\t-0.21606012000326\t0.42238022042198\t-1.09404293103224\t1.23690788519023\t-0.23028467842711\t-0.70441819973502\t-0.59137512108517\t0.73699516901821\t0.43586725251491\t-0.55744774763256\n+1.77599358550677\t0.51307437883965\t1.17052698294814\t2.07771223225020\t-0.45592201921402\t0.64917292725468\t-0.17478155445150\t1.01726434325117\t-0.59998304484887\t1.57616672431921\t-3.18381332274444\n+0.60442353858920\t-0.90703041748070\t0.59202326936038\t-0.43706441565157\t0.1017757'..b'194420\t-1.08125857121519\t-0.06307879670507\t-0.50356048600791\t-2.05090576304937\t0.08725798075221\t-1.32944561779624\t-1.65101496770809\n+0.75637688792742\t0.82428920150463\t0.37967322200031\t0.52422365195372\t-0.45271329511708\t0.68759278675132\t0.91674695152792\t1.11971610167859\t1.26354483633054\t-1.45610559752933\t0.32205421816296\n+0.32128693891874\t-2.43702669941400\t0.97337371093006\t-0.64248112674987\t0.29283256357178\t-0.46398126201592\t0.38673364020325\t0.67249644253334\t-1.09097595301491\t-0.52700342019866\t-6.40574884617228\n+-0.30440284592937\t0.77081843337639\t-0.23575096986828\t-0.17778292517384\t2.28863529133324\t-2.52894751088469\t0.56775355409626\t0.07355255089438\t0.74832418672378\t0.91465664311128\t2.18526983290342\n+1.25223156262730\t-0.88472860123867\t1.17560948074634\t0.47969620845726\t-0.58996743406612\t0.86216891849810\t-1.47265712624577\t0.65231019836457\t-0.15168996527867\t1.34323688312126\t-4.23943249822781\n+-0.65948406246294\t-0.40906579310461\t-0.33858048238969\t-0.39661868538565\t-1.45824184628667\t-0.01090659216795\t-0.76657297567464\t0.84217249032250\t0.79187920141782\t-1.31762772533865\t0.55888844122735\n+0.01315303655787\t0.15323002771334\t-0.78639503041184\t1.36810521913452\t0.00400879553357\t0.45319420435997\t-0.40637868993132\t0.68411948932681\t2.88396925177584\t-0.58818877513993\t-1.21925440704826\n+0.36522540717597\t0.32310950138879\t0.58240426467360\t-0.00845748620002\t-1.72365143380736\t-1.02553725476702\t0.53492759374879\t-1.65002519482436\t0.66894730906415\t0.28032230350373\t-0.37071369664525\n+0.40271751266444\t0.59519151467352\t-0.04489492214522\t0.64534644308214\t-1.12745914989289\t0.22451442073277\t0.10571095020939\t-1.00134678404358\t-0.18618164454287\t1.99795151776873\t2.59957677613228\n+0.57255675159723\t-1.36871620107071\t-1.15772004612616\t1.06582622979255\t-1.65499679923647\t1.47713538228764\t-0.93286094213424\t0.13042091725382\t-0.03102869757093\t-0.08768373982573\t-1.99798668296313\n+0.61775947077628\t2.88575179539228\t1.75982110036090\t1.09133090752907\t-2.21346060739119\t-0.02398076189861\t1.23725351268738\t-0.45619206614093\t2.12474539951312\t0.24074228458820\t1.48332247916783\n+-0.05864391264017\t-0.87399991994747\t-0.12278570026443\t0.91179656231331\t-0.10746411474279\t-0.72747083460478\t1.59576290128042\t0.98774702734042\t-0.48811101691646\t0.62969480563024\t-2.66581892706104\n+-0.45339969843110\t0.60909959142993\t-0.85224895065849\t-0.05454962931514\t1.09079462734148\t-1.84634161455384\t-0.41243382161399\t-0.41546602951165\t-1.30175658921974\t-1.13609897454000\t2.95605090229365\n+-1.79763757816820\t-0.66155694349019\t2.54928569877370\t-1.63767562413715\t0.00631766181730\t0.54171265127420\t-0.13210003839032\t-0.37873629845671\t1.94062280466786\t-1.04187437109117\t-7.36529395665408\n+-0.28559377932837\t-0.59892262579212\t-0.38234037804602\t-0.9859808115
7493\t-1.36447657201091\t-0.82353342400180\t-1.68138681554986\t-0.91621993917044\t0.54362793226983\t1.52486260054826\t0.69972992752598\n+1.19741734779592\t-1.22250529739662\t-2.02376353002073\t0.05371174766609\t-0.53629036242314\t0.10714253527293\t0.61515218539276\t0.90506544040104\t1.65258564139881\t-0.84281968759992\t2.02183560039596\n+-0.06340532135398\t0.48905360189012\t0.70453542780530\t-1.07173556758791\t0.41375864841338\t-0.34502527403732\t1.24018981821212\t0.10342901781619\t-2.14185161440355\t-0.68365014168151\t1.26361248319262\n+-1.18079802759063\t1.18100216181730\t-1.06605525816211\t-0.74304592231484\t-0.88592524951272\t-0.49581833514280\t0.52738768762870\t-0.30175139488690\t0.35564438892659\t1.32813211314365\t4.35007881758459\n+0.23807425695170\t0.17185882517031\t1.11676824680888\t-0.01368885520815\t1.28290975661447\t-1.12997104581712\t0.75872144408905\t-1.09860423557013\t-0.30290404839246\t1.49961056170431\t-1.50599861767127\n+0.14614254213308\t1.90341641619205\t-0.30620639436988\t-0.45706533906365\t-2.38861407368411\t-0.86179917990581\t-0.53439383530306\t-1.26260428547622\t-1.02319884121943\t0.53846601124160\t4.25252010016803\n+1.71650277423513\t0.17912390469971\t-0.45596835004497\t0.32669169909967\t0.68196488401329\t-0.73798870708179\t1.32634484032934\t0.92700352399697\t0.21309224825039\t1.32833309213812\t2.24067452098234\n'
diff -r 000000000000 -r af2624d5ab32 test-data/svc_model01
Binary file test-data/svc_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/svc_model02
Binary file test-data/svc_model02 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/svc_model03
Binary file test-data/svc_model03 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/svc_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/svc_prediction_result01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/svc_prediction_result02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/svc_prediction_result02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/svc_prediction_result03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/svc_prediction_result03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 2
+2 -94 20 -96 1
+2 -85 26 -88 2
+2 -90 33 -114 0
+2 -63 9 -106 0
+2 -79 9 -93 3
+2 -99 26 -108 1
+2 -81 19 -110 0
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 3
+2 -88 15 -103 3
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
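The three svc_prediction_result fixtures above share the same input rows (the test_set.tabular file further down) with a trailing predicted class; results 01 and 02 agree, while 03 diverges on the third block of rows, presumably from a differently configured classifier. A sketch of producing such a table, assuming the train_set/test_set layouts shown below and a guessed kernel:

    import pandas as pd
    from sklearn.svm import SVC

    # train_set.tabular: 4 feature columns + a trailing label column;
    # test_set.tabular: the same 4 feature columns (layout assumed).
    train = pd.read_csv("test-data/train_set.tabular", sep="\t", header=None)
    X_train, y_train = train.iloc[:, :4], train.iloc[:, 4]
    X_test = pd.read_csv("test-data/test_set.tabular", sep="\t", header=None)

    clf = SVC(kernel="linear")   # kernel choice is a guess, not the tool's setting
    clf.fit(X_train, y_train)

    out = X_test.copy()
    out[4] = clf.predict(X_test)   # appended prediction column
    out.to_csv("svc_prediction_sketch.tabular", sep="\t",
               index=False, header=False)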
diff -r 000000000000 -r af2624d5ab32 test-data/swiss_r.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/swiss_r.txt Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,101 @@
+0 1 2 0
+-9.02243256039137 16.34407352444682 -3.91158679018559 9.83385989954935
+3.63249892240844 16.37156637082574 6.37347946891175 7.33595862596962
+-6.59990374867870 12.81689121739727 5.68894891723748 8.71337301363603
+12.60058269943729 6.48900731901245 1.36421345798185 12.67421645405731
+0.21148781431603 14.65243305777208 -4.75215950419191 4.75686315221341
+5.33691943034302 18.05198421031037 -2.41562073899157 5.85815095065150
+0.42444009063172 13.13179891289296 -11.02588381005155 11.03405017132387
+12.46482443712306 20.63056442180054 -0.88026386103874 12.49586782553941
+5.76317738541424 20.50650266733296 -1.67200654933799 6.00081823393938
+-7.69903426445702 3.50057675517602 -6.58731737523598 10.13251591695612
+11.20278590321885 0.48674086604648 6.81511168671079 13.11290049133681
+6.15412671259387 3.37563551864871 2.60836155375894 6.68407253026096
+6.35942079065830 19.39343333044084 1.13017904280432 6.45906629950887
+4.89011222063691 20.02454684747021 -2.99390399145637 5.73381710908892
+5.95327096920981 4.43054679308737 3.25076997787065 6.78298906691301
+2.78861701451918 7.57103026710677 13.65384098195710 13.93570085837942
+12.10585740780788 11.53688049418112 -2.50504139885041 12.36232243504963
+6.32507204556097 5.70844783271641 0.31467565998178 6.33289484773921
+12.24420955660564 9.67263404257185 -1.99160324599471 12.40512600321051
+3.86603085500482 14.61939286129109 6.18690023441702 7.29547319112878
+-7.02397880690916 10.50747383017216 5.26986980172594 8.78110505614482
+7.26899971380247 15.03749080185106 11.46122929160598 13.57196130682766
+12.28312638165453 11.04507466082536 -1.82900534786211 12.41855282511636
+-0.20634108007217 0.02937948549992 7.87746751157422 7.88016948023535
+6.34399103378833 8.28870602048655 0.52398389323880 6.36559355890402
+-2.99211446321268 10.33550636792415 7.66279394446575 8.22624823332651
+0.25539688163080 8.46048695896198 -4.75915397262811 4.76600189911090
+4.89844343501214 7.44026430223273 5.12776107471194 7.09145131304900
+11.44974583747064 10.51290070830202 -4.24498953448257 12.21133145445421
+0.69579063231352 9.34870920545390 -4.80605880631806 4.85616369717936
+-6.17828212297832 1.89908855212515 -8.31161329742931 10.35635484121306
+-5.80914779869685 5.74482132057625 -8.63009591761129 10.40311269256829
+4.77128296187839 19.81301905259727 -3.12464903951709 5.70338256848215
+-3.67628694703398 0.55743746801278 7.45494950830712 8.31212114254485
+1.70416677720490 0.83997248245367 -4.76032011047383 5.05616771465411
+11.25347968088436 5.94594755411621 6.71434319469841 13.10432025952777
+2.49626118444553 12.22922757455215 13.73230884573197 13.95735025481333
+2.82529764315011 20.80874886142137 -4.45734155189065 5.27732891552161
+11.24605365926439 20.84548698546233 6.72924080159274 13.10558677331444
+-7.59842145099956 20.85546482210194 -6.72885645955675 10.14955751746493
+7.67310519862389 2.31101495029782 -8.84589090944236 11.71009519051421
+-3.59076659321246 13.95411036524274 -10.02825028857387 10.65173265610870
+-7.31035544660664 11.00372352425457 -7.10771526578241 10.19612245195043
+0.93904272696270 3.63614810598335 -4.81429662072219 4.90502326144929
+6.14080381732072 19.80216514321554 2.65918955384942 6.69184284081168
+-8.98560037833939 5.07906180550130 -4.02506967318433 9.84592301580716
+9.83640736238632 20.97757764570745 -6.80541621380709 11.96112869431461
+4.96998862744940 12.23657011814787 5.03554965727086 7.07513585083827
+3.20231600088142 3.84885901324209 6.67886419948077 7.40689238241025
+12.54088626453474 8.12375386027360 2.27756112448486 12.74602341813951
+3.24443881123445 3.98314410715514 13.51770472571597 13.90160869293509
+11.54328489548810 8.62618413353151 6.09143462816640 13.05193479939386
+-1.97813913943140 12.48828144693581 7.85543311765692 8.10067058464492
+-6.17714998090234 15.04830795569514 -8.31263754702176 10.35650157508401
+-1.61850746821552 10.22472112975159 7.89200482133423 8.05625884171010
+-0.51138056305762 6.50138617310080 7.90207659171039 7.91860622467934
+6.35381855577856 12.12626882938480 0.68617671578531 6.39076276551100
+5.45211008381264 9.27586421094360 4.31430175957628 6.95260411922716
+2.10713448194703 7.55324015461126 -4.68298251756532 5.13520603139082
+-9.46355519159139 6.74797057218509 -0.48486315240582 9.47596797909624
+-3.25022653114329 4.37235204411648 7.59200889141913 8.25848481933186
+-6.60452000241460 9.47643110529852 -7.90309749198288 10.29944825852728
+-3.61286141728060 10.32870111554513 -10.01788625769654 10.64945128598746
+5.90124016577200 18.88060261066794 -1.36195628598096 6.05636528117681
+7.89068480280119 15.31656968161827 10.97066306101087 13.51363587845203
+6.59943795203969 16.17188523131361 11.92785150077068 13.63180922354986
+-5.92310072764215 7.88422419880175 -8.53499542293883 10.38890124601161
+-4.10660182718020 7.21853023994307 7.28989105932790 8.36700007313664
+-2.25898912612109 13.75573932598577 7.81543176115041 8.13535527713031
+6.22411695981015 14.93179785742045 2.30810881728065 6.63829784220360
+3.72076470595161 2.38428907959220 6.30480629460181 7.32083823134385
+5.13490876903855 2.79360247684508 4.81010313126482 7.03593492009076
+6.33502516186602 9.57682020972860 0.41503304021319 6.34860584899899
+4.27843646254146 3.35446083332871 13.14365544274426 13.82247072565484
+5.41252501545014 20.19447997926756 12.62033886030277 13.73202024436834
+-6.24125765198132 17.58993064209800 -8.25415535789923 10.34815818156797
+6.64000170297454 10.92337442779639 -9.51736726558904 11.60473620051790
+-0.52602499579966 4.58371741229124 7.90295722301686 7.92044412675453
+4.25631510283951 2.83329317318037 -3.60833745464785 5.57999260225982
+-8.54867168512033 20.56047725516125 3.06273111245095 9.08075489412379
+-9.47483083800073 14.84791340947201 -0.77854042515321 9.50676309804379
+4.10653195087950 18.05948669585893 -3.72769262484286 5.54610646930818
+-9.35274677942807 8.13062518440142 -2.53039931260771 9.68900371562256
+1.03845201115681 5.26751441646622 14.02486534548972 14.06325817648934
+-4.63367754105607 6.28819839783875 7.04889668487289 8.43551492373165
+-0.16796208628964 17.99480609650533 7.87351960373171 7.87531092800648
+11.88118942484094 9.93266380193264 -3.19616245269299 12.30358145309554
+8.69490880845012 13.92881798733869 -8.00982627814450 11.82196075925300
+1.45482693805244 16.92030075617249 7.52360274153979 7.66297067932457
+-3.23533266731161 5.31259059764421 -10.18660422887945 10.68804393628686
+-9.10316962454735 1.67104221837682 -3.64441261846314 9.80558211157782
+2.53532307433492 15.38797270533002 7.06868388652787 7.50960418257273
+4.97651933022564 20.18934702757571 -2.89344295855440 5.75654034976890
+1.48425390920063 20.02989941752092 7.51375887065696 7.65895436945237
+-8.28803153159531 10.30048008956189 3.55761030474492 9.01931581380457
+-0.79470794357540 13.27603335308786 -10.89380414305644 10.92275283172038
+4.82092592045283 15.39289541659784 5.22421533655818 7.10870963067305
+-2.56630792573832 18.95059956820729 -10.44394771632344 10.75462599400857
+6.27317580578337 3.40718529371220 2.04636103864001 6.59850954312636
+-0.92862878529940 8.52350776971879 -10.87076506408071 10.91035675398950
diff -r 000000000000 -r af2624d5ab32 test-data/test.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/test.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,5 @@
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894
diff -r 000000000000 -r af2624d5ab32 test-data/test2.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/test2.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,9 @@
+0 1 2 3
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894
+0 1 -0.9 0.6
+1 2 2 5
diff -r 000000000000 -r af2624d5ab32 test-data/test3.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/test3.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,51 @@
+Age Race AIDS Total target
+0 4 2555.0 14443382.0 1
+1 4 55300.0 14704293.0 1
+2 4 82334.0 16641977.0 1
+3 4 38006.0 13888285.0 1
+4 4 16068.0 21845911.0 1
+0 2 2489.0 2367256.0 1
+1 2 34204.0 2410019.0 1
+2 2 51776.0 2727604.0 1
+3 2 23896.0 2276276.0 1
+4 2 10169.0 3580523.0 1
+0 3 1363.0 1542563.0 1
+1 3 20712.0 1570428.0 1
+2 3 27200.0 1777374.0 1
+3 3 11251.0 1483278.0 1
+4 3 4674.0 2333158.0 1
+0 1 38.0 699627.0 1
+1 1 731.0 712265.0 1
+2 1 1162.0 806125.0 1
+3 1 560.0 672738.0 1
+4 1 258.0 1058200.0 1
+0 0 26.0 169115.0 1
+1 0 390.0 172170.0 1
+2 0 417.0 194858.0 1
+3 0 140.0 162616.0 1
+4 0 48.0 255790.0 1
+0 4 490.0 14999423.0 0
+1 4 4788.0 15270378.0 0
+2 4 5377.0 17282659.0 0
+3 4 2152.0 14422956.0 0
+4 4 1790.0 22686934.0 0
+0 2 1490.0 2458391.0 0
+1 2 12280.0 2502800.0 0
+2 2 15713.0 2832611.0 0
+3 2 5788.0 2363908.0 0
+4 2 2534.0 3718366.0 0
+0 3 493.0 1601948.0 0
+1 3 4660.0 1630887.0 0
+2 3 5153.0 1845800.0 0
+3 3 1944.0 1540381.0 0
+4 3 910.0 2422980.0 0
+0 1 6.0 726561.0 0
+1 1 83.0 739686.0 0
+2 1 106.0 837159.0 0
+3 1 69.0 698637.0 0
+4 1 55.0 1098938.0 0
+0 0 3.0 175626.0 0
+1 0 78.0 178798.0 0
+2 0 77.0 202360.0 0
+3 0 31.0 168876.0 0
+4 0 14.0 265637.0 0
diff -r 000000000000 -r af2624d5ab32 test-data/test_set.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/test_set.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67
+0 44 64 -76
+0 51 48 -73
+0 58 65 -49
+0 43 61 -49
+0 45 43 -79
+0 42 60 -98
+0 50 55 -59
+0 53 53 -56
+0 45 44 -61
+0 43 65 -84
+0 35 52 -75
+0 56 56 -70
+1 -61 86 43
+1 -67 93 15
+1 -59 94 36
+1 -50 92 62
+1 -78 91 70
+1 -35 87 47
+1 -56 91 52
+1 -61 81 46
+1 -83 78 34
+1 -50 87 45
+1 -67 73 50
+1 -50 97 45
+1 -61 111 45
+2 -109 23 -92
+2 -94 20 -96
+2 -85 26 -88
+2 -90 33 -114
+2 -63 9 -106
+2 -79 9 -93
+2 -99 26 -108
+2 -81 19 -110
+2 -108 21 -108
+2 -92 27 -106
+2 -88 2 -106
+2 -88 15 -103
+3 54 -74 4
+3 42 -92 31
+3 39 -99 -7
+3 48 -115 -5
+3 39 -96 2
+3 31 -109 9
+3 33 -96 -8
+3 23 -102 4
+3 38 -90 21
+3 34 -107 1
+3 35 -78 18
diff -r 000000000000 -r af2624d5ab32 test-data/train.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,15 @@
+-0.409899987374 -0.649450145317 0.510268556953 -0.229110484125 0
+-1.10383560019 0.0611191480175 1.01725565283 1.79193066057 0
+-0.41009731911 0.731046118333 0.238276079462 1.60843479815 1
+1.48390157074 2.30714564103 -1.83858336229 0.770904924566 1
+0.74006063964 1.38952620136 -0.96404935579 0.702401167325 1
+0.331307031883 1.10808437795 -0.527405721679 0.961279646112 1
+-1.4627878344 -0.343655746393 1.43177660405 1.80949467985 0
+-1.33544682955 -2.24827087098 1.6885444678 -0.922608257112 0
+-0.0417384245742 0.906486336146 -0.13980113811 1.27108242642 1
+-2.73189476502 -1.46239633785 2.83576394706 2.28732123255 0
+-0.300256196558 -0.305034204892 0.340123288396 0.0593443810367 0
+-0.523654501136 -0.426496596688 0.572385315213 0.243891110892 0
+-0.00757221265553 -0.254805682403 0.0572980350837 -0.327374762308 0
+-1.87242461384 -0.413385894664 1.82750303608 2.35149919802 1
+-0.168117705611 -0.811895938369 0.316838713275 -0.819986910541 0
diff -r 000000000000 -r af2624d5ab32 test-data/train_set.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_set.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,49 @@
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 2
+1 -67 93 15 2
+1 -59 94 36 2
+1 -50 92 62 2
+1 -78 91 70 2
+1 -35 87 47 2
+1 -56 91 52 2
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 2
+1 -67 73 50 2
+1 -50 97 45 2
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_eval01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_eval01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+neg_mean_absolute_error r2
+-5.29904520286704 0.6841931628349759
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_eval03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_eval03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,2 @@
+neg_mean_absolute_error r2
+-4.811320754716981 0.7343422874316201
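The train_test_eval fixtures above hold a header row of scorer names and a single row of held-out scores. A sketch of producing that shape with sklearn scorer strings (the estimator and data here are stand-ins, not the tool's configured pipeline):

    from sklearn.datasets import make_regression
    from sklearn.ensemble import GradientBoostingRegressor
    from sklearn.metrics import get_scorer
    from sklearn.model_selection import train_test_split

    X, y = make_regression(n_samples=200, n_features=5, noise=10.0,
                           random_state=0)
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
    est = GradientBoostingRegressor(random_state=0).fit(X_tr, y_tr)

    names = ["neg_mean_absolute_error", "r2"]
    scores = [get_scorer(n)(est, X_te, y_te) for n in names]
    print("\t".join(names))
    print("\t".join(str(s) for s in scores))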
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_eval_model01
Binary file test-data/train_test_eval_model01 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_eval_weights01.h5
Binary file test-data/train_test_eval_weights01.h5 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_eval_weights02.h5
Binary file test-data/train_test_eval_weights02.h5 has changed
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_test01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_test01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,67 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+2016 11 2 59 57 54.2 54 58 55 70 0 0 0 0 0 0 1
+2016 11 8 61 63 52.7 49 57 52 49 0 0 0 0 0 1 0
+2016 7 13 74 77 75.6 74 78 76 56 0 0 0 0 0 0 1
+2016 3 14 52 54 53.4 49 58 55 44 0 1 0 0 0 0 0
+2016 6 13 65 70 69.3 66 72 69 79 0 1 0 0 0 0 0
+2016 5 21 63 66 65.7 62 67 65 49 0 0 1 0 0 0 0
+2016 7 4 76 71 73.8 71 76 73 86 0 1 0 0 0 0 0
+2016 1 15 55 49 47.1 46 51 46 65 1 0 0 0 0 0 0
+2016 2 1 48 47 48.8 46 49 49 51 0 1 0 0 0 0 0
+2016 1 11 50 52 46.7 42 48 48 39 0 1 0 0 0 0 0
+2016 6 8 86 85 68.5 67 70 69 81 0 0 0 0 0 0 1
+2016 7 23 81 71 77.0 75 81 76 86 0 0 1 0 0 0 0
+2016 9 14 74 75 71.2 67 75 73 77 0 0 0 0 0 0 1
+2016 9 12 77 70 71.8 67 73 73 90 0 1 0 0 0 0 0
+2016 10 17 62 60 59.1 57 63 59 62 0 1 0 0 0 0 0
+2016 1 19 50 54 47.6 47 49 48 53 0 0 0 0 0 1 0
+2016 9 26 67 76 67.2 64 69 69 74 0 1 0 0 0 0 0
+2016 9 15 75 79 71.0 66 76 69 64 0 0 0 0 1 0 0
+2016 7 28 79 83 77.3 76 80 78 76 0 0 0 0 1 0 0
+2016 12 24 45 40 45.1 44 47 46 39 0 0 1 0 0 0 0
+2016 6 1 71 79 67.4 65 69 66 58 0 0 0 0 0 0 1
+2016 10 3 63 65 64.5 63 68 65 49 0 1 0 0 0 0 0
+2016 4 8 68 77 57.1 57 61 57 41 1 0 0 0 0 0 0
+2016 11 17 55 50 50.5 46 51 50 57 0 0 0 0 1 0 0
+2016 12 4 50 49 46.8 45 47 47 53 0 0 0 1 0 0 0
+2016 9 10 72 74 72.3 70 77 74 91 0 0 1 0 0 0 0
+2016 7 29 83 85 77.3 77 80 79 77 1 0 0 0 0 0 0
+2016 10 14 66 60 60.2 56 64 60 78 1 0 0 0 0 0 0
+2016 3 30 56 64 55.7 51 57 56 57 0 0 0 0 0 0 1
+2016 12 5 49 46 46.6 43 50 45 65 0 1 0 0 0 0 0
+2016 4 18 68 77 58.8 55 59 57 39 0 1 0 0 0 0 0
+2016 12 19 35 39 45.1 42 46 45 51 0 1 0 0 0 0 0
+2016 2 4 51 49 49.0 44 54 51 44 0 0 0 0 1 0 0
+2016 4 30 64 61 61.4 60 65 62 78 0 0 1 0 0 0 0
+2016 4 5 69 60 56.6 52 58 56 72 0 0 0 0 0 1 0
+2016 11 16 57 55 50.7 50 51 49 34 0 0 0 0 0 0 1
+2016 9 28 77 69 66.5 66 68 66 62 0 0 0 0 0 0 1
+2016 1 13 45 49 46.9 45 51 46 33 0 0 0 0 0 0 1
+2016 3 5 59 57 52.1 49 53 51 46 0 0 1 0 0 0 0
+2016 1 24 57 48 48.1 46 50 48 54 0 0 0 1 0 0 0
+2016 7 14 77 75 75.8 74 76 77 77 0 0 0 0 1 0 0
+2016 8 23 84 81 75.7 73 78 77 89 0 0 0 0 0 1 0
+2016 12 25 40 41 45.1 42 49 44 31 0 0 0 1 0 0 0
+2016 9 25 64 67 67.6 64 72 67 62 0 0 0 1 0 0 0
+2016 11 21 57 55 49.5 46 51 49 67 0 1 0 0 0 0 0
+2016 1 16 49 48 47.3 45 52 46 28 0 0 1 0 0 0 0
+2016 2 24 51 60 50.8 47 53 50 46 0 0 0 0 0 0 1
+2016 8 4 73 75 77.3 73 79 78 66 0 0 0 0 1 0 0
+2016 3 2 54 58 51.6 47 54 52 37 0 0 0 0 0 0 1
+2016 1 25 48 51 48.2 45 51 49 63 0 1 0 0 0 0 0
+2016 1 18 54 50 47.5 44 48 49 58 0 1 0 0 0 0 0
+2016 11 22 55 54 49.3 46 54 49 58 0 0 0 0 0 1 0
+2016 3 13 55 52 53.3 50 55 53 54 0 0 0 1 0 0 0
+2016 5 17 57 60 65.0 62 65 65 55 0 0 0 0 0 1 0
+2016 1 28 56 57 48.4 44 52 48 34 0 0 0 0 1 0 0
+2016 5 24 66 65 66.2 66 71 66 67 0 0 0 0 0 1 0
+2016 11 6 65 58 53.2 52 57 55 71 0 0 0 1 0 0 0
+2016 12 23 49 45 45.1 45 49 44 35 1 0 0 0 0 0 0
+2016 6 25 68 69 71.7 68 73 73 89 0 0 1 0 0 0 0
+2016 4 2 73 71 56.2 55 58 58 45 0 0 1 0 0 0 0
+2016 6 26 69 71 71.9 67 74 72 70 0 0 0 1 0 0 0
+2016 11 26 52 52 48.4 48 50 47 58 0 0 1 0 0 0 0
+2016 9 13 70 74 71.5 71 75 70 82 0 0 0 0 0 1 0
+2016 12 2 52 46 47.2 46 51 49 41 1 0 0 0 0 0 0
+2016 8 6 80 79 77.2 76 81 79 60 0 0 1 0 0 0 0
+2016 10 29 60 65 55.3 55 59 55 65 0 0 1 0 0 0 0
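The train_test_split_test*/train* fixtures in this changeset are paired outputs of a single train/test split. A minimal sketch of how such a pair can be produced with scikit-learn follows; the input path, test_size, and random_state are illustrative assumptions, not taken from the tool wrapper:

# Sketch only: regenerate a train/test fixture pair similar to
# train_test_split_train01/test01.tabular. "input.tabular" and the
# split parameters below are hypothetical placeholders.
import pandas as pd
from sklearn.model_selection import train_test_split

df = pd.read_csv("input.tabular", sep="\t")
train_df, test_df = train_test_split(df, test_size=0.25, random_state=42)
train_df.to_csv("train_test_split_train01.tabular", sep="\t", index=False)
test_df.to_csv("train_test_split_test01.tabular", sep="\t", index=False)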
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_test02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_test02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,201 @@
+[201 rows of 20 tab-separated floating-point feature columns; hunk body truncated in the source listing]
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_test03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_test03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,54 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+2016 9 19 68 69 69.7 65 74 71 88 0 1 0 0 0 0 0
+2016 1 25 48 51 48.2 45 51 49 63 0 1 0 0 0 0 0
+2016 12 17 39 35 45.2 43 47 46 38 0 0 1 0 0 0 0
+2016 7 17 76 72 76.3 76 78 77 88 0 0 0 1 0 0 0
+2016 6 27 71 78 72.2 70 74 72 84 0 1 0 0 0 0 0
+2016 4 17 60 68 58.6 58 62 59 54 0 0 0 1 0 0 0
+2016 11 2 59 57 54.2 54 58 55 70 0 0 0 0 0 0 1
+2016 12 27 42 42 45.2 41 50 47 47 0 0 0 0 0 1 0
+2016 1 16 49 48 47.3 45 52 46 28 0 0 1 0 0 0 0
+2016 12 7 40 42 46.3 44 51 46 62 0 0 0 0 0 0 1
+2016 8 28 81 79 75.0 71 77 76 85 0 0 0 1 0 0 0
+2016 10 19 60 61 58.4 58 60 57 41 0 0 0 0 0 0 1
+2016 5 5 74 60 62.5 58 66 62 56 0 0 0 0 1 0 0
+2016 12 11 36 44 45.7 41 46 47 35 0 0 0 1 0 0 0
+2016 3 30 56 64 55.7 51 57 56 57 0 0 0 0 0 0 1
+2016 10 9 64 68 62.1 58 65 63 55 0 0 0 1 0 0 0
+2016 1 12 52 45 46.8 44 50 45 61 0 0 0 0 0 1 0
+2016 8 13 80 87 76.8 73 79 78 73 0 0 1 0 0 0 0
+2016 9 23 68 67 68.3 67 69 67 61 1 0 0 0 0 0 0
+2016 6 16 60 67 69.8 68 72 71 87 0 0 0 0 1 0 0
+2016 9 8 68 67 72.8 69 77 73 56 0 0 0 0 1 0 0
+2016 12 4 50 49 46.8 45 47 47 53 0 0 0 1 0 0 0
+2016 1 13 45 49 46.9 45 51 46 33 0 0 0 0 0 0 1
+2016 2 5 49 49 49.1 47 50 49 45 1 0 0 0 0 0 0
+2016 6 22 76 73 71.0 66 71 72 78 0 0 0 0 0 0 1
+2016 5 25 65 66 66.4 65 67 66 60 0 0 0 0 0 0 1
+2016 4 8 68 77 57.1 57 61 57 41 1 0 0 0 0 0 0
+2016 10 11 57 60 61.4 58 66 61 58 0 0 0 0 0 1 0
+2016 11 4 57 65 53.7 49 55 54 38 1 0 0 0 0 0 0
+2016 11 30 52 52 47.6 47 52 49 44 0 0 0 0 0 0 1
+2016 8 4 73 75 77.3 73 79 78 66 0 0 0 0 1 0 0
+2016 9 20 69 71 69.4 67 73 69 81 0 0 0 0 0 1 0
+2016 2 19 57 53 50.2 50 52 51 42 1 0 0 0 0 0 0
+2016 9 4 70 67 73.7 72 77 75 64 0 0 0 1 0 0 0
+2016 10 4 65 61 64.1 62 69 65 60 0 0 0 0 0 1 0
+2016 5 21 63 66 65.7 62 67 65 49 0 0 1 0 0 0 0
+2016 1 9 45 48 46.4 46 50 45 47 0 0 1 0 0 0 0
+2016 8 3 77 73 77.3 77 81 77 93 0 0 0 0 0 0 1
+2016 10 7 66 63 62.9 62 67 64 78 1 0 0 0 0 0 0
+2016 10 17 62 60 59.1 57 63 59 62 0 1 0 0 0 0 0
+2016 6 18 71 67 70.2 67 75 69 77 0 0 1 0 0 0 0
+2016 12 26 41 42 45.2 45 48 46 58 0 1 0 0 0 0 0
+2016 11 20 55 57 49.8 47 54 48 30 0 0 0 1 0 0 0
+2016 2 22 53 51 50.6 46 51 50 59 0 1 0 0 0 0 0
+2016 6 26 69 71 71.9 67 74 72 70 0 0 0 1 0 0 0
+2016 7 11 71 74 75.3 74 79 75 71 0 1 0 0 0 0 0
+2016 6 21 70 76 70.8 68 75 71 57 0 0 0 0 0 1 0
+2016 3 2 54 58 51.6 47 54 52 37 0 0 0 0 0 0 1
+2016 6 12 67 65 69.1 65 73 70 83 0 0 0 1 0 0 0
+2016 5 13 81 77 64.3 63 67 66 67 1 0 0 0 0 0 0
+2016 4 12 59 58 57.7 54 59 57 61 0 0 0 0 0 1 0
+2016 10 14 66 60 60.2 56 64 60 78 1 0 0 0 0 0 0
+2016 4 15 59 59 58.3 58 61 60 40 1 0 0 0 0 0 0
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_train01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_train01.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,196 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+[195 data rows in the same 17-column layout as train_test_split_test01.tabular; hunk body truncated in the source listing]
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_train02.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_train02.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,800 @@
+[800 rows of 20 tab-separated floating-point feature columns; hunk body truncated in the source listing]
diff -r 000000000000 -r af2624d5ab32 test-data/train_test_split_train03.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/train_test_split_train03.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,209 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+[208 data rows in the same 17-column layout; hunk body truncated in the source listing]
diff -r 000000000000 -r af2624d5ab32 test-data/true_header.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/true_header.tabular Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,35 @@
+cancer
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+1
+1
+0
+0
+1
+1
+0
+1
+1
+1
+1
+1
+1
+1
+0
+0
+0
+0
+0
+0
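true_header.tabular is a single-column label fixture: a "cancer" header followed by 34 binary labels. A minimal sketch of reading it follows; treating "cancer" as the target column is an assumption based on the header, not on the consuming wrapper:

# Sketch only: load the label column from the fixture above.
import pandas as pd

y = pd.read_csv("test-data/true_header.tabular", sep="\t")["cancer"]
print(y.value_counts())  # tally of the 0/1 labels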
diff -r 000000000000 -r af2624d5ab32 test-data/vectorizer_result01.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/vectorizer_result01.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,3788 @@
+%%MatrixMarket matrix coordinate real general
+%
+2 1048577 3785
+[3785 "row column value" coordinate entries; hunk body truncated in the source listing]
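The vectorizer_result*.mtx fixtures are MatrixMarket coordinate files; vectorizer_result01.mtx declares a 2 x 1048577 matrix with 3785 stored values (a width close to scikit-learn's HashingVectorizer default of 2**20 features, though that provenance is an inference, not stated in the diff). A minimal sketch of inspecting one:

# Sketch only: load a MatrixMarket fixture as a sparse matrix.
from scipy.io import mmread

X = mmread("test-data/vectorizer_result01.mtx").tocsr()
print(X.shape, X.nnz)  # expected: (2, 1048577) 3785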
diff -r 000000000000 -r af2624d5ab32 test-data/vectorizer_result02.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/vectorizer_result02.mtx Sat May 01 01:24:32 2021 +0000
@@ -0,0 +1,7346 @@
+%%MatrixMarket matrix coordinate real general
+%
+4 1048577 7343
+[7343 "row column value" coordinate entries; hunk body truncated in the source listing]
b
diff -r 000000000000 -r af2624d5ab32 test-data/vectorizer_result03.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/vectorizer_result03.mtx Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,9697 @@\n+%%MatrixMarket matrix coordinate real general\n+%\n+4 1048577 9694\n+1 71 0.01361331290463183\n+1 1450 0.01361331290463183\n+1 1961 0.01140545001599714\n+1 3747 0.01361331290463183\n+1 3795 0.02049545933475693\n+1 4053 0.01414945072069242\n+1 4357 0.01293018149022739\n+1 4379 0.02049545933475693\n+1 4727 0.01140545001599714\n+1 5795 0.01704506020053583\n+1 6201 0.02556759030080374\n+1 7170 0.01199234924928183\n+1 8116 0.02049545933475693\n+1 8523 0.01293018149022739\n+1 8531 0.01704506020053583\n+1 9300 0.02041996935694774\n+1 10078 0.05077070567367346\n+1 10495 0.02911328391873078\n+1 12091 0.0471648357356414\n+1 12155 0.01704506020053583\n+1 13013 0.01356353828493919\n+1 13181 0.01361331290463183\n+1 13234 0.009704427972910257\n+1 13280 0.01361331290463183\n+1 14120 0.01704506020053583\n+1 14527 0.01361331290463183\n+1 14873 0.01293018149022739\n+1 15083 0.01361331290463183\n+1 15241 0.01293018149022739\n+1 15283 0.01694765905913661\n+1 16177 0.01140545001599714\n+1 17436 0.01361331290463183\n+1 19449 0.02016217762198948\n+1 20413 0.01361331290463183\n+1 20975 0.05188131930920554\n+1 21084 0.01293018149022739\n+1 21531 0.01288750750752746\n+1 22178 0.004716483573564139\n+1 22283 0.01496776720543493\n+1 22554 0.01939527223534108\n+1 22570 0.01293018149022739\n+1 22706 0.02586036298045478\n+1 22715 0.01288750750752746\n+1 23188 0.01361331290463183\n+1 23698 0.01140545001599714\n+1 24309 0.02016217762198948\n+1 24510 0.01293018149022739\n+1 24541 0.01288750750752746\n+1 25019 0.01140545001599714\n+1 26513 0.01199234924928183\n+1 26569 0.01692356855789115\n+1 26721 0.01293018149022739\n+1 27172 0.01293018149022739\n+1 27389 0.02556759030080374\n+1 28621 0.01293018149022739\n+1 29313 0.0224516508081524\n+1 29735 0.03384713711578231\n+1 30007 0.02556759030080374\n+1 30445 0.01361331290463183\n+1 30453 0.04490330161630479\n+1 30833 0.01199234924928183\n+1 31159 0.01704506020053583\n+1 31588 0.02577501501505492\n+1 31713 0.01704506020053583\n+1 31931 0.02049545933475693\n+1 31963 0.01288750750752746\n+1 33113 0.01356353828493919\n+1 33433 0.01692356855789115\n+1 33979 0.0224516508081524\n+1 34869 0.01293018149022739\n+1 35056 0.01704506020053583\n+1 35828 0.01199234924928183\n+1 35928 0.01496776720543493\n+1 36463 0.01692356855789115\n+1 36581 0.01361331290463183\n+1 37021 0.01293018149022739\n+1 37256 0.01361331290463183\n+1 37731 0.01692356855789115\n+1 37831 0.01361331290463183\n+1 37859 0.01140545001599714\n+1 38203 0.01288750750752746\n+1 38978 0.02049545933475693\n+1 39526 0.01293018149022739\n+1 39705 0.01140545001599714\n+1 39762 0.02398469849856365\n+1 40007 0.01263004034003412\n+1 40920 0.004716483573564139\n+1 40975 0.01199234924928183\n+1 41367 0.01704506020053583\n+1 41385 0.01704506020053583\n+1 41559 0.02577501501505492\n+1 41645 0.01692356855789115\n+1 41800 0.03474305741210783\n+1 42042 0.02049545933475693\n+1 42057 0.01361331290463183\n+1 42103 0.01293018149022739\n+1 43077 0.02016217762198948\n+1 43474 0.01361331290463183\n+1 43781 0.01455664195936539\n+1 43937 0.02049545933475693\n+1 44401 0.01694765905913661\n+1 45359 0.02016217762198948\n+1 45447 0.0224516508081524\n+1 45548 0.01692356855789115\n+1 45746 0.01704506020053583\n+1 46264 0.01140545001599714\n+1 46627 0.006315020170017061\n+1 48545 0.01199234924928183\n+1 50045 0.02016217762198948\n+1 50271 0.01356353828493919\n+1 50742 0.05188131930920554\n+1 50871 0.01356353828493919\n+1 51188 0.01293018149022739\n+1 51486 0.02049545933475693\n+1 52094 0.01704506020053583\n+1 52548 
0.02016217762198948\n+1 52666 0.01293018149022739\n+1 52833 0.003157510085008531\n+1 53247 0.0171081750239957\n+1 53497 0.01293018149022739\n+1 54371 0.01694765905913661\n+1 55601 0.01288750750752746\n+1 55999 0.02049545933475693\n+1 56154 0.02281090003199427\n+1 57027 0.01356353828493919\n+1 57543 0.02049545933475693\n+1 57633 0.02398469849856365\n+1 58055 0.03384713711578231\n+1 58277 0.01704506020053583\n+1 58575 0.02722662580926366\n+1 58845 0.01361331290463183\n+1 58964 0.01886593429425656\n+1 59388 0.02016217762198948\n+1 59679 0.01199234924928183\n+1 60697 0.01140545001599714\n+1 61549 0.013613'..b' 0.02434907336554575\n+4 989433 0.02550919576861992\n+4 990804 0.01217453668277288\n+4 991468 0.006824756966025142\n+4 991475 0.02837632374130612\n+4 991935 0.01648847232539554\n+4 991985 0.01715397885778408\n+4 992490 0.01418816187065306\n+4 993053 0.03412378483012571\n+4 993510 0.02008530766406564\n+4 994106 0.02729902786410057\n+4 994950 0.01217453668277288\n+4 995605 0.01275459788430996\n+4 995653 0.01648847232539554\n+4 995751 0.02008530766406564\n+4 996536 0.01217453668277288\n+4 997796 0.02729902786410057\n+4 997962 0.02008530766406564\n+4 997977 0.02008530766406564\n+4 998855 0.01418816187065306\n+4 999264 0.01364951393205028\n+4 1000099 0.01364951393205028\n+4 1000335 0.01418816187065306\n+4 1000999 0.01715397885778408\n+4 1001147 0.02008530766406564\n+4 1001734 0.01715397885778408\n+4 1002607 0.01715397885778408\n+4 1002835 0.02008530766406564\n+4 1003113 0.02008530766406564\n+4 1003223 0.03279917006918209\n+4 1003403 0.01648847232539554\n+4 1003884 0.02837632374130612\n+4 1005765 0.01715397885778408\n+4 1006311 0.01275459788430996\n+4 1006420 0.02008530766406564\n+4 1006560 0.01364951393205028\n+4 1006793 0.02008530766406564\n+4 1007602 0.01418816187065306\n+4 1007775 0.01418816187065306\n+4 1007912 0.01217453668277288\n+4 1007977 0.02008530766406564\n+4 1008679 0.01364951393205028\n+4 1008831 0.01217453668277288\n+4 1008899 0.01364951393205028\n+4 1009994 0.01715397885778408\n+4 1010769 0.02008530766406564\n+4 1010791 0.01418816187065306\n+4 1011301 0.01364951393205028\n+4 1011321 0.02729902786410057\n+4 1012185 0.02008530766406564\n+4 1012987 0.02008530766406564\n+4 1013009 0.01418816187065306\n+4 1014667 0.01418816187065306\n+4 1014853 0.01418816187065306\n+4 1015484 0.01418816187065306\n+4 1016179 0.01715397885778408\n+4 1016289 0.01715397885778408\n+4 1016369 0.00637729894215498\n+4 1017173 0.01715397885778408\n+4 1017602 0.02128224280597959\n+4 1018165 0.02008530766406564\n+4 1018883 0.02008530766406564\n+4 1019196 0.01217453668277288\n+4 1019328 0.01364951393205028\n+4 1019929 0.01217453668277288\n+4 1020006 0.02008530766406564\n+4 1020555 0.01364951393205028\n+4 1020677 0.01715397885778408\n+4 1020740 0.03965379373314344\n+4 1021827 0.02008530766406564\n+4 1021884 0.01217453668277288\n+4 1022182 0.006087268341386438\n+4 1023273 0.01275459788430996\n+4 1026711 0.01275459788430996\n+4 1026790 0.01364951393205028\n+4 1027300 0.01418816187065306\n+4 1027631 0.02008530766406564\n+4 1027652 0.01715397885778408\n+4 1027973 0.01364951393205028\n+4 1028155 0.03297694465079107\n+4 1028295 0.01364951393205028\n+4 1028315 0.04946541697618662\n+4 1028638 0.01715397885778408\n+4 1028912 0.01364951393205028\n+4 1029870 0.02573096828667612\n+4 1030059 0.01364951393205028\n+4 1030226 0.01275459788430996\n+4 1031121 0.01364951393205028\n+4 1032791 0.01648847232539554\n+4 1033089 0.01715397885778408\n+4 1033256 0.01364951393205028\n+4 1033699 0.01648847232539554\n+4 1033833 
0.01217453668277288\n+4 1034350 0.02729902786410057\n+4 1034513 0.02008530766406564\n+4 1035161 0.01275459788430996\n+4 1035177 0.030331207224535\n+4 1036844 0.02573096828667612\n+4 1037044 0.02434907336554575\n+4 1037226 0.02008530766406564\n+4 1037251 0.01648847232539554\n+4 1037326 0.02573096828667612\n+4 1037360 0.01299725962753113\n+4 1037916 0.02837632374130612\n+4 1038731 0.01364951393205028\n+4 1039281 0.02008530766406564\n+4 1039328 0.01715397885778408\n+4 1039896 0.01715397885778408\n+4 1040201 0.01057434499550492\n+4 1040273 0.02128224280597959\n+4 1040325 0.02008530766406564\n+4 1040381 0.02729902786410057\n+4 1040891 0.01648847232539554\n+4 1040913 0.02128224280597959\n+4 1041230 0.01364951393205028\n+4 1042343 0.02008530766406564\n+4 1042547 0.0151656036122675\n+4 1042627 0.01715397885778408\n+4 1042774 0.01715397885778408\n+4 1042815 0.01418816187065306\n+4 1043231 0.01275459788430996\n+4 1043781 0.01364951393205028\n+4 1044112 0.01364951393205028\n+4 1046133 0.01715397885778408\n+4 1046509 0.01648847232539554\n+4 1046561 0.02729902786410057\n+4 1046703 0.01715397885778408\n+4 1047335 0.01364951393205028\n+4 1047547 0.01217453668277288\n+4 1048455 0.02008530766406564\n'
b
diff -r 000000000000 -r af2624d5ab32 test-data/vectorizer_result04.mtx
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/vectorizer_result04.mtx Sat May 01 01:24:32 2021 +0000
b
b'@@ -0,0 +1,9697 @@\n+%%MatrixMarket matrix coordinate real general\n+%\n+4 1048577 9694\n+1 71 0.01361331290463183\n+1 1450 0.01361331290463183\n+1 1961 0.01140545001599714\n+1 3747 0.01361331290463183\n+1 3795 0.02049545933475693\n+1 4053 0.01414945072069242\n+1 4357 0.01293018149022739\n+1 4379 0.02049545933475693\n+1 4727 0.01140545001599714\n+1 5795 0.01704506020053583\n+1 6201 0.02556759030080374\n+1 7170 0.01199234924928183\n+1 8116 0.02049545933475693\n+1 8523 0.01293018149022739\n+1 8531 0.01704506020053583\n+1 9300 0.02041996935694774\n+1 10078 0.05077070567367346\n+1 10495 0.02911328391873078\n+1 12091 0.0471648357356414\n+1 12155 0.01704506020053583\n+1 13013 0.01356353828493919\n+1 13181 0.01361331290463183\n+1 13234 0.009704427972910257\n+1 13280 0.01361331290463183\n+1 14120 0.01704506020053583\n+1 14527 0.01361331290463183\n+1 14873 0.01293018149022739\n+1 15083 0.01361331290463183\n+1 15241 0.01293018149022739\n+1 15283 0.01694765905913661\n+1 16177 0.01140545001599714\n+1 17436 0.01361331290463183\n+1 19449 0.02016217762198948\n+1 20413 0.01361331290463183\n+1 20975 0.05188131930920554\n+1 21084 0.01293018149022739\n+1 21531 0.01288750750752746\n+1 22178 0.004716483573564139\n+1 22283 0.01496776720543493\n+1 22554 0.01939527223534108\n+1 22570 0.01293018149022739\n+1 22706 0.02586036298045478\n+1 22715 0.01288750750752746\n+1 23188 0.01361331290463183\n+1 23698 0.01140545001599714\n+1 24309 0.02016217762198948\n+1 24510 0.01293018149022739\n+1 24541 0.01288750750752746\n+1 25019 0.01140545001599714\n+1 26513 0.01199234924928183\n+1 26569 0.01692356855789115\n+1 26721 0.01293018149022739\n+1 27172 0.01293018149022739\n+1 27389 0.02556759030080374\n+1 28621 0.01293018149022739\n+1 29313 0.0224516508081524\n+1 29735 0.03384713711578231\n+1 30007 0.02556759030080374\n+1 30445 0.01361331290463183\n+1 30453 0.04490330161630479\n+1 30833 0.01199234924928183\n+1 31159 0.01704506020053583\n+1 31588 0.02577501501505492\n+1 31713 0.01704506020053583\n+1 31931 0.02049545933475693\n+1 31963 0.01288750750752746\n+1 33113 0.01356353828493919\n+1 33433 0.01692356855789115\n+1 33979 0.0224516508081524\n+1 34869 0.01293018149022739\n+1 35056 0.01704506020053583\n+1 35828 0.01199234924928183\n+1 35928 0.01496776720543493\n+1 36463 0.01692356855789115\n+1 36581 0.01361331290463183\n+1 37021 0.01293018149022739\n+1 37256 0.01361331290463183\n+1 37731 0.01692356855789115\n+1 37831 0.01361331290463183\n+1 37859 0.01140545001599714\n+1 38203 0.01288750750752746\n+1 38978 0.02049545933475693\n+1 39526 0.01293018149022739\n+1 39705 0.01140545001599714\n+1 39762 0.02398469849856365\n+1 40007 0.01263004034003412\n+1 40920 0.004716483573564139\n+1 40975 0.01199234924928183\n+1 41367 0.01704506020053583\n+1 41385 0.01704506020053583\n+1 41559 0.02577501501505492\n+1 41645 0.01692356855789115\n+1 41800 0.03474305741210783\n+1 42042 0.02049545933475693\n+1 42057 0.01361331290463183\n+1 42103 0.01293018149022739\n+1 43077 0.02016217762198948\n+1 43474 0.01361331290463183\n+1 43781 0.01455664195936539\n+1 43937 0.02049545933475693\n+1 44401 0.01694765905913661\n+1 45359 0.02016217762198948\n+1 45447 0.0224516508081524\n+1 45548 0.01692356855789115\n+1 45746 0.01704506020053583\n+1 46264 0.01140545001599714\n+1 46627 0.006315020170017061\n+1 48545 0.01199234924928183\n+1 50045 0.02016217762198948\n+1 50271 0.01356353828493919\n+1 50742 0.05188131930920554\n+1 50871 0.01356353828493919\n+1 51188 0.01293018149022739\n+1 51486 0.02049545933475693\n+1 52094 0.01704506020053583\n+1 52548 
0.02016217762198948\n+1 52666 0.01293018149022739\n+1 52833 0.003157510085008531\n+1 53247 0.0171081750239957\n+1 53497 0.01293018149022739\n+1 54371 0.01694765905913661\n+1 55601 0.01288750750752746\n+1 55999 0.02049545933475693\n+1 56154 0.02281090003199427\n+1 57027 0.01356353828493919\n+1 57543 0.02049545933475693\n+1 57633 0.02398469849856365\n+1 58055 0.03384713711578231\n+1 58277 0.01704506020053583\n+1 58575 0.02722662580926366\n+1 58845 0.01361331290463183\n+1 58964 0.01886593429425656\n+1 59388 0.02016217762198948\n+1 59679 0.01199234924928183\n+1 60697 0.01140545001599714\n+1 61549 0.013613'..b' 0.02434907336554575\n+4 989433 0.02550919576861992\n+4 990804 0.01217453668277288\n+4 991468 0.006824756966025142\n+4 991475 0.02837632374130612\n+4 991935 0.01648847232539554\n+4 991985 0.01715397885778408\n+4 992490 0.01418816187065306\n+4 993053 0.03412378483012571\n+4 993510 0.02008530766406564\n+4 994106 0.02729902786410057\n+4 994950 0.01217453668277288\n+4 995605 0.01275459788430996\n+4 995653 0.01648847232539554\n+4 995751 0.02008530766406564\n+4 996536 0.01217453668277288\n+4 997796 0.02729902786410057\n+4 997962 0.02008530766406564\n+4 997977 0.02008530766406564\n+4 998855 0.01418816187065306\n+4 999264 0.01364951393205028\n+4 1000099 0.01364951393205028\n+4 1000335 0.01418816187065306\n+4 1000999 0.01715397885778408\n+4 1001147 0.02008530766406564\n+4 1001734 0.01715397885778408\n+4 1002607 0.01715397885778408\n+4 1002835 0.02008530766406564\n+4 1003113 0.02008530766406564\n+4 1003223 0.03279917006918209\n+4 1003403 0.01648847232539554\n+4 1003884 0.02837632374130612\n+4 1005765 0.01715397885778408\n+4 1006311 0.01275459788430996\n+4 1006420 0.02008530766406564\n+4 1006560 0.01364951393205028\n+4 1006793 0.02008530766406564\n+4 1007602 0.01418816187065306\n+4 1007775 0.01418816187065306\n+4 1007912 0.01217453668277288\n+4 1007977 0.02008530766406564\n+4 1008679 0.01364951393205028\n+4 1008831 0.01217453668277288\n+4 1008899 0.01364951393205028\n+4 1009994 0.01715397885778408\n+4 1010769 0.02008530766406564\n+4 1010791 0.01418816187065306\n+4 1011301 0.01364951393205028\n+4 1011321 0.02729902786410057\n+4 1012185 0.02008530766406564\n+4 1012987 0.02008530766406564\n+4 1013009 0.01418816187065306\n+4 1014667 0.01418816187065306\n+4 1014853 0.01418816187065306\n+4 1015484 0.01418816187065306\n+4 1016179 0.01715397885778408\n+4 1016289 0.01715397885778408\n+4 1016369 0.00637729894215498\n+4 1017173 0.01715397885778408\n+4 1017602 0.02128224280597959\n+4 1018165 0.02008530766406564\n+4 1018883 0.02008530766406564\n+4 1019196 0.01217453668277288\n+4 1019328 0.01364951393205028\n+4 1019929 0.01217453668277288\n+4 1020006 0.02008530766406564\n+4 1020555 0.01364951393205028\n+4 1020677 0.01715397885778408\n+4 1020740 0.03965379373314344\n+4 1021827 0.02008530766406564\n+4 1021884 0.01217453668277288\n+4 1022182 0.006087268341386438\n+4 1023273 0.01275459788430996\n+4 1026711 0.01275459788430996\n+4 1026790 0.01364951393205028\n+4 1027300 0.01418816187065306\n+4 1027631 0.02008530766406564\n+4 1027652 0.01715397885778408\n+4 1027973 0.01364951393205028\n+4 1028155 0.03297694465079107\n+4 1028295 0.01364951393205028\n+4 1028315 0.04946541697618662\n+4 1028638 0.01715397885778408\n+4 1028912 0.01364951393205028\n+4 1029870 0.02573096828667612\n+4 1030059 0.01364951393205028\n+4 1030226 0.01275459788430996\n+4 1031121 0.01364951393205028\n+4 1032791 0.01648847232539554\n+4 1033089 0.01715397885778408\n+4 1033256 0.01364951393205028\n+4 1033699 0.01648847232539554\n+4 1033833 
0.01217453668277288\n+4 1034350 0.02729902786410057\n+4 1034513 0.02008530766406564\n+4 1035161 0.01275459788430996\n+4 1035177 0.030331207224535\n+4 1036844 0.02573096828667612\n+4 1037044 0.02434907336554575\n+4 1037226 0.02008530766406564\n+4 1037251 0.01648847232539554\n+4 1037326 0.02573096828667612\n+4 1037360 0.01299725962753113\n+4 1037916 0.02837632374130612\n+4 1038731 0.01364951393205028\n+4 1039281 0.02008530766406564\n+4 1039328 0.01715397885778408\n+4 1039896 0.01715397885778408\n+4 1040201 0.01057434499550492\n+4 1040273 0.02128224280597959\n+4 1040325 0.02008530766406564\n+4 1040381 0.02729902786410057\n+4 1040891 0.01648847232539554\n+4 1040913 0.02128224280597959\n+4 1041230 0.01364951393205028\n+4 1042343 0.02008530766406564\n+4 1042547 0.0151656036122675\n+4 1042627 0.01715397885778408\n+4 1042774 0.01715397885778408\n+4 1042815 0.01418816187065306\n+4 1043231 0.01275459788430996\n+4 1043781 0.01364951393205028\n+4 1044112 0.01364951393205028\n+4 1046133 0.01715397885778408\n+4 1046509 0.01648847232539554\n+4 1046561 0.02729902786410057\n+4 1046703 0.01715397885778408\n+4 1047335 0.01364951393205028\n+4 1047547 0.01217453668277288\n+4 1048455 0.02008530766406564\n'
b
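Note (not part of the changeset): a minimal sketch of how these MatrixMarket fixtures can be inspected, assuming SciPy is available — the tools in this changeset already import scipy.io.mmread — and using the file path from the diff header above:

    from scipy.io import mmread

    # Load the sparse matrix; entries are 1-based (row, column, value)
    # coordinates, and the size line "4 1048577 9694" gives the number
    # of rows, columns, and stored non-zeros.
    matrix = mmread("test-data/vectorizer_result03.mtx").tocsr()
    print(matrix.shape)  # (4, 1048577)
    print(matrix.nnz)    # 9694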
diff -r 000000000000 -r af2624d5ab32 test-data/y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/y.tabular Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,39 @@
+0 0 0.0 1.0 2.0 0 0 -2.76903910779 -0.777269253713 2.08028572913
+2 2 0.0 0.0 1.0 1 1 -1.46032791667 0.555654963057 -1.54234795893
+1 1 0.0 1.0 0.0 1 1 1.72939677275 -1.3402943146 -7.95375106924
+0 0 1.0 0.0 0.0 0 0 -3.15016545997 0.19568758864 1.40593056786
+2 2 0.0 0.0 1.0 1 1 1.21845859294 -0.677633363546 -6.62421395692
+0 0 1.0 0.0 0.0 0 0 -3.25263252854 -0.908498631085 2.74671790479
+2 2 0.0 0.0 1.0 1 1 1.38291089706 -0.924165117418 -6.87449092795
+0 0 1.0 0.0 0.0 0 0 -1.7423603376 -0.326034812837 -1.12743832183
+1 2 0.0 1.0 0.0 1 1 -1.88141734237 0.0471879612496 -0.990485600884
+1 2 0.0 0.9 0.1 1 1 -1.32547081613 -0.193430743286 -1.66958283068
+1 1 0.0 1.0 0.0 1 1 -2.7799666645 0.485621555351 1.21494093967
+2 2 0.0 0.2 0.8 1 1 -1.60125339649 -0.493901618129 -1.20213785254
+1 2 0.0 1.0 0.0 1 1 -1.86658623206 0.162709340336 -0.691875382528
+1 1 0.0 1.0 0.0 1 1 -1.82214550549 -0.130278514956 -0.836834994045
+1 2 0.0 0.9 0.1 1 1 -1.910728736 -0.0978509403157 -0.469743754594
+1 2 0.0 1.0 0.0 1 1 1.1191441248 -0.350015230403 -6.43122655533
+0 0 1.0 0.0 0.0 0 0 -1.80789829975 -0.267725170783 -0.533251833633
+1 1 0.0 0.9 0.1 1 1 -1.82704375852 0.186802710054 -0.367392242502
+1 1 0.0 0.9 0.1 1 1 1.05683832083 -0.491476736579 -6.10526049159
+0 0 1.0 0.0 0.0 0 0 1.58740583243 -1.32084852823 -7.47140590741
+0 0 1.0 0.0 0.0 0 0 -2.47802529094 -0.500673021108 1.37455405057
+2 2 0.0 0.3 0.7 1 1 -1.85517293032 -0.363363308535 -0.177124010926
+1 1 0.0 0.8 0.2 1 1 0.84169544958 -0.533176028466 -5.7625592501
+0 0 1.0 0.0 0.0 0 0 0.971871089969 -0.336154264594 -5.74291415928
+0 0 1.0 0.0 0.0 0 0 -2.18006328471 -0.33580204472 0.261632810716
+2 2 0.0 0.2 0.8 1 1 1.62753221054 -1.0437871236 -7.15189570944
+0 0 1.0 0.0 0.0 0 0 0.982418549211 -1.02370887933 -6.10073429813
+0 0 1.0 0.0 0.0 0 0 -1.51375235626 -0.156051081077 -1.37297970696
+1 1 0.0 1.0 0.0 1 1 -1.05517039337 0.171153321655 -1.66261211523
+1 1 0.0 1.0 0.0 1 1 1.05117238483 -0.819727602718 -6.16276877471
+0 0 1.0 0.0 0.0 0 0 -2.60008493281 -0.303483971372 0.937773514338
+2 2 0.0 0.0 1.0 1 1 -1.89873152969 -0.370955554274 0.0400346749524
+1 1 0.0 0.8 0.2 1 1 1.30185976049 -0.750494764082 -6.91956219185
+0 0 1.0 0.0 0.0 0 0 -2.20545858405 -0.462493064934 0.374957060793
+2 2 0.0 0.3 0.7 1 1 -2.97088391755 -0.384323906096 1.93410852068
+2 2 0.0 0.0 1.0 1 1 -1.52001848153 -0.275207915229 -0.625142611926
+1 1 0.0 1.0 0.0 1 1 1.32168915538 -0.986903615337 -7.22461895473
+0 0 1.0 0.0 0.0 0 0 -2.42938278814 0.0312031758068 0.740031884365
+1 2 0.0 0.0 1.0 1 1 -1.52001848153 -0.370955554274 0.937773514338
b
diff -r 000000000000 -r af2624d5ab32 test-data/y_score.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/y_score.tabular Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,75 @@
+0.04521016253284027
+-0.0017878318955413253
+-0.3380009790698638
+-0.15416229901482092
+-0.008989122568787922
+0.3775746361984437
+-0.20342288788672414
+0.21787658306027935
+-0.5322523189136876
+-0.6361907868807346
+-0.036875765955103335
+-0.24857077769453662
+-0.5305978020035378
+-0.5288479779433272
+-0.22579627342382325
+0.4905346629557697
+-0.12238193946346121
+-0.42773421293023084
+0.16878080982659216
+0.051637548704625946
+0.023623352380110763
+-0.3553978552068183
+-0.4597636722184091
+-0.36924223816393
+-0.539585171546133
+-0.4138055622986405
+-0.25401950905817183
+0.35124248378117207
+-0.5767911246317095
+-0.4452974937020068
+0.13456824841567622
+-0.08366761511503285
+-0.5855411774730717
+0.4493951821813167
+-0.0008118901312900162
+-0.375188782981553
+-0.052180286682808386
+-0.3624923116131733
+-0.3212899940903371
+-0.6326134385656439
+-0.5951558341213625
+-0.026698968757988106
+-0.6389295278289815
+-0.4665622957151918
+0.24683878631472084
+0.06670297201702563
+-0.09995075976356604
+-0.0026791784207790825
+-0.26843502542172126
+-0.23167967546053814
+-0.5500853075669638
+-0.07278578744420061
+-0.1908269856404199
+-0.10431209677312014
+-0.40541232698507823
+-1.3031302463301446
+-0.10509162333664135
+-0.06155868232417461
+-0.4347097510343062
+-0.8391150198454305
+-0.5372307413404114
+-0.46030478301666744
+-0.11618205513493052
+-0.021278188504645024
+-0.16029035414173087
+-0.35975375227600914
+-0.4814892536194141
+-0.1385760560857231
+0.3409736022465082
+-0.5355178831501075
+0.22534151535735567
+0.07294052191693523
+-0.3386178239054628
+0.15540977852505278
+0.07383896651967975
b
diff -r 000000000000 -r af2624d5ab32 test-data/y_true.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/y_true.tabular Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,75 @@
+0
+1
+0
+0
+1
+1
+1
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+1
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+1
+0
+1
+0
+0
+0
+1
b
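Note (not part of the changeset): y_true.tabular and y_score.tabular each hold 75 single-column rows. Assuming they form a matched label/decision-score pair — an assumption, not stated in the diff — a threshold-free metric could be computed as:

    import pandas as pd
    from sklearn.metrics import roc_auc_score

    # Hypothetical pairing of the two fixtures added above.
    y_true = pd.read_csv("test-data/y_true.tabular", sep="\t", header=None).iloc[:, 0]
    y_score = pd.read_csv("test-data/y_score.tabular", sep="\t", header=None).iloc[:, 0]
    print(roc_auc_score(y_true, y_score))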
diff -r 000000000000 -r af2624d5ab32 test-data/zero_one_loss.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/zero_one_loss.txt Sat May 01 01:24:32 2021 +0000
b
@@ -0,0 +1,2 @@
+zero_one_loss : 
+0.15384615384615385
b
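Note (not part of the changeset): the fixture's value 0.15384615384615385 equals 2/13. A minimal sketch reproducing it with scikit-learn; the label vectors below are made up for illustration:

    from sklearn.metrics import zero_one_loss

    # 2 of the 13 predictions disagree with the truth -> 2/13.
    y_true = [0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0]
    y_pred = [0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0]
    print(zero_one_loss(y_true, y_pred))  # 0.15384615384615385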
diff -r 000000000000 -r af2624d5ab32 to_categorical.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/to_categorical.py Sat May 01 01:24:32 2021 +0000
[
@@ -0,0 +1,52 @@
+import argparse
+import json
+import warnings
+
+import numpy as np
+import pandas as pd
+from keras.utils import to_categorical
+
+
+def main(inputs, infile, outfile, num_classes=None):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile : str
+        File path to input vector
+
+    outfile : str
+        File path to output matrix
+
+    num_classes : int
+        Total number of classes. If None, it is inferred as (largest number in y) + 1
+
+    """
+    warnings.simplefilter("ignore")
+
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    input_header = params["header0"]
+    header = "infer" if input_header else None
+
+    input_vector = pd.read_csv(infile, sep="\t", header=header)
+
+    output_matrix = to_categorical(input_vector, num_classes=num_classes)
+
+    np.savetxt(outfile, output_matrix, fmt="%d", delimiter="\t")
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-y", "--infile", dest="infile")
+    aparser.add_argument(
+        "-n", "--num_classes", dest="num_classes", type=int, default=None
+    )
+    aparser.add_argument("-o", "--outfile", dest="outfile")
+    args = aparser.parse_args()
+
+    main(args.inputs, args.infile, args.outfile, args.num_classes)
b
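Note (not part of the changeset): to_categorical.py wraps keras.utils.to_categorical, which the script imports. A minimal sketch of the underlying conversion; the label vector is made up, and with num_classes=None the width is inferred as max(y) + 1:

    import numpy as np
    from keras.utils import to_categorical

    y = np.array([0, 2, 1, 0])
    print(to_categorical(y))
    # [[1. 0. 0.]
    #  [0. 0. 1.]
    #  [0. 1. 0.]
    #  [1. 0. 0.]]

Per the argparse flags above, the script itself would be invoked along the lines of `python to_categorical.py -i params.json -y labels.tabular -o out.tabular` (file names hypothetical).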
diff -r 000000000000 -r af2624d5ab32 train_test_eval.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/train_test_eval.py Sat May 01 01:24:32 2021 +0000
[
b'@@ -0,0 +1,477 @@\n+import argparse\n+import json\n+import os\n+import pickle\n+import warnings\n+from itertools import chain\n+\n+import joblib\n+import numpy as np\n+import pandas as pd\n+from galaxy_ml.model_validations import train_test_split\n+from galaxy_ml.utils import (get_module, get_scoring, load_model,\n+                             read_columns, SafeEval, try_get_attr)\n+from scipy.io import mmread\n+from sklearn import pipeline\n+from sklearn.metrics.scorer import _check_multimetric_scoring\n+from sklearn.model_selection import _search, _validation\n+from sklearn.model_selection._validation import _score\n+from sklearn.utils import indexable, safe_indexing\n+\n+_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")\n+setattr(_search, "_fit_and_score", _fit_and_score)\n+setattr(_validation, "_fit_and_score", _fit_and_score)\n+\n+N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))\n+CACHE_DIR = os.path.join(os.getcwd(), "cached")\n+del os\n+NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")\n+ALLOWED_CALLBACKS = (\n+    "EarlyStopping",\n+    "TerminateOnNaN",\n+    "ReduceLROnPlateau",\n+    "CSVLogger",\n+    "None",\n+)\n+\n+\n+def _eval_swap_params(params_builder):\n+    swap_params = {}\n+\n+    for p in params_builder["param_set"]:\n+        swap_value = p["sp_value"].strip()\n+        if swap_value == "":\n+            continue\n+\n+        param_name = p["sp_name"]\n+        if param_name.lower().endswith(NON_SEARCHABLE):\n+            warnings.warn(\n+                "Warning: `%s` is not eligible for search and was "\n+                "omitted!" % param_name\n+            )\n+            continue\n+\n+        if not swap_value.startswith(":"):\n+            safe_eval = SafeEval(load_scipy=True, load_numpy=True)\n+            ev = safe_eval(swap_value)\n+        else:\n+            # Have `:` before search list, asks for estimator evaluatio\n+            safe_eval_es = SafeEval(load_estimators=True)\n+            swap_value = swap_value[1:].strip()\n+            # TODO maybe add regular express check\n+            ev = safe_eval_es(swap_value)\n+\n+        swap_params[param_name] = ev\n+\n+    return swap_params\n+\n+\n+def train_test_split_none(*arrays, **kwargs):\n+    """extend train_test_split to take None arrays\n+    and support split by group names.\n+    """\n+    nones = []\n+    new_arrays = []\n+    for idx, arr in enumerate(arrays):\n+        if arr is None:\n+            nones.append(idx)\n+        else:\n+            new_arrays.append(arr)\n+\n+    if kwargs["shuffle"] == "None":\n+        kwargs["shuffle"] = None\n+\n+    group_names = kwargs.pop("group_names", None)\n+\n+    if group_names is not None and group_names.strip():\n+        group_names = [name.strip() for name in group_names.split(",")]\n+        new_arrays = indexable(*new_arrays)\n+        groups = kwargs["labels"]\n+        n_samples = new_arrays[0].shape[0]\n+        index_arr = np.arange(n_samples)\n+        test = index_arr[np.isin(groups, group_names)]\n+        train = index_arr[~np.isin(groups, group_names)]\n+        rval = list(\n+            chain.from_iterable(\n+                (safe_indexing(a, train), safe_indexing(a, test)) for a in new_arrays\n+            )\n+        )\n+    else:\n+        rval = train_test_split(*new_arrays, **kwargs)\n+\n+    for pos in nones:\n+        rval[pos * 2: 2] = [None, None]\n+\n+    return rval\n+\n+\n+def main(\n+    inputs,\n+    infile_estimator,\n+    infile1,\n+    infile2,\n+    
outfile_result,\n+    outfile_object=None,\n+    outfile_weights=None,\n+    groups=None,\n+    ref_seq=None,\n+    intervals=None,\n+    targets=None,\n+    fasta_path=None,\n+):\n+    """\n+    Parameter\n+    ---------\n+    inputs : str\n+        File path to galaxy tool parameter\n+\n+    infile_estimator : str\n+        File path to estimator\n+\n+    infile1 : str\n+        File path to dataset containing features\n+\n+    infile2 : str\n+        File path to dataset containing target values\n+\n+    outfile_result : str\n+        File path to save the results, either cv_r'..b'"]\n+\n+    # handle validation (second) split\n+    if exp_scheme == "train_val_test":\n+        val_split_options = params["experiment_schemes"]["val_split"]["split_algos"]\n+\n+        if val_split_options["shuffle"] == "group":\n+            val_split_options["labels"] = groups_train\n+        if val_split_options["shuffle"] == "stratified":\n+            if y_train is not None:\n+                val_split_options["labels"] = y_train\n+            else:\n+                raise ValueError(\n+                    "Stratified shuffle split is not "\n+                    "applicable on empty target values!"\n+                )\n+\n+        (\n+            X_train,\n+            X_val,\n+            y_train,\n+            y_val,\n+            groups_train,\n+            _groups_val,\n+        ) = train_test_split_none(X_train, y_train, groups_train, **val_split_options)\n+\n+    # train and eval\n+    if hasattr(estimator, "validation_data"):\n+        if exp_scheme == "train_val_test":\n+            estimator.fit(X_train, y_train, validation_data=(X_val, y_val))\n+        else:\n+            estimator.fit(X_train, y_train, validation_data=(X_test, y_test))\n+    else:\n+        estimator.fit(X_train, y_train)\n+\n+    if hasattr(estimator, "evaluate"):\n+        scores = estimator.evaluate(\n+            X_test, y_test=y_test, scorer=scorer, is_multimetric=True\n+        )\n+    else:\n+        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)\n+    # handle output\n+    for name, score in scores.items():\n+        scores[name] = [score]\n+    df = pd.DataFrame(scores)\n+    df = df[sorted(df.columns)]\n+    df.to_csv(path_or_buf=outfile_result, sep="\\t", header=True, index=False)\n+\n+    memory.clear(warn=False)\n+\n+    if outfile_object:\n+        main_est = estimator\n+        if isinstance(estimator, pipeline.Pipeline):\n+            main_est = estimator.steps[-1][-1]\n+\n+        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):\n+            if outfile_weights:\n+                main_est.save_weights(outfile_weights)\n+            if getattr(main_est, "model_", None):\n+                del main_est.model_\n+            if getattr(main_est, "fit_params", None):\n+                del main_est.fit_params\n+            if getattr(main_est, "model_class_", None):\n+                del main_est.model_class_\n+            if getattr(main_est, "validation_data", None):\n+                del main_est.validation_data\n+            if getattr(main_est, "data_generator_", None):\n+                del main_est.data_generator_\n+\n+        with open(outfile_object, "wb") as output_handler:\n+            pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)\n+\n+\n+if __name__ == "__main__":\n+    aparser = argparse.ArgumentParser()\n+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)\n+    aparser.add_argument("-e", "--estimator", 
dest="infile_estimator")\n+    aparser.add_argument("-X", "--infile1", dest="infile1")\n+    aparser.add_argument("-y", "--infile2", dest="infile2")\n+    aparser.add_argument("-O", "--outfile_result", dest="outfile_result")\n+    aparser.add_argument("-o", "--outfile_object", dest="outfile_object")\n+    aparser.add_argument("-w", "--outfile_weights", dest="outfile_weights")\n+    aparser.add_argument("-g", "--groups", dest="groups")\n+    aparser.add_argument("-r", "--ref_seq", dest="ref_seq")\n+    aparser.add_argument("-b", "--intervals", dest="intervals")\n+    aparser.add_argument("-t", "--targets", dest="targets")\n+    aparser.add_argument("-f", "--fasta_path", dest="fasta_path")\n+    args = aparser.parse_args()\n+\n+    main(\n+        args.inputs,\n+        args.infile_estimator,\n+        args.infile1,\n+        args.infile2,\n+        args.outfile_result,\n+        outfile_object=args.outfile_object,\n+        outfile_weights=args.outfile_weights,\n+        groups=args.groups,\n+        ref_seq=args.ref_seq,\n+        intervals=args.intervals,\n+        targets=args.targets,\n+        fasta_path=args.fasta_path,\n+    )\n'
b
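Note (not part of the changeset): train_test_eval.py's "train_val_test" scheme performs two nested splits. A minimal sketch of the same idea using plain scikit-learn — galaxy_ml's train_test_split, used above, extends this to None arrays and split-by-group-names:

    import numpy as np
    from sklearn.model_selection import train_test_split

    X, y = np.arange(40).reshape(20, 2), np.r_[np.zeros(10), np.ones(10)]

    # First split holds out the test set; the second carves a
    # validation set out of the remaining training data.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
    X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.25, random_state=0)
    print(X_train.shape, X_val.shape, X_test.shape)  # (12, 2) (4, 2) (4, 2)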
diff -r 000000000000 -r af2624d5ab32 train_test_split.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/train_test_split.py Sat May 01 01:24:32 2021 +0000
[
@@ -0,0 +1,177 @@
+import argparse
+import json
+import warnings
+
+import pandas as pd
+from galaxy_ml.model_validations import train_test_split
+from galaxy_ml.utils import get_cv, read_columns
+
+
+def _get_single_cv_split(params, array, infile_labels=None, infile_groups=None):
+    """output (train, test) subset from a cv splitter
+
+    Parameters
+    ----------
+    params : dict
+        Galaxy tool inputs
+    array : pandas DataFrame object
+        The target dataset to split
+    infile_labels : str
+        File path to dataset containing target values
+    infile_groups : str
+        File path to dataset containing group values
+    """
+    y = None
+    groups = None
+
+    nth_split = params["mode_selection"]["nth_split"]
+
+    # read groups
+    if infile_groups:
+        header = (
+            "infer"
+            if (params["mode_selection"]["cv_selector"]["groups_selector"]["header_g"])
+            else None
+        )
+        column_option = params["mode_selection"]["cv_selector"]["groups_selector"][
+            "column_selector_options_g"
+        ]["selected_column_selector_option_g"]
+        if column_option in [
+            "by_index_number",
+            "all_but_by_index_number",
+            "by_header_name",
+            "all_but_by_header_name",
+        ]:
+            c = params["mode_selection"]["cv_selector"]["groups_selector"][
+                "column_selector_options_g"
+            ]["col_g"]
+        else:
+            c = None
+
+        groups = read_columns(
+            infile_groups,
+            c=c,
+            c_option=column_option,
+            sep="\t",
+            header=header,
+            parse_dates=True,
+        )
+        groups = groups.ravel()
+
+        params["mode_selection"]["cv_selector"]["groups_selector"] = groups
+
+    # read labels
+    if infile_labels:
+        target_input = params["mode_selection"]["cv_selector"].pop("target_input")
+        header = "infer" if target_input["header1"] else None
+        col_index = target_input["col"][0] - 1
+        df = pd.read_csv(infile_labels, sep="\t", header=header, parse_dates=True)
+        y = df.iloc[:, col_index].values
+
+    # construct the cv splitter object
+    splitter, groups = get_cv(params["mode_selection"]["cv_selector"])
+
+    total_n_splits = splitter.get_n_splits(array.values, y=y, groups=groups)
+    if nth_split > total_n_splits:
+        raise ValueError(
+            "Total number of splits is {}, but got `nth_split` "
+            "= {}".format(total_n_splits, nth_split)
+        )
+
+    # nth_split is 1-based: iterate the splitter until the
+    # requested split is reached
+    for i, (train_index, test_index) in enumerate(
+        splitter.split(array.values, y=y, groups=groups), start=1
+    ):
+        if i == nth_split:
+            break
+
+    train = array.iloc[train_index, :]
+    test = array.iloc[test_index, :]
+
+    return train, test
+
+
+def main(
+    inputs,
+    infile_array,
+    outfile_train,
+    outfile_test,
+    infile_labels=None,
+    infile_groups=None,
+):
+    """
+    Parameters
+    ----------
+    inputs : str
+        File path to galaxy tool parameter
+
+    infile_array : str
+        File path to the dataset containing the array to split
+
+    outfile_train : str
+        File path to dataset containing train split
+
+    outfile_test : str
+        File path to dataset containing test split
+
+    infile_labels : str
+        File path to dataset containing labels
+
+    infile_groups : str
+        File path to dataset containing groups
+    """
+    warnings.simplefilter("ignore")
+
+    with open(inputs, "r") as param_handler:
+        params = json.load(param_handler)
+
+    input_header = params["header0"]
+    header = "infer" if input_header else None
+    array = pd.read_csv(infile_array, sep="\t", header=header, parse_dates=True)
+
+    # train test split
+    if params["mode_selection"]["selected_mode"] == "train_test_split":
+        options = params["mode_selection"]["options"]
+        shuffle_selection = options.pop("shuffle_selection")
+        options["shuffle"] = shuffle_selection["shuffle"]
+        if infile_labels:
+            header = "infer" if shuffle_selection["header1"] else None
+            col_index = shuffle_selection["col"][0] - 1
+            df = pd.read_csv(infile_labels, sep="\t", header=header, parse_dates=True)
+            labels = df.iloc[:, col_index].values
+            options["labels"] = labels
+
+        train, test = train_test_split(array, **options)
+
+    # cv splitter
+    else:
+        train, test = _get_single_cv_split(
+            params, array, infile_labels=infile_labels, infile_groups=infile_groups
+        )
+
+    print("Input shape: %s" % repr(array.shape))
+    print("Train shape: %s" % repr(train.shape))
+    print("Test shape: %s" % repr(test.shape))
+    train.to_csv(outfile_train, sep="\t", header=input_header, index=False)
+    test.to_csv(outfile_test, sep="\t", header=input_header, index=False)
+
+
+if __name__ == "__main__":
+    aparser = argparse.ArgumentParser()
+    aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
+    aparser.add_argument("-X", "--infile_array", dest="infile_array")
+    aparser.add_argument("-y", "--infile_labels", dest="infile_labels")
+    aparser.add_argument("-g", "--infile_groups", dest="infile_groups")
+    aparser.add_argument("-o", "--outfile_train", dest="outfile_train")
+    aparser.add_argument("-t", "--outfile_test", dest="outfile_test")
+    args = aparser.parse_args()
+
+    main(
+        args.inputs,
+        args.infile_array,
+        args.outfile_train,
+        args.outfile_test,
+        args.infile_labels,
+        args.infile_groups,
+    )
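Note (not part of the changeset): _get_single_cv_split extracts only the nth split produced by the splitter. A minimal sketch of that selection logic with a plain scikit-learn splitter — KFold here stands in for whatever get_cv returns:

    import numpy as np
    from sklearn.model_selection import KFold

    array = np.arange(20).reshape(10, 2)
    splitter, nth_split = KFold(n_splits=5), 3

    # Iterate until the requested 1-based split is reached.
    for i, (train_index, test_index) in enumerate(splitter.split(array), start=1):
        if i == nth_split:
            break
    print(train_index, test_index)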