Repository 'sklearn_build_pipeline'
hg clone https://toolshed.g2.bx.psu.edu/repos/bgruening/sklearn_build_pipeline

Changeset 25:118e230e85ce (2023-08-09)
Previous changeset 24:b1eda492f063 (2022-08-11) Next changeset 26:9160cb4069c2 (2023-09-22)
Commit message:
planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit 9981e25b00de29ed881b2229a173a8c812ded9bb
modified:
association_rules.py
fitted_model_eval.py
keras_deep_learning.py
keras_macros.xml
keras_train_and_eval.py
label_encoder.py
main_macros.xml
ml_visualization_ex.py
model_prediction.py
pipeline.xml
search_model_validation.py
simple_model_fit.py
stacking_ensembles.py
test-data/abc_model01
test-data/abc_result01
test-data/abr_model01
test-data/abr_result01
test-data/auc.txt
test-data/best_params_.txt
test-data/best_score_.tabular
test-data/cluster_result01.txt
test-data/cluster_result02.txt
test-data/cluster_result03.txt
test-data/cluster_result04.txt
test-data/cluster_result05.txt
test-data/cluster_result06.txt
test-data/cluster_result07.txt
test-data/cluster_result08.txt
test-data/cluster_result09.txt
test-data/cluster_result10.txt
test-data/cluster_result11.txt
test-data/cluster_result12.txt
test-data/cluster_result13.txt
test-data/cluster_result14.txt
test-data/cluster_result15.txt
test-data/cluster_result16.txt
test-data/cluster_result20.txt
test-data/feature_importances_.tabular
test-data/feature_selection_result01
test-data/feature_selection_result08
test-data/feature_selection_result09
test-data/feature_selection_result12
test-data/fitted_model_eval01.tabular
test-data/gbc_model01
test-data/gbc_result01
test-data/gbr_model01
test-data/get_params.tabular
test-data/glm_model01
test-data/glm_model02
test-data/glm_model03
test-data/glm_model04
test-data/glm_model05
test-data/glm_model06
test-data/glm_model07
test-data/glm_model08
test-data/glm_result01
test-data/glm_result02
test-data/glm_result03
test-data/glm_result04
test-data/glm_result05
test-data/glm_result06
test-data/glm_result07
test-data/glm_result08
test-data/jaccard_similarity_score.txt
test-data/keras01.json
test-data/keras02.json
test-data/keras03.json
test-data/keras04.json
test-data/keras_batch_model01
test-data/keras_batch_model02
test-data/keras_batch_model03
test-data/keras_batch_model04
test-data/keras_model01
test-data/keras_model02
test-data/keras_model04
test-data/lda_model01
test-data/lda_model02
test-data/lda_prediction_result01.tabular
test-data/lda_prediction_result02.tabular
test-data/lgb_class_model.txt
test-data/lgb_regr_model.txt
test-data/ml_vis01.html
test-data/ml_vis02.html
test-data/ml_vis03.html
test-data/ml_vis04.html
test-data/ml_vis05.html
test-data/ml_vis05.png
test-data/model_fit01
test-data/model_fit02
test-data/model_fit02.h5
test-data/model_pred01.tabular
test-data/mv_result03.tabular
test-data/mv_result05.tabular
test-data/named_steps.txt
test-data/nn_model01
test-data/nn_model02
test-data/nn_model03
test-data/pipeline01
test-data/pipeline02
test-data/pipeline03
test-data/pipeline04
test-data/pipeline05
test-data/pipeline06
test-data/pipeline07
test-data/pipeline08
test-data/pipeline09
test-data/pipeline10
test-data/pipeline11
test-data/pipeline12
test-data/pipeline14
test-data/pipeline15
test-data/pipeline16
test-data/pipeline17
test-data/precision_recall_curve.txt
test-data/prp_model01
test-data/prp_model02
test-data/prp_model03
test-data/prp_model04
test-data/prp_model05
test-data/prp_model07
test-data/prp_model08
test-data/prp_model09
test-data/prp_result01
test-data/prp_result02
test-data/prp_result03
test-data/prp_result04
test-data/prp_result07
test-data/pw_metric02.tabular
test-data/qda_model01
test-data/qda_prediction_result01.tabular
test-data/ranking_.tabular
test-data/rfc_model01
test-data/rfc_result01
test-data/rfr_model01
test-data/rfr_result01
test-data/searchCV01
test-data/searchCV02
test-data/svc_model01
test-data/svc_model02
test-data/svc_model03
test-data/svc_prediction_result03.tabular
test-data/train_test_eval_model01
test-data/train_test_eval_weights01.h5
test-data/train_test_eval_weights02.h5
test-data/train_test_split_test02.tabular
test-data/train_test_split_train02.tabular
test-data/y_score.tabular
test-data/y_true.tabular
train_test_eval.py
train_test_split.py
added:
pdb70_cs219.ffdata
test-data/GridSearchCV01.h5mlm
test-data/LinearRegression01.h5mlm
test-data/RFE.h5mlm
test-data/RandomForestClassifier.h5mlm
test-data/RandomForestRegressor01.h5mlm
test-data/StackingCVRegressor01.h5mlm
test-data/StackingRegressor02.h5mlm
test-data/StackingVoting03.h5mlm
test-data/XGBRegressor01.h5mlm
test-data/best_estimator_.h5mlm
test-data/classifier_y.tabular
test-data/final_estimator.h5mlm
test-data/keras05.json
test-data/keras_batch_model05
test-data/pipeline18
test-data/searchCV03
test-data/train_test_eval_model02
test-data/unsafe_model.h5mlm
test-data/y_sorted.tabular
diff -r b1eda492f063 -r 118e230e85ce association_rules.py
--- a/association_rules.py Thu Aug 11 09:20:25 2022 +0000
+++ b/association_rules.py Wed Aug 09 13:40:06 2023 +0000
@@ -7,7 +7,16 @@
 from mlxtend.preprocessing import TransactionEncoder
 
 
-def main(inputs, infile, outfile, min_support=0.5, min_confidence=0.5, min_lift=1.0, min_conviction=1.0, max_length=None):
+def main(
+    inputs,
+    infile,
+    outfile,
+    min_support=0.5,
+    min_confidence=0.5,
+    min_lift=1.0,
+    min_conviction=1.0,
+    max_length=None,
+):
     """
     Parameter
     ---------
@@ -36,13 +45,13 @@
         Maximum length
 
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
-    input_header = params['header0']
-    header = 'infer' if input_header else None
+    input_header = params["header0"]
+    header = "infer" if input_header else None
 
     with open(infile) as fp:
         lines = fp.read().splitlines()
@@ -65,41 +74,45 @@
 
     # Extract frequent itemsets for association rule mining
     # use_colnames: Use DataFrames' column names in the returned DataFrame instead of column indices
-    frequent_itemsets = fpgrowth(df, min_support=min_support, use_colnames=True, max_len=max_length)
+    frequent_itemsets = fpgrowth(
+        df, min_support=min_support, use_colnames=True, max_len=max_length
+    )
 
     # Get association rules, with confidence larger than min_confidence
-    rules = association_rules(frequent_itemsets, metric="confidence", min_threshold=min_confidence)
+    rules = association_rules(
+        frequent_itemsets, metric="confidence", min_threshold=min_confidence
+    )
 
     # Filter association rules, keeping rules with lift and conviction larger than min_liftand and min_conviction
-    rules = rules[(rules['lift'] >= min_lift) & (rules['conviction'] >= min_conviction)]
+    rules = rules[(rules["lift"] >= min_lift) & (rules["conviction"] >= min_conviction)]
 
     # Convert columns from frozenset to list (more readable)
-    rules['antecedents'] = rules['antecedents'].apply(list)
-    rules['consequents'] = rules['consequents'].apply(list)
+    rules["antecedents"] = rules["antecedents"].apply(list)
+    rules["consequents"] = rules["consequents"].apply(list)
 
     # The next 3 steps are intended to fix the order of the association
     # rules generated, so tests that rely on diff'ing a desired output
     # with an expected output can pass
 
     # 1) Sort entry in every row/column for columns 'antecedents' and 'consequents'
-    rules['antecedents'] = rules['antecedents'].apply(lambda row: sorted(row))
-    rules['consequents'] = rules['consequents'].apply(lambda row: sorted(row))
+    rules["antecedents"] = rules["antecedents"].apply(lambda row: sorted(row))
+    rules["consequents"] = rules["consequents"].apply(lambda row: sorted(row))
 
     # 2) Create two temporary string columns to sort on
-    rules['ant_str'] = rules['antecedents'].apply(lambda row: " ".join(row))
-    rules['con_str'] = rules['consequents'].apply(lambda row: " ".join(row))
+    rules["ant_str"] = rules["antecedents"].apply(lambda row: " ".join(row))
+    rules["con_str"] = rules["consequents"].apply(lambda row: " ".join(row))
 
     # 3) Sort results so they are re-producable
-    rules.sort_values(by=['ant_str', 'con_str'], inplace=True)
-    del rules['ant_str']
-    del rules['con_str']
+    rules.sort_values(by=["ant_str", "con_str"], inplace=True)
+    del rules["ant_str"]
+    del rules["con_str"]
     rules.reset_index(drop=True, inplace=True)
 
     # Write association rules and metrics to file
     rules.to_csv(outfile, sep="\t", index=False)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-y", "--infile", dest="infile", required=True)
@@ -111,6 +124,13 @@
     aparser.add_argument("-t", "--length", dest="length", default=5)
     args = aparser.parse_args()
 
-    main(args.inputs, args.infile, args.outfile,
-         min_support=float(args.support), min_confidence=float(args.confidence),
-         min_lift=float(args.lift), min_conviction=float(args.conviction), max_length=int(args.length))
+    main(
+        args.inputs,
+        args.infile,
+        args.outfile,
+        min_support=float(args.support),
+        min_confidence=float(args.confidence),
+        min_lift=float(args.lift),
+        min_conviction=float(args.conviction),
+        max_length=int(args.length),
+    )
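
The refactored script above is a thin wrapper around mlxtend's FP-Growth implementation. A minimal, self-contained sketch of the same mining flow (the transactions and thresholds below are illustrative, not taken from the tool's test data):

    import pandas as pd
    from mlxtend.frequent_patterns import association_rules, fpgrowth
    from mlxtend.preprocessing import TransactionEncoder

    # Toy transactions; in the tool these come from a tab-separated infile.
    transactions = [["milk", "bread"], ["bread", "butter"], ["milk", "bread", "butter"]]

    # One-hot encode the transactions into a boolean DataFrame.
    te = TransactionEncoder()
    df = pd.DataFrame(te.fit(transactions).transform(transactions), columns=te.columns_)

    # Frequent itemsets, then rules filtered on confidence, lift and conviction,
    # mirroring the main() function above.
    frequent_itemsets = fpgrowth(df, min_support=0.5, use_colnames=True, max_len=None)
    rules = association_rules(frequent_itemsets, metric="confidence", min_threshold=0.5)
    rules = rules[(rules["lift"] >= 1.0) & (rules["conviction"] >= 1.0)]
    print(rules[["antecedents", "consequents", "support", "confidence"]])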
diff -r b1eda492f063 -r 118e230e85ce fitted_model_eval.py
--- a/fitted_model_eval.py Thu Aug 11 09:20:25 2022 +0000
+++ b/fitted_model_eval.py Wed Aug 09 13:40:06 2023 +0000
@@ -3,11 +3,11 @@
 import warnings
 
 import pandas as pd
-from galaxy_ml.utils import get_scoring, load_model, read_columns
+from galaxy_ml.model_persist import load_model_from_h5
+from galaxy_ml.utils import clean_params, get_scoring, read_columns
 from scipy.io import mmread
-from sklearn.metrics.scorer import _check_multimetric_scoring
+from sklearn.metrics._scorer import _check_multimetric_scoring
 from sklearn.model_selection._validation import _score
-from sklearn.pipeline import Pipeline
 
 
 def _get_X_y(params, infile1, infile2):
@@ -75,7 +75,12 @@
         loaded_df[df_key] = infile2
 
     y = read_columns(
-        infile2, c=c, c_option=column_option, sep="\t", header=header, parse_dates=True
+        infile2,
+        c=c,
+        c_option=column_option,
+        sep="\t",
+        header=header,
+        parse_dates=True,
     )
     if len(y.shape) == 2 and y.shape[1] == 1:
         y = y.ravel()
@@ -83,14 +88,7 @@
     return X, y
 
 
-def main(
-    inputs,
-    infile_estimator,
-    outfile_eval,
-    infile_weights=None,
-    infile1=None,
-    infile2=None,
-):
+def main(inputs, infile_estimator, outfile_eval, infile1=None, infile2=None):
     """
     Parameter
     ---------
@@ -103,9 +101,6 @@
     outfile_eval : str
         File path to save the evalulation results, tabular
 
-    infile_weights : str
-        File path to weights input
-
     infile1 : str
         File path to dataset containing features
 
@@ -120,40 +115,20 @@
     X_test, y_test = _get_X_y(params, infile1, infile2)
 
     # load model
-    with open(infile_estimator, "rb") as est_handler:
-        estimator = load_model(est_handler)
-
-    main_est = estimator
-    if isinstance(estimator, Pipeline):
-        main_est = estimator.steps[-1][-1]
-    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
-        if not infile_weights or infile_weights == "None":
-            raise ValueError(
-                "The selected model skeleton asks for weights, "
-                "but no dataset for weights was provided!"
-            )
-        main_est.load_weights(infile_weights)
+    estimator = load_model_from_h5(infile_estimator)
+    estimator = clean_params(estimator)
 
     # handle scorer, convert to scorer dict
-    # Check if scoring is specified
     scoring = params["scoring"]
-    if scoring is not None:
-        # get_scoring() expects secondary_scoring to be a comma separated string (not a list)
-        # Check if secondary_scoring is specified
-        secondary_scoring = scoring.get("secondary_scoring", None)
-        if secondary_scoring is not None:
-            # If secondary_scoring is specified, convert the list into comman separated string
-            scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
-
     scorer = get_scoring(scoring)
-    scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
+    if not isinstance(scorer, (dict, list)):
+        scorer = [scoring["primary_scoring"]]
+    scorer = _check_multimetric_scoring(estimator, scoring=scorer)
 
     if hasattr(estimator, "evaluate"):
-        scores = estimator.evaluate(
-            X_test, y_test=y_test, scorer=scorer, is_multimetric=True
-        )
+        scores = estimator.evaluate(X_test, y_test=y_test, scorer=scorer)
     else:
-        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
+        scores = _score(estimator, X_test, y_test, scorer)
 
     # handle output
     for name, score in scores.items():
@@ -167,7 +142,6 @@
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
-    aparser.add_argument("-w", "--infile_weights", dest="infile_weights")
     aparser.add_argument("-X", "--infile1", dest="infile1")
     aparser.add_argument("-y", "--infile2", dest="infile2")
     aparser.add_argument("-O", "--outfile_eval", dest="outfile_eval")
@@ -177,7 +151,6 @@
         args.inputs,
         args.infile_estimator,
         args.outfile_eval,
-        infile_weights=args.infile_weights,
         infile1=args.infile1,
         infile2=args.infile2,
     )
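
The updated tool loads the estimator with galaxy_ml's load_model_from_h5 and scores it with a dict of scorers built by _check_multimetric_scoring. A rough equivalent of the multimetric scoring step using only public scikit-learn API (the estimator is fit here just to make the sketch runnable; in the tool it comes from the .h5mlm input, and the metric names are illustrative):

    import pandas as pd
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import get_scorer
    from sklearn.model_selection import train_test_split

    X, y = make_classification(n_samples=200, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    estimator = LogisticRegression(max_iter=1000).fit(X_train, y_train)

    # One scorer per metric name; each scorer is a callable scorer(estimator, X, y).
    scorers = {name: get_scorer(name) for name in ("accuracy", "f1_macro")}
    scores = {name: [scorer(estimator, X_test, y_test)] for name, scorer in scorers.items()}

    # The tool writes the scores as a one-row tabular file.
    pd.DataFrame(scores).to_csv("eval_results.tabular", sep="\t", index=False)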
diff -r b1eda492f063 -r 118e230e85ce keras_deep_learning.py
--- a/keras_deep_learning.py Thu Aug 11 09:20:25 2022 +0000
+++ b/keras_deep_learning.py Wed Aug 09 13:40:06 2023 +0000
(Diff shown truncated in the changeset view. The visible portions show: imports switched from keras to tensorflow.keras and from pickle to galaxy_ml.model_persist.dump_model_to_h5, with pandas and get_search_params dropped; docstrings reflowed in the _handle_* and get_*_model helpers; the first-layer input_shape check now uses model.get_config()["layers"] instead of the private _layers attribute; build_keras_model drops the infile_weights and outfile_params arguments, maps class_name "Functional" (rather than "Model") to the functional builder, raises an error for the deprecated prefitted mode, accepts metrics as either a list or a comma-separated string for older Galaxy versions, and saves the estimator with dump_model_to_h5 instead of pickle; the -w/--infile_weights and -p/--outfile_params command-line options are removed.)
diff -r b1eda492f063 -r 118e230e85ce keras_macros.xml
--- a/keras_macros.xml Thu Aug 11 09:20:25 2022 +0000
+++ b/keras_macros.xml Wed Aug 09 13:40:06 2023 +0000
(Diff shown truncated in the changeset view. The visible portions show: the @KERAS_VERSION@ token bumped from 0.5.0 to 2.10.0; the optimizer macros renamed from the deprecated lr token to learning_rate, with keras_optimizer_common_more now exposing beta_1, beta_2 and epsilon instead of decay; Nadam reusing the common macro and a new Ftrl optimizer section added (learning_rate_power, initial_accumulator_value, l1/l2/l2_shrinkage regularization strengths, beta); keras_fit_params_section gaining a <yield /> slot and a verbose parameter; and self-closing XML tags normalized throughout.)
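
The renamed macros track the Keras 2.x optimizer API, where learning_rate replaces the deprecated lr argument and Ftrl becomes available. The equivalent constructor calls, using the default values shown in the macros:

    from tensorflow.keras.optimizers import Adam, Ftrl

    adam = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-07)
    ftrl = Ftrl(
        learning_rate=0.001,
        learning_rate_power=-0.5,
        initial_accumulator_value=0.1,
        l1_regularization_strength=0.0,
        l2_regularization_strength=0.0,
        l2_shrinkage_regularization_strength=0.0,
        beta=0.0,
    )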
diff -r b1eda492f063 -r 118e230e85ce keras_train_and_eval.py
--- a/keras_train_and_eval.py Thu Aug 11 09:20:25 2022 +0000
+++ b/keras_train_and_eval.py Wed Aug 09 13:40:06 2023 +0000
(Diff shown truncated in the changeset view. The visible portions show: imports reorganized to pull KerasGBatchClassifier and _predict_generator from galaxy_ml.keras_galaxy_models, dump_model_to_h5/load_model_from_h5 from galaxy_ml.model_persist, and _check_multimetric_scoring from sklearn.metrics._scorer; the _fit_and_score monkey-patching and pickle usage removed; safe_indexing replaced by sklearn.utils._safe_indexing; "_dir" added to NON_SEARCHABLE; the old _evaluate helper replaced by _evaluate_keras_and_sklearn_scores, which scores a fitted KerasGBatchClassifier with both Keras metrics and sklearn scorers; for other estimators, Keras model_.evaluate results (keyed by metrics_names) merged with sklearn _score results; the fitted estimator saved with dump_model_to_h5, and the outfile_weights argument with its -w option removed.)
diff -r b1eda492f063 -r 118e230e85ce label_encoder.py
--- a/label_encoder.py Thu Aug 11 09:20:25 2022 +0000
+++ b/label_encoder.py Wed Aug 09 13:40:06 2023 +0000
@@ -21,24 +21,24 @@
         File path to output vector
 
     """
-    warnings.simplefilter('ignore')
+    warnings.simplefilter("ignore")
 
-    with open(inputs, 'r') as param_handler:
+    with open(inputs, "r") as param_handler:
         params = json.load(param_handler)
 
-    input_header = params['header0']
-    header = 'infer' if input_header else None
+    input_header = params["header0"]
+    header = "infer" if input_header else None
 
-    input_vector = pd.read_csv(infile, sep='\t', header=header)
+    input_vector = pd.read_csv(infile, sep="\t", header=header)
 
     le = LabelEncoder()
 
     output_vector = le.fit_transform(input_vector)
 
-    np.savetxt(outfile, output_vector, fmt="%d", delimiter='\t')
+    np.savetxt(outfile, output_vector, fmt="%d", delimiter="\t")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-y", "--infile", dest="infile")
diff -r b1eda492f063 -r 118e230e85ce main_macros.xml
--- a/main_macros.xml Thu Aug 11 09:20:25 2022 +0000
+++ b/main_macros.xml Wed Aug 09 13:40:06 2023 +0000
(Diff shown truncated in the changeset view. The visible portions show: the @VERSION@ token bumped from 1.0.8.4 to 1.0.10.0 and a new @PROFILE@ token set to 21.05; python_requirements now pinning python 3.9 and galaxy-ml 0.10.0; macro_stdio gaining out-of-memory (exit code 137) and negative exit-code handling; the generic-interface and citation macros re-indented; a scipy citation macro added; the skrebate, xgboost and imblearn citation entries reformatted; and the selene citation switched from a DOI to a BibTeX entry.)
diff -r b1eda492f063 -r 118e230e85ce ml_visualization_ex.py
--- a/ml_visualization_ex.py Thu Aug 11 09:20:25 2022 +0000
+++ b/ml_visualization_ex.py Wed Aug 09 13:40:06 2023 +0000
@@ -9,13 +9,18 @@
 import pandas as pd
 import plotly
 import plotly.graph_objs as go
-from galaxy_ml.utils import load_model, read_columns, SafeEval
-from keras.models import model_from_json
-from keras.utils import plot_model
-from sklearn.feature_selection.base import SelectorMixin
-from sklearn.metrics import (auc, average_precision_score, confusion_matrix,
-                             precision_recall_curve, roc_curve)
+from galaxy_ml.model_persist import load_model_from_h5
+from galaxy_ml.utils import read_columns, SafeEval
+from sklearn.feature_selection._base import SelectorMixin
+from sklearn.metrics import (
+    auc,
+    average_precision_score,
+    precision_recall_curve,
+    roc_curve,
+)
 from sklearn.pipeline import Pipeline
+from tensorflow.keras.models import model_from_json
+from tensorflow.keras.utils import plot_model
 
 safe_eval = SafeEval()
 
@@ -253,30 +258,6 @@
     os.rename(os.path.join(folder, "output.svg"), os.path.join(folder, "output"))
 
 
-def get_dataframe(file_path, plot_selection, header_name, column_name):
-    header = "infer" if plot_selection[header_name] else None
-    column_option = plot_selection[column_name]["selected_column_selector_option"]
-    if column_option in [
-        "by_index_number",
-        "all_but_by_index_number",
-        "by_header_name",
-        "all_but_by_header_name",
-    ]:
-        col = plot_selection[column_name]["col1"]
-    else:
-        col = None
-    _, input_df = read_columns(
-        file_path,
-        c=col,
-        c_option=column_option,
-        return_df=True,
-        sep="\t",
-        header=header,
-        parse_dates=True,
-    )
-    return input_df
-
-
 def main(
     inputs,
     infile_estimator=None,
@@ -290,10 +271,6 @@
     targets=None,
     fasta_path=None,
     model_config=None,
-    true_labels=None,
-    predicted_labels=None,
-    plot_color=None,
-    title=None,
 ):
     """
     Parameter
@@ -334,18 +311,6 @@
 
     model_config : str, default is None
         File path to dataset containing JSON config for neural networks
-
-    true_labels : str, default is None
-        File path to dataset containing true labels
-
-    predicted_labels : str, default is None
-        File path to dataset containing true predicted labels
-
-    plot_color : str, default is None
-        Color of the confusion matrix heatmap
-
-    title : str, default is None
-        Title of the confusion matrix heatmap
     """
     warnings.simplefilter("ignore")
 
@@ -357,8 +322,7 @@
     plot_format = params["plotting_selection"]["plot_format"]
 
     if plot_type == "feature_importances":
-        with open(infile_estimator, "rb") as estimator_handler:
-            estimator = load_model(estimator_handler)
+        estimator = load_model_from_h5(infile_estimator)
 
         column_option = params["plotting_selection"]["column_selector_options"][
             "selected_column_selector_option"
@@ -570,36 +534,6 @@
 
         return 0
 
-    elif plot_type == "classification_confusion_matrix":
-        plot_selection = params["plotting_selection"]
-        input_true = get_dataframe(
-            true_labels, plot_selection, "header_true", "column_selector_options_true"
-        )
-        header_predicted = "infer" if plot_selection["header_predicted"] else None
-        input_predicted = pd.read_csv(
-            predicted_labels, sep="\t", parse_dates=True, header=header_predicted
-        )
-        true_classes = input_true.iloc[:, -1].copy()
-        predicted_classes = input_predicted.iloc[:, -1].copy()
-        axis_labels = list(set(true_classes))
-        c_matrix = confusion_matrix(true_classes, predicted_classes)
-        fig, ax = plt.subplots(figsize=(7, 7))
-        im = plt.imshow(c_matrix, cmap=plot_color)
-        for i in range(len(c_matrix)):
-            for j in range(len(c_matrix)):
-                ax.text(j, i, c_matrix[i, j], ha="center", va="center", color="k")
-        ax.set_ylabel("True class labels")
-        ax.set_xlabel("Predicted class labels")
-        ax.set_title(title)
-        ax.set_xticks(axis_labels)
-        ax.set_yticks(axis_labels)
-        fig.colorbar(im, ax=ax)
-        fig.tight_layout()
-        plt.savefig("output.png", dpi=125)
-        os.rename("output.png", "output")
-
-        return 0
-
     # save pdf file to disk
     # fig.write_image("image.pdf", format='pdf')
     # fig.write_image("image.pdf", format='pdf', width=340*2, height=226*2)
@@ -619,10 +553,6 @@
     aparser.add_argument("-t", "--targets", dest="targets")
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
     aparser.add_argument("-c", "--model_config", dest="model_config")
-    aparser.add_argument("-tl", "--true_labels", dest="true_labels")
-    aparser.add_argument("-pl", "--predicted_labels", dest="predicted_labels")
-    aparser.add_argument("-pc", "--plot_color", dest="plot_color")
-    aparser.add_argument("-pt", "--title", dest="title")
     args = aparser.parse_args()
 
     main(
@@ -638,8 +568,4 @@
         targets=args.targets,
         fasta_path=args.fasta_path,
         model_config=args.model_config,
-        true_labels=args.true_labels,
-        predicted_labels=args.predicted_labels,
-        plot_color=args.plot_color,
-        title=args.title,
     )
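
The plotting tool keeps building its curves from sklearn's metric helpers and rendering them with plotly. A minimal sketch of the ROC branch with synthetic scores (the output file name is illustrative, not the tool's):

    import numpy as np
    import plotly.graph_objs as go
    from sklearn.metrics import auc, roc_curve

    y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0])
    y_score = np.array([0.1, 0.4, 0.35, 0.8, 0.7, 0.2, 0.9, 0.5])

    fpr, tpr, _ = roc_curve(y_true, y_score)
    trace = go.Scatter(x=fpr, y=tpr, mode="lines", name="ROC (AUC = %.3f)" % auc(fpr, tpr))
    go.Figure(data=[trace]).write_html("ml_vis_roc.html")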
diff -r b1eda492f063 -r 118e230e85ce model_prediction.py
--- a/model_prediction.py Thu Aug 11 09:20:25 2022 +0000
+++ b/model_prediction.py Wed Aug 09 13:40:06 2023 +0000
@@ -4,9 +4,10 @@
 
 import numpy as np
 import pandas as pd
-from galaxy_ml.utils import get_module, load_model, read_columns, try_get_attr
+from galaxy_ml.model_persist import load_model_from_h5
+from galaxy_ml.utils import (clean_params, get_module, read_columns,
+                             try_get_attr)
 from scipy.io import mmread
-from sklearn.pipeline import Pipeline
 
 N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
 
@@ -15,7 +16,6 @@
     inputs,
     infile_estimator,
     outfile_predict,
-    infile_weights=None,
     infile1=None,
     fasta_path=None,
     ref_seq=None,
@@ -27,15 +27,12 @@
     inputs : str
         File path to galaxy tool parameter
 
-    infile_estimator : strgit
+    infile_estimator : str
         File path to trained estimator input
 
     outfile_predict : str
         File path to save the prediction results, tabular
 
-    infile_weights : str
-        File path to weights input
-
     infile1 : str
         File path to dataset containing features
 
@@ -54,19 +51,8 @@
         params = json.load(param_handler)
 
     # load model
-    with open(infile_estimator, "rb") as est_handler:
-        estimator = load_model(est_handler)
-
-    main_est = estimator
-    if isinstance(estimator, Pipeline):
-        main_est = estimator.steps[-1][-1]
-    if hasattr(main_est, "config") and hasattr(main_est, "load_weights"):
-        if not infile_weights or infile_weights == "None":
-            raise ValueError(
-                "The selected model skeleton asks for weights, "
-                "but dataset for weights wan not selected!"
-            )
-        main_est.load_weights(infile_weights)
+    estimator = load_model_from_h5(infile_estimator)
+    estimator = clean_params(estimator)
 
     # handle data input
     input_type = params["input_options"]["selected_input"]
@@ -221,7 +207,6 @@
     aparser = argparse.ArgumentParser()
     aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
     aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
-    aparser.add_argument("-w", "--infile_weights", dest="infile_weights")
     aparser.add_argument("-X", "--infile1", dest="infile1")
     aparser.add_argument("-O", "--outfile_predict", dest="outfile_predict")
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
@@ -233,7 +218,6 @@
         args.inputs,
         args.infile_estimator,
         args.outfile_predict,
-        infile_weights=args.infile_weights,
         infile1=args.infile1,
         fasta_path=args.fasta_path,
         ref_seq=args.ref_seq,
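
Note: model_prediction.py now reads the trained estimator from a single HDF5 (.h5mlm) artifact instead of unpickling it and loading Keras weights from a second dataset. A minimal sketch of the new loading path, assuming a Galaxy-ML release that provides galaxy_ml.model_persist; the file names and the feature table below are placeholders, not part of the tool:

    import pandas as pd
    from galaxy_ml.model_persist import load_model_from_h5
    from galaxy_ml.utils import clean_params

    # One artifact holds the whole pipeline/estimator; no separate weights file.
    estimator = load_model_from_h5("model.h5mlm")      # placeholder path
    estimator = clean_params(estimator)                # normalize runtime-only params

    X = pd.read_csv("features.tabular", sep="\t").to_numpy()   # placeholder dataset
    preds = estimator.predict(X)
    pd.DataFrame(preds, columns=["predicted"]).to_csv(
        "predicted.tabular", sep="\t", index=False)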
diff -r b1eda492f063 -r 118e230e85ce pdb70_cs219.ffdata
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/pdb70_cs219.ffdata Wed Aug 09 13:40:06 2023 +0000
@@ -0,0 +1,189 @@
+
+
+
+
+<!DOCTYPE HTML>
+<html>
+    <!--base.mako-->
+    
+
+
+    <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <meta name = "viewport" content = "maximum-scale=1.0">
+        <meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1">
+
+        <title>
+            Galaxy
+            | Europe
+            | 
+        </title>
+
+        <link rel="index" href="/"/>
+
+        
+        
+    <link href="/static/style/bootstrap-tour.css?v=1618364054" media="screen" rel="stylesheet" type="text/css" />
+    <link href="/static/dist/base.css?v=1618364054" media="screen" rel="stylesheet" type="text/css" />
+
+        
+    <script src="/static/dist/libs.chunk.js?v=1618364054" type="text/javascript"></script>
+<script src="/static/dist/base.chunk.js?v=1618364054" type="text/javascript"></script>
+<script src="/static/dist/generic.bundled.js?v=1618364054" type="text/javascript"></script>
+
+        
+    <!-- message.mako javascript_app() -->
+    
+
+    
+    <script type="text/javascript">
+        // galaxy_client_app.mako, load
+
+        var bootstrapped;
+        try {
+            bootstrapped = 
+{}
+;
+        } catch(err) {
+            console.warn("Unable to parse bootstrapped variable", err);
+            bootstrapped = {};
+        }
+
+        var options = {
+            root: '/',
+            config: 
+    
+{
+"display_galaxy_brand": true,
+"chunk_upload_size": 104857600,
+"use_remote_user": null,
+"enable_oidc": true,
+"mailing_join_addr": null,
+"select_type_workflow_threshold": -1,
+"myexperiment_target_url": "www.myexperiment.org:80",
+"tool_recommendation_model_path": "https://github.com/galaxyproject/galaxy-test-data/raw/master/tool_recommendation_model.hdf5",
+"simplified_workflow_run_ui_target_history": "current",
+"interactivetools_enable": true,
+"is_admin_user": false,
+"show_welcome_with_login": true,
+"welcome_url": "/static/welcome.html",
+"allow_user_impersonation": true,
+"overwrite_model_recommendations": false,
+"topk_recommendations": 10,
+"user_library_import_dir_available": false,
+"ga_code": null,
+"enable_beta_markdown_export": true,
+"visualizations_visible": true,
+"enable_tool_recommendations": true,
+"enable_unique_workflow_defaults": false,
+"registration_warning_message": "Please register only one account. The usegalaxy.eu service is provided free of charge and has limited computational and data storage resources. <strong>Registration and usage of multiple accounts is tracked and such accounts are subject to termination and data deletion.<\/strong>",
+"logo_src": "/static/favicon.png",
+"enable_quotas": true,
+"server_mail_configured": true,
+"citation_url": "https://galaxyproject.org/citing-galaxy",
+"allow_user_dataset_purge": true,
+"ftp_upload_site": "ftp://ftp.usegalaxy.eu",
+"terms_url": "https://usegalaxy.eu/terms",
+"upload_from_form_button": "always-on",
+"wiki_url": "https://galaxyproject.org/",
+"logo_src_secondary": null,
+"aws_estimate": true,
+"single_user": false,
+"datatypes_disable_auto": false,
+"brand": "Europe",
+"mailing_lists": "https://galaxyproject.org/mailing-lists/",
+"python": [
+3,
+6
+],
+"release_doc_base_url": "https://docs.galaxyproject.org/en/release_",
+"enable_openid": false,
+"cookie_domain": null,
+"message_box_content": "You are using the new UseGalaxy.eu backend server, let us know if you encounter any issues!",
+"admin_tool_recommendations_path": "/opt/galaxy/config/tool_recommendations_overwrite.yml",
+"search_url": "https://galaxyproject.org/search/",
+"remote_user_logout_href": null,
+"default_locale": "auto",
+"screencasts_url": "https://vimeo.com/galaxyproject",
+"quota_url": "https://galaxyproject.org/support/account-quotas/",
+"version_major": "21.01",
+"simplified_workflow_run_ui": "prefer",
+"allow_user_creation": true,
+"lims_doc_url": "https://usegalaxy.org/u/rkchak/p/sts",
+"message_box_visible": false,
+"has_user_tool_filters": true,
+"message_box_class": "info",
+"require_login": false,
+"logo_url": "/",
+"support_url": "https://galaxyproject.org/support/",
+"simplified_workflow_run_ui_job_cache": "off",
+"server_startttime": 1618364054,
+"oidc": {
+"elixir": {
+"icon": "https://elixir-europe.org/sites/default/files/images/login-button-orange.png"
+}
+},
+"version_minor": "",
+"helpsite_url": "https://help.galaxyproject.org/c/usegalaxy-eu-support",
+"file_sources_configured": true,
+"inactivity_box_content": "Your account has not been activated yet.  Feel free to browse around and see what's available, but you won't be able to upload data or run jobs until you have verified your email address.",
+"nginx_upload_path": "/_upload"
+}
+,
+            user: 
+    
+{
+"total_disk_usage": 0,
+"nice_total_disk_usage": "0 bytes",
+"quota_percent": null
+}
+,
+            session_csrf_token: 'c3ae71f65be7de55dd5bd5f97f316000'
+        };
+
+        config.set({
+            options: options,
+            bootstrapped: bootstrapped
+        });
+
+
+    </script>
+
+    
+
+
+
+
+    
+
+    
+    <script type="text/javascript">
+        config.addInitialization(function() {
+            if (parent.handle_minwidth_hint) {
+                parent.handle_minwidth_hint(-1);
+            }
+        });
+    </script>
+
+    </head>
+    <body class="inbound">
+        
+    
+    
+    <div class="message mt-2 alert alert-danger">You are not allowed to access this dataset</div>
+
+
+    </body>
+</html>
+
+
+
+
+
+
+
+
+
+
+
+
diff -r b1eda492f063 -r 118e230e85ce pipeline.xml
--- a/pipeline.xml Thu Aug 11 09:20:25 2022 +0000
+++ b/pipeline.xml Wed Aug 09 13:40:06 2023 +0000
@@ -1,4 +1,4 @@
-<tool id="sklearn_build_pipeline" name="Pipeline Builder" version="@VERSION@" profile="20.05">
+<tool id="sklearn_build_pipeline" name="Pipeline Builder" version="@VERSION@" profile="@PROFILE@">
     <description>an all-in-one platform to build pipeline, single estimator, preprocessor and custom wrappers</description>
     <macros>
         <import>main_macros.xml</import>
@@ -18,7 +18,6 @@
 import imblearn
 import json
 import pandas as pd
-import pickle
 import pprint
 import skrebate
 import sys
@@ -30,11 +29,9 @@
     svm, linear_model, tree, discriminant_analysis)
 from sklearn.pipeline import make_pipeline
 from imblearn.pipeline import make_pipeline as imb_make_pipeline
+from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
 from galaxy_ml.utils import (SafeEval, feature_selector, get_estimator,
-                             try_get_attr, get_search_params, load_model)
-
-## TODO remove following imports after scikit-learn v0.22
-from sklearn.experimental import enable_hist_gradient_boosting
+                             try_get_attr, get_search_params)
 
 
 N_JOBS = int(__import__('os').environ.get('GALAXY_SLOTS', 1))
@@ -182,10 +179,8 @@
     regressor_path = '$final_estimator.estimator_selector.regressor'
     transformer_path = '$final_estimator.estimator_selector.transformer'
     #end if
-    with open(regressor_path, 'rb') as f:
-        regressor = load_model(f)
-    with open(transformer_path, 'rb') as f:
-        transformer = load_model(f)
+    regressor = load_model_from_h5(regressor_path)
+    transformer = load_model_from_h5(transformer_path)
     estimator = compose.TransformedTargetRegressor(regressor=regressor, transformer=transformer)
     pipeline_steps.append( estimator )
 else:
@@ -202,14 +197,8 @@
         out_obj = make_pipeline(*pipeline_steps)
     pprint.pprint(out_obj.named_steps)
 
-with open('$outfile', 'wb') as out_handler:
-    pickle.dump(out_obj, out_handler, pickle.HIGHEST_PROTOCOL)
+dump_model_to_h5(out_obj, '$outfile', verbose=0)
 
-#if $get_params
-results = get_search_params(out_obj)
-df = pd.DataFrame(results, columns=['', 'Parameter', 'Value'])
-df.to_csv('$outfile_params', sep='\t', index=False)
-#end if
             ]]>
         </configfile>
     </configfiles>
@@ -254,7 +243,9 @@
                     <expand macro="imbalanced_learn_sampling" />
                 </when>
                 <when value="IRAPS">
-                    <expand macro="estimator_params_text" label="Type in parameter settings for IRAPSCore if different from default:" help="Default(=blank): n_iter=1000, responsive_thres=-1, resistant_thres=0, random_state=None. No double quotes" />
+                    <expand macro="estimator_params_text"
+                        label="Type in parameter settings for IRAPSCore if different from default:"
+                        help="Default(=blank): n_iter=1000, responsive_thres=-1, resistant_thres=0, random_state=None. No double quotes" />
                     <param argument="p_thres" type="float" value="0.001" label="P value threshold" help="Float. default=0.001" />
                     <param argument="fc_thres" type="float" value="0.1" label="fold change threshold" help="Float. default=0.1" />
                     <param argument="occurrence" type="float" value="0.7" label="reservation factor" help="Float. default=0.7" />
@@ -267,7 +258,7 @@
         </repeat>
         <section name="final_estimator" title="Final Estimator" expanded="true">
             <conditional name="estimator_selector">
-                <param name="selected_module" type="select" label="Choose the module that contains target estimator:">
+                <param name="selected_module" type="select" label="Choose the module that contains target estimator:" >
                     <expand macro="estimator_module_options">
                         <option value="sklearn.compose">sklearn.compose</option>
                         <option value="binarize_target">B
[... diff truncated in source ...]
alue="n_features_to_select=3, n_neighbors=100" />
+                    </conditional>
             </conditional>
             <param name="selected_module" value="ensemble" />
             <param name="selected_estimator" value="RandomForestRegressor" />
-            <output name="outfile" file="pipeline09" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline09" compare="sim_size" delta="5" />
         </test>
         <test>
             <conditional name="component_selector">
@@ -478,7 +462,7 @@
             </conditional>
             <param name="selected_module" value="ensemble" />
             <param name="selected_estimator" value="RandomForestClassifier" />
-            <output name="outfile" file="pipeline11" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline11" compare="sim_size" delta="5" />
         </test>
         <test expect_failure="true">
             <conditional name="component_selector">
@@ -505,7 +489,7 @@
                     <param name="selected_module" value="none" />
                 </conditional>
             </section>
-            <output name="outfile" file="pipeline12" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline12" compare="sim_size" delta="5" />
         </test>
         <test>
             <conditional name="component_selector">
@@ -513,7 +497,7 @@
             </conditional>
             <param name="selected_module" value="ensemble" />
             <param name="selected_estimator" value="RandomForestClassifier" />
-            <output name="outfile" file="RandomForestClassifier.zip" compare="sim_size" delta="30" />
+            <output name="outfile" file="RandomForestClassifier.h5mlm" compare="sim_size" delta="5" />
        </test>
         <test>
             <conditional name="component_selector">
@@ -524,7 +508,7 @@
                     <param name="selected_module" value="none" />
                 </conditional>
             </section>
-            <output name="outfile" file="pipeline14" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline14" compare="sim_size" delta="5" />
         </test>
         <test>
             <conditional name="component_selector">
@@ -534,10 +518,10 @@
                 <conditional name="estimator_selector">
                     <param name="selected_module" value="binarize_target" />
                     <param name="clf_or_regr" value="BinarizeTargetClassifier" />
-                    <param name="wrapped_estimator" value="RandomForestClassifier.zip" ftype="zip" />
+                    <param name="wrapped_estimator" value="RandomForestClassifier.h5mlm" ftype="h5mlm" />
                 </conditional>
             </section>
-            <output name="outfile" file="pipeline15" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline15" compare="sim_size" delta="5" />
         </test>
         <test>
             <conditional name="component_selector">
@@ -551,10 +535,10 @@
             <section name="final_estimator">
                 <conditional name="estimator_selector">
                     <param name="selected_module" value="custom_estimator" />
-                    <param name="c_estimator" value="keras_model02" ftype="zip" />
+                    <param name="c_estimator" value="keras_model02" ftype="h5mlm" />
                 </conditional>
             </section>
-            <output name="outfile" file="pipeline16" compare="sim_size" delta="30" />
+            <output name="outfile" file="pipeline16" compare="sim_size" delta="5" />
         </test>
     </tests>
     <help>
@@ -583,9 +567,9 @@
 
 **Output**
 
-- Pickled pipeline/estimator object
+- Pipeline/estimator object
 
-- Hyperparameter of the ojbect (optional)
+- Hyperparameter of the object (optional)
 
 
 .. _`Scikit-learn pipeline Pipeline`: http://scikit-learn.org/stable/modules/generated/sklearn.pipeline.Pipeline.html
diff -r b1eda492f063 -r 118e230e85ce search_model_validation.py
--- a/search_model_validation.py Thu Aug 11 09:20:25 2022 +0000
+++ b/search_model_validation.py Wed Aug 09 13:40:06 2023 +0000
@@ -1,35 +1,55 @@
 import argparse
-import collections
 import json
 import os
-import pickle
 import sys
 import warnings
+from distutils.version import LooseVersion as Version
 
 import imblearn
 import joblib
 import numpy as np
 import pandas as pd
 import skrebate
-from galaxy_ml.utils import (clean_params, get_cv,
-                             get_main_estimator, get_module, get_scoring,
-                             load_model, read_columns, SafeEval, try_get_attr)
+from galaxy_ml import __version__ as galaxy_ml_version
+from galaxy_ml.binarize_target import IRAPSClassifier
+from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
+from galaxy_ml.utils import (
+    clean_params,
+    get_cv,
+    get_main_estimator,
+    get_module,
+    get_scoring,
+    read_columns,
+    SafeEval,
+    try_get_attr
+)
 from scipy.io import mmread
-from sklearn import (cluster, decomposition, feature_selection,
-                     kernel_approximation, model_selection, preprocessing)
+from sklearn import (
+    cluster,
+    decomposition,
+    feature_selection,
+    kernel_approximation,
+    model_selection,
+    preprocessing,
+)
 from sklearn.exceptions import FitFailedWarning
 from sklearn.model_selection import _search, _validation
 from sklearn.model_selection._validation import _score, cross_validate
-
-_fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
-setattr(_search, "_fit_and_score", _fit_and_score)
-setattr(_validation, "_fit_and_score", _fit_and_score)
+from sklearn.preprocessing import LabelEncoder
+from skopt import BayesSearchCV
 
 N_JOBS = int(os.environ.get("GALAXY_SLOTS", 1))
 # handle  disk cache
 CACHE_DIR = os.path.join(os.getcwd(), "cached")
-del os
-NON_SEARCHABLE = ("n_jobs", "pre_dispatch", "memory", "_path", "nthread", "callbacks")
+NON_SEARCHABLE = (
+    "n_jobs",
+    "pre_dispatch",
+    "memory",
+    "_path",
+    "_dir",
+    "nthread",
+    "callbacks",
+)
 
 
 def _eval_search_params(params_builder):
@@ -100,33 +120,29 @@
                 imblearn.under_sampling.CondensedNearestNeighbour(
                     random_state=0, n_jobs=N_JOBS
                 ),
-                imblearn.under_sampling.EditedNearestNeighbours(
-                    random_state=0, n_jobs=N_JOBS
-                ),
-                imblearn.under_sampling.RepeatedEditedNearestNeighbours(
-                    random_state=0, n_jobs=N_JOBS
-                ),
-                imblearn.under_sampling.AllKNN(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.EditedNearestNeighbours(n_jobs=N_JOBS),
+                imblearn.under_sampling.RepeatedEditedNearestNeighbours(n_jobs=N_JOBS),
+                imblearn.under_sampling.AllKNN(n_jobs=N_JOBS),
                 imblearn.under_sampling.InstanceHardnessThreshold(
                     random_state=0, n_jobs=N_JOBS
                 ),
-                imblearn.under_sampling.NearMiss(random_state=0, n_jobs=N_JOBS),
-                imblearn.under_sampling.NeighbourhoodCleaningRule(
-                    random_state=0, n_jobs=N_JOBS
-                ),
+                imblearn.under_sampling.NearMiss(n_jobs=N_JOBS),
+                imblearn.under_sampling.NeighbourhoodCleaningRule(n_jobs=N_JOBS),
                 imblearn.under_sampling.OneSidedSelection(
                     random_state=0, n_jobs=N_JOBS
                 ),
                 imblearn.under_sampling.RandomUnderSampler(random_state=0),
-                imblearn.under_sampling.TomekLinks(random_state=0, n_jobs=N_JOBS),
+                imblearn.under_sampling.TomekLinks(n_jobs=N_JOBS),
                 imblearn.over_sampling.ADASYN(random_state=0, n_jobs=N_JOBS),
+                imblearn.over_sampling.BorderlineSMOTE(random_state=0, n_jobs=N_JOBS),
+                imblearn.over_sampling.KMeansSMOTE(random_state=0, n_jobs=N_JOBS),
                 imblearn.over_sampling.RandomOverSampler(random_state=0),
                 imblearn.over_samp
[... diff truncated in source ...]
  outer_cv, _ = get_cv(cv_selector)
         # nested CV, outer cv using cross_validate
         if options["error_score"] == "raise":
             rval = cross_validate(
                 searcher,
                 X,
                 y,
+                groups=groups,
                 scoring=options["scoring"],
                 cv=outer_cv,
                 n_jobs=N_JOBS,
                 verbose=options["verbose"],
+                fit_params={"groups": groups},
                 return_estimator=(params["save"] == "save_estimator"),
                 error_score=options["error_score"],
                 return_train_score=True,
@@ -643,10 +700,12 @@
                         searcher,
                         X,
                         y,
+                        groups=groups,
                         scoring=options["scoring"],
                         cv=outer_cv,
                         n_jobs=N_JOBS,
                         verbose=options["verbose"],
+                        fit_params={"groups": groups},
                         return_estimator=(params["save"] == "save_estimator"),
                         error_score=options["error_score"],
                         return_train_score=True,
@@ -676,8 +735,6 @@
                     cv_results_.to_csv(target_path, sep="\t", header=True, index=False)
             except Exception as e:
                 print(e)
-            finally:
-                del os
 
         keys = list(rval.keys())
         for k in keys:
@@ -689,6 +746,9 @@
         rval = pd.DataFrame(rval)
         rval = rval[sorted(rval.columns)]
         rval.to_csv(path_or_buf=outfile_result, sep="\t", header=True, index=False)
+
+        return 0
+
         # deprecate train test split mode
         """searcher = _do_train_test_split_val(
             searcher, X, y, params,
@@ -696,7 +756,6 @@
             error_score=options['error_score'],
             groups=groups,
             outfile=outfile_result)"""
-        return 0
 
     # no outer split
     else:
@@ -732,24 +791,7 @@
             )
             return
 
-        # clean prams
-        best_estimator_ = clean_params(best_estimator_)
-
-        main_est = get_main_estimator(best_estimator_)
-
-        if hasattr(main_est, "model_") and hasattr(main_est, "save_weights"):
-            if outfile_weights:
-                main_est.save_weights(outfile_weights)
-            del main_est.model_
-            del main_est.fit_params
-            del main_est.model_class_
-            del main_est.validation_data
-            if getattr(main_est, "data_generator_", None):
-                del main_est.data_generator_
-
-        with open(outfile_object, "wb") as output_handler:
-            print("Best estimator is saved: %s " % repr(best_estimator_))
-            pickle.dump(best_estimator_, output_handler, pickle.HIGHEST_PROTOCOL)
+        dump_model_to_h5(best_estimator_, outfile_object)
 
 
 if __name__ == "__main__":
@@ -760,7 +802,6 @@
     aparser.add_argument("-y", "--infile2", dest="infile2")
     aparser.add_argument("-O", "--outfile_result", dest="outfile_result")
     aparser.add_argument("-o", "--outfile_object", dest="outfile_object")
-    aparser.add_argument("-w", "--outfile_weights", dest="outfile_weights")
     aparser.add_argument("-g", "--groups", dest="groups")
     aparser.add_argument("-r", "--ref_seq", dest="ref_seq")
     aparser.add_argument("-b", "--intervals", dest="intervals")
@@ -768,17 +809,4 @@
     aparser.add_argument("-f", "--fasta_path", dest="fasta_path")
     args = aparser.parse_args()
 
-    main(
-        args.inputs,
-        args.infile_estimator,
-        args.infile1,
-        args.infile2,
-        args.outfile_result,
-        outfile_object=args.outfile_object,
-        outfile_weights=args.outfile_weights,
-        groups=args.groups,
-        ref_seq=args.ref_seq,
-        intervals=args.intervals,
-        targets=args.targets,
-        fasta_path=args.fasta_path,
-    )
+    main(**vars(args))
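
Note: the entry point of search_model_validation.py was collapsed to main(**vars(args)). This works because every argparse dest now matches a keyword parameter of main(); a tiny self-contained illustration of the pattern (the argument names here are illustrative, not the tool's full interface):

    import argparse

    def main(inputs, infile_estimator=None, outfile_result=None, groups=None):
        # dest names mirror the keyword parameters, so the Namespace can be
        # splatted straight into the call.
        print(inputs, infile_estimator, outfile_result, groups)

    if __name__ == "__main__":
        aparser = argparse.ArgumentParser()
        aparser.add_argument("-i", "--inputs", dest="inputs", required=True)
        aparser.add_argument("-e", "--infile_estimator", dest="infile_estimator")
        aparser.add_argument("-O", "--outfile_result", dest="outfile_result")
        aparser.add_argument("-g", "--groups", dest="groups")
        args = aparser.parse_args()
        main(**vars(args))  # vars() turns the Namespace into a dict of kwargs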
diff -r b1eda492f063 -r 118e230e85ce simple_model_fit.py
--- a/simple_model_fit.py Thu Aug 11 09:20:25 2022 +0000
+++ b/simple_model_fit.py Wed Aug 09 13:40:06 2023 +0000
@@ -1,9 +1,9 @@
 import argparse
 import json
-import pickle
 
 import pandas as pd
-from galaxy_ml.utils import load_model, read_columns
+from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
+from galaxy_ml.utils import read_columns
 from scipy.io import mmread
 from sklearn.pipeline import Pipeline
 
@@ -148,9 +148,9 @@
         params = json.load(param_handler)
 
     # load model
-    with open(infile_estimator, "rb") as est_handler:
-        estimator = load_model(est_handler)
-    estimator = clean_params(estimator, n_jobs=N_JOBS)
+    estimator = load_model_from_h5(infile_estimator)
+
+    estimator = clean_params(estimator)
 
     X_train, y_train = _get_X_y(params, infile1, infile2)
 
@@ -170,8 +170,7 @@
         if getattr(main_est, "data_generator_", None):
             del main_est.data_generator_
 
-    with open(out_object, "wb") as output_handler:
-        pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
+    dump_model_to_h5(estimator, out_object)
 
 
 if __name__ == "__main__":
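
Note: simple_model_fit.py follows the same HDF5 round trip for fitting: load the artifact, clean its parameters (without forcing n_jobs any more), fit, and dump the fitted model back to HDF5. A condensed sketch under the same Galaxy-ML assumption; paths and training data are placeholders:

    import numpy as np
    from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
    from galaxy_ml.utils import clean_params

    estimator = load_model_from_h5("pipeline.h5mlm")   # placeholder input artifact
    estimator = clean_params(estimator)

    X_train = np.random.rand(20, 4)                    # placeholder training data
    y_train = np.random.randint(0, 2, size=20)
    estimator.fit(X_train, y_train)

    dump_model_to_h5(estimator, "fitted_model.h5mlm")  # fitted model back to HDF5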
diff -r b1eda492f063 -r 118e230e85ce stacking_ensembles.py
--- a/stacking_ensembles.py Thu Aug 11 09:20:25 2022 +0000
+++ b/stacking_ensembles.py Wed Aug 09 13:40:06 2023 +0000
@@ -1,22 +1,22 @@
 import argparse
 import ast
 import json
-import pickle
 import sys
 import warnings
+from distutils.version import LooseVersion as Version
 
 import mlxtend.classifier
 import mlxtend.regressor
-import pandas as pd
-from galaxy_ml.utils import (get_cv, get_estimator, get_search_params,
-                             load_model)
+from galaxy_ml import __version__ as galaxy_ml_version
+from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
+from galaxy_ml.utils import get_cv, get_estimator
 
 warnings.filterwarnings("ignore")
 
 N_JOBS = int(__import__("os").environ.get("GALAXY_SLOTS", 1))
 
 
-def main(inputs_path, output_obj, base_paths=None, meta_path=None, outfile_params=None):
+def main(inputs_path, output_obj, base_paths=None, meta_path=None):
     """
     Parameter
     ---------
@@ -31,9 +31,6 @@
 
     meta_path : str
         File path
-
-    outfile_params : str
-        File path for params output
     """
     with open(inputs_path, "r") as param_handler:
         params = json.load(param_handler)
@@ -43,8 +40,7 @@
     base_estimators = []
     for idx, base_file in enumerate(base_paths.split(",")):
         if base_file and base_file != "None":
-            with open(base_file, "rb") as handler:
-                model = load_model(handler)
+            model = load_model_from_h5(base_file)
         else:
             estimator_json = params["base_est_builder"][idx]["estimator_selector"]
             model = get_estimator(estimator_json)
@@ -59,8 +55,7 @@
     # get meta estimator, if applicable
     if estimator_type.startswith("mlxtend"):
         if meta_path:
-            with open(meta_path, "rb") as f:
-                meta_estimator = load_model(f)
+            meta_estimator = load_model_from_h5(meta_path)
         else:
             estimator_json = params["algo_selection"]["meta_estimator"][
                 "estimator_selector"
@@ -71,7 +66,9 @@
 
     cv_selector = options.pop("cv_selector", None)
     if cv_selector:
-        splitter, _groups = get_cv(cv_selector)
+        if Version(galaxy_ml_version) < Version("0.8.3"):
+            cv_selector.pop("n_stratification_bins", None)
+        splitter, groups = get_cv(cv_selector)
         options["cv"] = splitter
         # set n_jobs
         options["n_jobs"] = N_JOBS
@@ -104,13 +101,7 @@
     for base_est in base_estimators:
         print(base_est)
 
-    with open(output_obj, "wb") as out_handler:
-        pickle.dump(ensemble_estimator, out_handler, pickle.HIGHEST_PROTOCOL)
-
-    if params["get_params"] and outfile_params:
-        results = get_search_params(ensemble_estimator)
-        df = pd.DataFrame(results, columns=["", "Parameter", "Value"])
-        df.to_csv(outfile_params, sep="\t", index=False)
+    dump_model_to_h5(ensemble_estimator, output_obj)
 
 
 if __name__ == "__main__":
@@ -119,13 +110,6 @@
     aparser.add_argument("-m", "--meta", dest="meta")
     aparser.add_argument("-i", "--inputs", dest="inputs")
     aparser.add_argument("-o", "--outfile", dest="outfile")
-    aparser.add_argument("-p", "--outfile_params", dest="outfile_params")
     args = aparser.parse_args()
 
-    main(
-        args.inputs,
-        args.outfile,
-        base_paths=args.bases,
-        meta_path=args.meta,
-        outfile_params=args.outfile_params,
-    )
+    main(args.inputs, args.outfile, base_paths=args.bases, meta_path=args.meta)
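
Note: stacking_ensembles.py now drops the n_stratification_bins key from the CV selector when the installed Galaxy-ML predates support for it, before passing the selector to get_cv. A minimal sketch of that version guard, using the same LooseVersion comparison as the code above; the selector dict contents are illustrative:

    from distutils.version import LooseVersion as Version
    from galaxy_ml import __version__ as galaxy_ml_version

    cv_selector = {
        "selected_cv": "StratifiedKFold",        # illustrative selector contents
        "n_splits": 5,
        "n_stratification_bins": 4,
    }

    # Older Galaxy-ML releases do not recognize n_stratification_bins,
    # so remove it before handing the selector to get_cv().
    if Version(galaxy_ml_version) < Version("0.8.3"):
        cv_selector.pop("n_stratification_bins", None)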
diff -r b1eda492f063 -r 118e230e85ce test-data/GridSearchCV01.h5mlm
b
Binary file test-data/GridSearchCV01.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/LinearRegression01.h5mlm
b
Binary file test-data/LinearRegression01.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/RFE.h5mlm
b
Binary file test-data/RFE.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/RandomForestClassifier.h5mlm
b
Binary file test-data/RandomForestClassifier.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/RandomForestRegressor01.h5mlm
b
Binary file test-data/RandomForestRegressor01.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/StackingCVRegressor01.h5mlm
b
Binary file test-data/StackingCVRegressor01.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/StackingRegressor02.h5mlm
b
Binary file test-data/StackingRegressor02.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/StackingVoting03.h5mlm
b
Binary file test-data/StackingVoting03.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/XGBRegressor01.h5mlm
b
Binary file test-data/XGBRegressor01.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/abc_model01
b
Binary file test-data/abc_model01 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/abc_result01
--- a/test-data/abc_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/abc_result01 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,6 +1,6 @@
 0 1 2 3 predicted
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/abr_model01
b
Binary file test-data/abr_model01 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/abr_result01
--- a/test-data/abr_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/abr_result01 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,6 +1,6 @@
 0 1 2 3 4 predicted
-86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.323842059244
-91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 1.1503117056799999
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.7191695359690001
-61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 1.1503117056799999
--206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -0.7191695359690001
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.94315222831
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152 1.0812585465566666
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116 -0.719169535969
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536 1.15031170568
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738 -0.719169535969
b
diff -r b1eda492f063 -r 118e230e85ce test-data/auc.txt
--- a/test-data/auc.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/auc.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,2 +1,2 @@
 auc : 
-2.5
+3.0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/best_estimator_.h5mlm
b
Binary file test-data/best_estimator_.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/best_params_.txt
--- a/test-data/best_params_.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/best_params_.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,1 +1,1 @@
-{'estimator__n_estimators': 100}
\ No newline at end of file
+{}
\ No newline at end of file
b
diff -r b1eda492f063 -r 118e230e85ce test-data/best_score_.tabular
--- a/test-data/best_score_.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/best_score_.tabular Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,2 +1,2 @@
 best_score_
-0.7976348550293088
+0.8065123295049499
b
diff -r b1eda492f063 -r 118e230e85ce test-data/classifier_y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/classifier_y.tabular Wed Aug 09 13:40:06 2023 +0000
b
@@ -0,0 +1,262 @@
+label
+high
+intermediate
+high
+intermediate
+intermediate
+intermediate
+intermediate
+high
+low
+low
+low
+high
+intermediate
+low
+high
+high
+intermediate
+high
+intermediate
+low
+low
+intermediate
+high
+intermediate
+low
+low
+high
+intermediate
+intermediate
+high
+intermediate
+intermediate
+intermediate
+intermediate
+low
+intermediate
+intermediate
+intermediate
+low
+low
+intermediate
+intermediate
+low
+intermediate
+intermediate
+intermediate
+intermediate
+high
+high
+intermediate
+intermediate
+high
+intermediate
+intermediate
+high
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+low
+intermediate
+intermediate
+low
+intermediate
+high
+high
+intermediate
+high
+intermediate
+low
+high
+high
+high
+high
+intermediate
+low
+high
+intermediate
+intermediate
+high
+intermediate
+intermediate
+low
+low
+intermediate
+intermediate
+high
+high
+intermediate
+high
+high
+low
+intermediate
+intermediate
+intermediate
+high
+intermediate
+high
+intermediate
+high
+intermediate
+intermediate
+low
+intermediate
+intermediate
+high
+high
+high
+intermediate
+high
+intermediate
+intermediate
+low
+intermediate
+intermediate
+intermediate
+high
+low
+intermediate
+intermediate
+intermediate
+intermediate
+high
+intermediate
+intermediate
+intermediate
+high
+intermediate
+high
+intermediate
+high
+high
+high
+intermediate
+intermediate
+intermediate
+high
+intermediate
+intermediate
+low
+intermediate
+intermediate
+low
+low
+intermediate
+intermediate
+intermediate
+intermediate
+high
+high
+intermediate
+high
+high
+intermediate
+intermediate
+intermediate
+low
+high
+intermediate
+high
+intermediate
+intermediate
+intermediate
+high
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+intermediate
+high
+intermediate
+intermediate
+low
+intermediate
+intermediate
+low
+intermediate
+high
+intermediate
+high
+high
+high
+low
+high
+low
+intermediate
+intermediate
+high
+intermediate
+intermediate
+high
+high
+intermediate
+high
+intermediate
+low
+intermediate
+intermediate
+high
+low
+intermediate
+intermediate
+intermediate
+intermediate
+high
+low
+high
+intermediate
+intermediate
+low
+high
+intermediate
+low
+intermediate
+intermediate
+intermediate
+high
+high
+intermediate
+high
+high
+intermediate
+intermediate
+high
+high
+intermediate
+low
+intermediate
+intermediate
+high
+intermediate
+intermediate
+intermediate
+high
+high
+high
+intermediate
+high
+low
+high
+intermediate
+low
+intermediate
+high
+intermediate
+high
+intermediate
+intermediate
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result01.txt
--- a/test-data/cluster_result01.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result01.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result02.txt
--- a/test-data/cluster_result02.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result02.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,48 +1,49 @@
-0 44 64 -76 3
-0 51 48 -73 3
-0 58 65 -49 3
-0 43 61 -49 3
-0 45 43 -79 3
-0 42 60 -98 3
-0 50 55 -59 3
-0 53 53 -56 3
-0 45 44 -61 3
-0 43 65 -84 3
-0 35 52 -75 3
-0 56 56 -70 3
-1 -61 86 43 2
-1 -67 93 15 2
-1 -59 94 36 2
-1 -50 92 62 2
-1 -78 91 70 2
-1 -35 87 47 2
-1 -56 91 52 2
-1 -61 81 46 2
-1 -83 78 34 2
-1 -50 87 45 2
-1 -67 73 50 2
-1 -50 97 45 2
-1 -61 111 45 2
-2 -109 23 -92 0
-2 -94 20 -96 0
-2 -85 26 -88 0
-2 -90 33 -114 0
-2 -63 9 -106 0
-2 -79 9 -93 0
-2 -99 26 -108 0
-2 -81 19 -110 0
-2 -108 21 -108 0
-2 -92 27 -106 0
-2 -88 2 -106 0
-2 -88 15 -103 0
-3 54 -74 4 1
-3 42 -92 31 1
-3 39 -99 -7 1
-3 48 -115 -5 1
-3 39 -96 2 1
-3 31 -109 9 1
-3 33 -96 -8 1
-3 23 -102 4 1
-3 38 -90 21 1
-3 34 -107 1 1
-3 35 -78 18 1
+0 58 56 -67 0
+0 44 64 -76 0
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 0
+0 45 43 -79 0
+0 42 60 -98 0
+0 50 55 -59 0
+0 53 53 -56 0
+0 45 44 -61 0
+0 43 65 -84 0
+0 35 52 -75 0
+0 56 56 -70 0
+1 -61 86 43 1
+1 -67 93 15 1
+1 -59 94 36 1
+1 -50 92 62 1
+1 -78 91 70 1
+1 -35 87 47 1
+1 -56 91 52 1
+1 -61 81 46 1
+1 -83 78 34 1
+1 -50 87 45 1
+1 -67 73 50 1
+1 -50 97 45 1
+1 -61 111 45 1
+2 -109 23 -92 2
+2 -94 20 -96 2
+2 -85 26 -88 2
+2 -90 33 -114 2
+2 -63 9 -106 2
+2 -79 9 -93 2
+2 -99 26 -108 2
+2 -81 19 -110 2
+2 -108 21 -108 2
+2 -92 27 -106 2
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result03.txt
--- a/test-data/cluster_result03.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result03.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 -1
 0 44 64 -76 -1
 0 51 48 -73 -1
 0 58 65 -49 -1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result04.txt
--- a/test-data/cluster_result04.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result04.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 1
 0 44 64 -76 1
 0 51 48 -73 1
 0 58 65 -49 1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result05.txt
--- a/test-data/cluster_result05.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result05.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result06.txt
--- a/test-data/cluster_result06.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result06.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result07.txt
--- a/test-data/cluster_result07.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result07.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result08.txt
--- a/test-data/cluster_result08.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result08.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result09.txt
--- a/test-data/cluster_result09.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result09.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result10.txt
--- a/test-data/cluster_result10.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result10.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 3
 0 44 64 -76 3
 0 51 48 -73 3
 0 58 65 -49 3
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result11.txt
--- a/test-data/cluster_result11.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result11.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 2
 0 44 64 -76 2
 0 51 48 -73 2
 0 58 65 -49 2
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result12.txt
--- a/test-data/cluster_result12.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result12.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,48 +1,49 @@
-0 44 64 -76 1
-0 51 48 -73 1
-0 58 65 -49 1
-0 43 61 -49 0
-0 45 43 -79 1
-0 42 60 -98 1
-0 50 55 -59 1
-0 53 53 -56 1
+0 58 56 -67 3
+0 44 64 -76 3
+0 51 48 -73 0
+0 58 65 -49 0
+0 43 61 -49 2
+0 45 43 -79 0
+0 42 60 -98 2
+0 50 55 -59 3
+0 53 53 -56 3
 0 45 44 -61 0
-0 43 65 -84 1
-0 35 52 -75 1
-0 56 56 -70 1
-1 -61 86 43 2
-1 -67 93 15 1
-1 -59 94 36 1
+0 43 65 -84 3
+0 35 52 -75 2
+0 56 56 -70 3
+1 -61 86 43 3
+1 -67 93 15 2
+1 -59 94 36 0
 1 -50 92 62 0
-1 -78 91 70 1
-1 -35 87 47 1
+1 -78 91 70 2
+1 -35 87 47 0
 1 -56 91 52 0
-1 -61 81 46 2
-1 -83 78 34 1
+1 -61 81 46 3
+1 -83 78 34 0
 1 -50 87 45 0
-1 -67 73 50 1
+1 -67 73 50 2
 1 -50 97 45 0
 1 -61 111 45 1
-2 -109 23 -92 0
-2 -94 20 -96 3
-2 -85 26 -88 3
-2 -90 33 -114 3
-2 -63 9 -106 0
-2 -79 9 -93 1
-2 -99 26 -108 3
-2 -81 19 -110 3
-2 -108 21 -108 3
-2 -92 27 -106 3
-2 -88 2 -106 0
-2 -88 15 -103 3
-3 54 -74 4 1
-3 42 -92 31 3
+2 -109 23 -92 2
+2 -94 20 -96 2
+2 -85 26 -88 2
+2 -90 33 -114 2
+2 -63 9 -106 2
+2 -79 9 -93 3
+2 -99 26 -108 2
+2 -81 19 -110 2
+2 -108 21 -108 2
+2 -92 27 -106 2
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 0
+3 42 -92 31 1
 3 39 -99 -7 3
-3 48 -115 -5 1
+3 48 -115 -5 2
 3 39 -96 2 3
-3 31 -109 9 3
+3 31 -109 9 2
 3 33 -96 -8 3
-3 23 -102 4 3
-3 38 -90 21 3
-3 34 -107 1 3
-3 35 -78 18 3
+3 23 -102 4 2
+3 38 -90 21 1
+3 34 -107 1 2
+3 35 -78 18 1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result13.txt
--- a/test-data/cluster_result13.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result13.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,40 +1,41 @@
-0 44 64 -76 4
-0 51 48 -73 0
+0 58 56 -67 2
+0 44 64 -76 2
+0 51 48 -73 1
 0 58 65 -49 0
-0 43 61 -49 1
-0 45 43 -79 0
-0 42 60 -98 0
-0 50 55 -59 2
-0 53 53 -56 2
-0 45 44 -61 0
-0 43 65 -84 4
+0 43 61 -49 0
+0 45 43 -79 2
+0 42 60 -98 1
+0 50 55 -59 1
+0 53 53 -56 1
+0 45 44 -61 1
+0 43 65 -84 2
 0 35 52 -75 1
-0 56 56 -70 0
-1 -61 86 43 0
-1 -67 93 15 0
-1 -59 94 36 0
-1 -50 92 62 0
+0 56 56 -70 2
+1 -61 86 43 2
+1 -67 93 15 1
+1 -59 94 36 2
+1 -50 92 62 1
 1 -78 91 70 1
-1 -35 87 47 0
-1 -56 91 52 0
-1 -61 81 46 0
-1 -83 78 34 0
-1 -50 87 45 0
-1 -67 73 50 1
-1 -50 97 45 0
-1 -61 111 45 0
-2 -109 23 -92 0
-2 -94 20 -96 0
-2 -85 26 -88 0
-2 -90 33 -114 1
-2 -63 9 -106 0
-2 -79 9 -93 1
-2 -99 26 -108 3
-2 -81 19 -110 0
-2 -108 21 -108 0
-2 -92 27 -106 3
+1 -35 87 47 1
+1 -56 91 52 1
+1 -61 81 46 2
+1 -83 78 34 2
+1 -50 87 45 1
+1 -67 73 50 0
+1 -50 97 45 1
+1 -61 111 45 2
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 2
+2 -63 9 -106 1
+2 -79 9 -93 2
+2 -99 26 -108 2
+2 -81 19 -110 1
+2 -108 21 -108 2
+2 -92 27 -106 2
 2 -88 2 -106 1
-2 -88 15 -103 0
+2 -88 15 -103 1
 3 54 -74 4 0
 3 42 -92 31 1
 3 39 -99 -7 1
@@ -42,7 +43,7 @@
 3 39 -96 2 1
 3 31 -109 9 1
 3 33 -96 -8 1
-3 23 -102 4 0
+3 23 -102 4 1
 3 38 -90 21 1
 3 34 -107 1 1
 3 35 -78 18 1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result14.txt
--- a/test-data/cluster_result14.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result14.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,48 +1,49 @@
-0 44 64 -76 2
-0 51 48 -73 2
-0 58 65 -49 5
-0 43 61 -49 5
-0 45 43 -79 2
-0 42 60 -98 2
-0 50 55 -59 5
-0 53 53 -56 5
-0 45 44 -61 2
-0 43 65 -84 2
-0 35 52 -75 2
-0 56 56 -70 2
-1 -61 86 43 1
-1 -67 93 15 1
-1 -59 94 36 1
-1 -50 92 62 1
-1 -78 91 70 7
-1 -35 87 47 1
-1 -56 91 52 1
-1 -61 81 46 7
-1 -83 78 34 7
-1 -50 87 45 1
-1 -67 73 50 7
-1 -50 97 45 1
-1 -61 111 45 1
-2 -109 23 -92 6
-2 -94 20 -96 6
-2 -85 26 -88 6
-2 -90 33 -114 6
-2 -63 9 -106 3
-2 -79 9 -93 3
-2 -99 26 -108 6
-2 -81 19 -110 6
-2 -108 21 -108 6
-2 -92 27 -106 6
-2 -88 2 -106 3
-2 -88 15 -103 6
-3 54 -74 4 4
-3 42 -92 31 4
+0 58 56 -67 3
+0 44 64 -76 7
+0 51 48 -73 3
+0 58 65 -49 3
+0 43 61 -49 3
+0 45 43 -79 3
+0 42 60 -98 7
+0 50 55 -59 3
+0 53 53 -56 3
+0 45 44 -61 3
+0 43 65 -84 7
+0 35 52 -75 3
+0 56 56 -70 3
+1 -61 86 43 4
+1 -67 93 15 4
+1 -59 94 36 4
+1 -50 92 62 4
+1 -78 91 70 4
+1 -35 87 47 4
+1 -56 91 52 4
+1 -61 81 46 4
+1 -83 78 34 4
+1 -50 87 45 4
+1 -67 73 50 4
+1 -50 97 45 4
+1 -61 111 45 4
+2 -109 23 -92 5
+2 -94 20 -96 5
+2 -85 26 -88 2
+2 -90 33 -114 5
+2 -63 9 -106 2
+2 -79 9 -93 2
+2 -99 26 -108 5
+2 -81 19 -110 2
+2 -108 21 -108 5
+2 -92 27 -106 5
+2 -88 2 -106 2
+2 -88 15 -103 2
+3 54 -74 4 6
+3 42 -92 31 6
 3 39 -99 -7 0
 3 48 -115 -5 0
-3 39 -96 2 0
+3 39 -96 2 1
 3 31 -109 9 0
 3 33 -96 -8 0
 3 23 -102 4 0
-3 38 -90 21 4
+3 38 -90 21 6
 3 34 -107 1 0
-3 35 -78 18 4
+3 35 -78 18 6
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result15.txt
--- a/test-data/cluster_result15.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result15.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,48 +1,49 @@
-0 44 64 -76 1
-0 51 48 -73 1
-0 58 65 -49 1
-0 43 61 -49 1
-0 45 43 -79 1
-0 42 60 -98 1
-0 50 55 -59 1
-0 53 53 -56 1
-0 45 44 -61 1
-0 43 65 -84 1
-0 35 52 -75 1
-0 56 56 -70 1
-1 -61 86 43 2
-1 -67 93 15 2
-1 -59 94 36 2
-1 -50 92 62 2
-1 -78 91 70 2
-1 -35 87 47 2
-1 -56 91 52 2
-1 -61 81 46 2
-1 -83 78 34 2
-1 -50 87 45 2
-1 -67 73 50 2
-1 -50 97 45 2
-1 -61 111 45 2
-2 -109 23 -92 3
-2 -94 20 -96 3
-2 -85 26 -88 3
-2 -90 33 -114 3
-2 -63 9 -106 3
-2 -79 9 -93 3
-2 -99 26 -108 3
-2 -81 19 -110 3
-2 -108 21 -108 3
-2 -92 27 -106 3
-2 -88 2 -106 3
-2 -88 15 -103 3
-3 54 -74 4 0
-3 42 -92 31 0
-3 39 -99 -7 0
-3 48 -115 -5 0
-3 39 -96 2 0
-3 31 -109 9 0
-3 33 -96 -8 0
-3 23 -102 4 0
-3 38 -90 21 0
-3 34 -107 1 0
-3 35 -78 18 0
+0 58 56 -67 2
+0 44 64 -76 2
+0 51 48 -73 2
+0 58 65 -49 2
+0 43 61 -49 2
+0 45 43 -79 2
+0 42 60 -98 2
+0 50 55 -59 2
+0 53 53 -56 2
+0 45 44 -61 2
+0 43 65 -84 2
+0 35 52 -75 2
+0 56 56 -70 2
+1 -61 86 43 0
+1 -67 93 15 0
+1 -59 94 36 0
+1 -50 92 62 0
+1 -78 91 70 0
+1 -35 87 47 0
+1 -56 91 52 0
+1 -61 81 46 0
+1 -83 78 34 0
+1 -50 87 45 0
+1 -67 73 50 0
+1 -50 97 45 0
+1 -61 111 45 0
+2 -109 23 -92 1
+2 -94 20 -96 1
+2 -85 26 -88 1
+2 -90 33 -114 1
+2 -63 9 -106 1
+2 -79 9 -93 1
+2 -99 26 -108 1
+2 -81 19 -110 1
+2 -108 21 -108 1
+2 -92 27 -106 1
+2 -88 2 -106 1
+2 -88 15 -103 1
+3 54 -74 4 3
+3 42 -92 31 3
+3 39 -99 -7 3
+3 48 -115 -5 3
+3 39 -96 2 3
+3 31 -109 9 3
+3 33 -96 -8 3
+3 23 -102 4 3
+3 38 -90 21 3
+3 34 -107 1 3
+3 35 -78 18 3
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result16.txt
--- a/test-data/cluster_result16.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result16.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,3 +1,4 @@
+0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
 0 58 65 -49 0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/cluster_result20.txt
--- a/test-data/cluster_result20.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/cluster_result20.txt Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,4 +1,4 @@
-0
 1
 0
 0
+0
b
diff -r b1eda492f063 -r 118e230e85ce test-data/feature_importances_.tabular
--- a/test-data/feature_importances_.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/feature_importances_.tabular Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,11 +1,18 @@
 feature_importances_
-0.15959252
-0.20373514
-0.22071308
-0.06281833
-0.098471984
-0.06960951
-0.13073005
-0.027164686
-0.022071308
-0.0050933785
+0.0
+0.010321120632288475
+0.020625116573685313
+0.016636826378837406
+0.6664893851561091
+0.20159083801089675
+0.02241562132348836
+0.009496348250813283
+0.012938283097946316
+0.018952436107178415
+0.005185454993705707
+0.0034219510481551417
+0.0032506707056426144
+0.002133755718458784
+0.0010583192894241607
+0.003336217194380454
+0.0021476555189897154
b
diff -r b1eda492f063 -r 118e230e85ce test-data/feature_selection_result01
--- a/test-data/feature_selection_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/feature_selection_result01 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,11 +1,11 @@
 0 1
-143.762620712 -1.1796457192799998
--88.5787166225 -2.5710918402200003
--82.8452345578 -0.168636324107
-72.4951388149 0.991068834926
-11.805182128 -0.7096855607860001
--63.9354970901 0.9841122108220001
-126.32584079600001 0.35353444883900004
-23.0341392692 1.03188231893
-67.6714937696 -0.8214378651719999
-47.39275848810001 -0.0942409319417
+143.762620712 -0.330941870584
+-88.5787166225 1.08055532812
+-82.8452345578 0.272541389247
+72.4951388149 -0.268686605278
+11.805182128 1.03604670966
+-63.9354970901 -0.101485840571
+126.325840796 -0.359998340179
+23.0341392692 0.518540465136
+67.6714937696 -0.115688051547
+47.3927584881 -0.785096541368
b
diff -r b1eda492f063 -r 118e230e85ce test-data/feature_selection_result08
--- a/test-data/feature_selection_result08 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/feature_selection_result08 Wed Aug 09 13:40:06 2023 +0000
b
@@ -2,10 +2,10 @@
 143.762620712 -0.330941870584
 -88.5787166225 1.08055532812
 -82.8452345578 0.272541389247
-72.4951388149 -0.26868660527800003
-11.805182128 1.0360467096600001
+72.4951388149 -0.268686605278
+11.805182128 1.03604670966
 -63.9354970901 -0.101485840571
-126.32584079600001 -0.35999834017899995
-23.0341392692 0.5185404651359999
+126.325840796 -0.359998340179
+23.0341392692 0.518540465136
 67.6714937696 -0.115688051547
-47.39275848810001 -0.7850965413680001
+47.3927584881 -0.785096541368
b
diff -r b1eda492f063 -r 118e230e85ce test-data/feature_selection_result09
--- a/test-data/feature_selection_result09 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/feature_selection_result09 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,11 +1,11 @@
-0
-143.762620712
--88.5787166225
--82.8452345578
-72.4951388149
-11.805182128
--63.9354970901
-126.32584079600001
-23.0341392692
-67.6714937696
-47.39275848810001
+0 1
+143.762620712 -0.330941870584
+-88.5787166225 1.08055532812
+-82.8452345578 0.272541389247
+72.4951388149 -0.268686605278
+11.805182128 1.03604670966
+-63.9354970901 -0.101485840571
+126.325840796 -0.359998340179
+23.0341392692 0.518540465136
+67.6714937696 -0.115688051547
+47.3927584881 -0.785096541368
b
diff -r b1eda492f063 -r 118e230e85ce test-data/feature_selection_result12
--- a/test-data/feature_selection_result12 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/feature_selection_result12 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,11 +1,262 @@
-0 1
-143.762620712 -0.330941870584
--88.5787166225 1.08055532812
--82.8452345578 0.272541389247
-72.4951388149 -0.26868660527800003
-11.805182128 1.0360467096600001
--63.9354970901 -0.101485840571
-126.32584079600001 -0.35999834017899995
-23.0341392692 0.5185404651359999
-67.6714937696 -0.115688051547
-47.39275848810001 -0.7850965413680001
+temp_1 average
+69.0 69.7
+59.0 58.1
+88.0 77.3
+65.0 64.7
+50.0 47.5
+51.0 48.2
+52.0 48.6
+78.0 76.7
+35.0 45.2
+40.0 46.1
+47.0 45.3
+72.0 76.3
+76.0 74.4
+39.0 45.3
+78.0 72.2
+71.0 67.3
+48.0 47.7
+72.0 77.0
+57.0 54.7
+40.0 45.1
+54.0 47.6
+58.0 53.2
+68.0 58.6
+65.0 55.3
+47.0 48.8
+44.0 45.6
+64.0 67.1
+62.0 57.1
+66.0 65.7
+70.0 71.8
+57.0 54.2
+50.0 50.5
+55.0 51.8
+55.0 49.5
+42.0 45.2
+65.0 60.1
+63.0 65.6
+48.0 47.3
+42.0 46.3
+51.0 46.2
+64.0 68.0
+75.0 74.6
+52.0 46.7
+67.0 68.6
+68.0 68.7
+54.0 55.0
+62.0 56.8
+76.0 76.1
+73.0 73.1
+52.0 50.3
+70.0 73.9
+77.0 77.4
+60.0 56.6
+52.0 53.3
+79.0 75.0
+76.0 57.2
+66.0 66.5
+57.0 61.8
+66.0 57.4
+61.0 58.4
+55.0 53.1
+48.0 48.1
+49.0 49.2
+65.0 66.7
+60.0 62.5
+56.0 53.0
+59.0 57.4
+44.0 45.7
+82.0 63.2
+64.0 67.0
+43.0 45.5
+64.0 55.7
+63.0 52.7
+70.0 70.6
+71.0 52.4
+76.0 73.5
+68.0 62.1
+39.0 45.3
+71.0 70.7
+69.0 71.7
+74.0 71.5
+81.0 64.1
+51.0 49.3
+45.0 46.8
+87.0 76.8
+71.0 73.8
+55.0 60.3
+80.0 76.9
+67.0 69.0
+61.0 61.4
+46.0 46.6
+39.0 45.1
+67.0 68.3
+52.0 47.8
+67.0 69.8
+75.0 71.2
+68.0 73.3
+92.0 68.2
+67.0 72.8
+44.0 45.8
+61.0 61.0
+65.0 53.4
+68.0 73.0
+87.0 62.1
+117.0 54.8
+80.0 76.4
+57.0 51.0
+67.0 63.6
+58.0 54.0
+65.0 56.2
+52.0 48.6
+59.0 55.3
+57.0 53.9
+81.0 59.2
+75.0 77.1
+76.0 77.4
+57.0 64.8
+69.0 74.2
+77.0 66.8
+55.0 49.9
+49.0 46.8
+54.0 52.7
+55.0 51.2
+56.0 55.6
+68.0 74.6
+54.0 53.4
+67.0 69.0
+49.0 46.9
+49.0 49.1
+56.0 48.5
+73.0 71.0
+66.0 66.4
+69.0 66.5
+82.0 64.5
+90.0 76.7
+51.0 50.7
+77.0 57.1
+60.0 61.4
+74.0 72.8
+85.0 77.2
+68.0 62.8
+56.0 49.5
+71.0 56.2
+62.0 59.5
+83.0 77.3
+64.0 65.4
+56.0 48.4
+41.0 45.1
+65.0 66.2
+65.0 53.7
+40.0 46.0
+45.0 45.6
+52.0 48.4
+63.0 51.7
+52.0 47.6
+60.0 57.9
+81.0 75.7
+75.0 75.8
+59.0 51.4
+73.0 77.1
+75.0 77.3
+60.0 58.5
+75.0 71.3
+59.0 57.6
+53.0 49.1
+79.0 77.2
+57.0 52.1
+75.0 67.6
+71.0 69.4
+53.0 50.2
+46.0 48.8
+81.0 76.9
+49.0 48.9
+57.0 48.4
+60.0 58.8
+67.0 73.7
+61.0 64.1
+66.0 69.5
+64.0 51.9
+66.0 65.7
+64.0 52.2
+71.0 65.2
+75.0 63.8
+48.0 46.4
+53.0 52.5
+49.0 47.1
+85.0 68.5
+62.0 49.4
+50.0 47.0
+58.0 55.9
+72.0 77.2
+55.0 50.7
+74.0 72.3
+85.0 77.3
+73.0 77.3
+52.0 47.4
+67.0 67.6
+45.0 45.1
+46.0 47.2
+66.0 60.6
+71.0 77.0
+70.0 69.3
+58.0 49.9
+72.0 77.1
+74.0 75.4
+65.0 64.5
+77.0 58.8
+59.0 50.9
+45.0 45.7
+53.0 50.5
+53.0 54.9
+79.0 77.3
+49.0 49.0
+63.0 62.9
+69.0 56.5
+60.0 50.8
+64.0 62.5
+79.0 71.0
+55.0 47.0
+73.0 56.0
+60.0 59.1
+67.0 70.2
+42.0 45.2
+60.0 65.0
+57.0 49.8
+35.0 45.2
+75.0 70.3
+61.0 51.1
+51.0 50.6
+71.0 71.9
+74.0 75.3
+48.0 45.4
+74.0 74.9
+76.0 70.8
+58.0 51.6
+51.0 50.4
+72.0 72.6
+76.0 67.2
+52.0 47.9
+53.0 48.2
+65.0 69.1
+58.0 58.1
+77.0 75.6
+61.0 52.9
+67.0 65.3
+54.0 49.3
+79.0 67.4
+77.0 64.3
+71.0 67.7
+58.0 57.7
+68.0 55.9
+40.0 45.4
+80.0 77.3
+74.0 62.3
+57.0 45.5
+52.0 47.8
+71.0 75.1
+49.0 53.6
+89.0 59.0
+60.0 60.2
+59.0 58.3
b
diff -r b1eda492f063 -r 118e230e85ce test-data/final_estimator.h5mlm
b
Binary file test-data/final_estimator.h5mlm has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/fitted_model_eval01.tabular
--- a/test-data/fitted_model_eval01.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/fitted_model_eval01.tabular Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,2 +1,2 @@
-score
-0.8277511130733235
+r2
+0.9740021394589831
b
diff -r b1eda492f063 -r 118e230e85ce test-data/gbc_model01
b
Binary file test-data/gbc_model01 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/gbc_result01
--- a/test-data/gbc_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/gbc_result01 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,6 +1,6 @@
 0 1 2 3 predicted
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
b
diff -r b1eda492f063 -r 118e230e85ce test-data/gbr_model01
b
Binary file test-data/gbr_model01 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/get_params.tabular
--- a/test-data/get_params.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/get_params.tabular Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,6 +1,7 @@
  Parameter Value
 @ copy_X copy_X: True
 @ fit_intercept fit_intercept: True
-* n_jobs n_jobs: 1
-@ normalize normalize: False
+* n_jobs n_jobs: None
+@ normalize normalize: 'deprecated'
+@ positive positive: False
  Note: @, params eligible for search in searchcv tool.
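
Note: the refreshed get_params.tabular expectations track newer scikit-learn defaults for LinearRegression (n_jobs=None, normalize flagged as 'deprecated', and the added positive parameter). Something like the following reproduces those values on scikit-learn 1.0/1.1; the exact listing depends on the installed version:

    from sklearn.linear_model import LinearRegression

    for name, value in sorted(LinearRegression().get_params().items()):
        # On scikit-learn 1.0/1.1 this prints copy_X=True, fit_intercept=True,
        # n_jobs=None, normalize='deprecated', positive=False.
        print(f"{name}\t{value!r}")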
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model01
b
Binary file test-data/glm_model01 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model02
b
Binary file test-data/glm_model02 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model03
b
Binary file test-data/glm_model03 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model04
b
Binary file test-data/glm_model04 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model05
b
Binary file test-data/glm_model05 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model06
b
Binary file test-data/glm_model06 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model07
b
Binary file test-data/glm_model07 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_model08
b
Binary file test-data/glm_model08 has changed
b
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result01
--- a/test-data/glm_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result01 Wed Aug 09 13:40:06 2023 +0000
b
@@ -1,5 +1,5 @@
-86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 20479602419382.055
-91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 21460309408632.004
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -11245419999724.842
-61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 14574106078789.26
--206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -48782519807586.32
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 20479602419372.582
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152 21460309408622.086
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116 -11245419999719.686
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536 14574106078782.537
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738 -48782519807563.79
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result02
--- a/test-data/glm_result02 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result02 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result03
--- a/test-data/glm_result03 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result03 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result04
--- a/test-data/glm_result04 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result04 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.5282637592226301
-91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 0.5180352211818147
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 0.012682414140451959
-61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 0.1869842234155321
--206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -1.6599360904302456
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.5282637592226304
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152 0.5180352211818147
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116 0.012682414140452014
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536 0.18698422341553234
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738 -1.659936090430246
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result05
--- a/test-data/glm_result05 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result05 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result06
--- a/test-data/glm_result06 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result06 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result07
--- a/test-data/glm_result07 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result07 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6093152833692663
-91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 0.5963828164943974
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.07927429227257948
-61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 0.2621440442022235
--206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -1.7330414645145749
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6093152833692668
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152 0.5963828164943976
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116 -0.07927429227258004
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536 0.26214404420222365
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738 -1.7330414645145753
diff -r b1eda492f063 -r 118e230e85ce test-data/glm_result08
--- a/test-data/glm_result08 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/glm_result08 Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,5 @@
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/jaccard_similarity_score.txt
--- a/test-data/jaccard_similarity_score.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/jaccard_similarity_score.txt Wed Aug 09 13:40:06 2023 +0000
@@ -1,2 +1,2 @@
-jaccard_similarity_score : 
-0.8461538461538461
+jaccard_score : 
+0.7538461538461538
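This expectation changes because scikit-learn removed jaccard_similarity_score, which for plain label vectors reduced to accuracy, in favour of jaccard_score, which computes a true per-class Jaccard index. A hedged illustration of the difference; the label arrays and the macro averaging below are stand-ins, not the tool's fixtures:

    # Illustrative only: contrast the removed accuracy-like metric with its
    # replacement on made-up binary labels.
    import numpy as np
    from sklearn.metrics import accuracy_score, jaccard_score

    y_true = np.array([0, 1, 1, 0, 1, 1, 0, 0, 1, 1])
    y_pred = np.array([0, 1, 0, 0, 1, 1, 0, 1, 1, 1])

    print(accuracy_score(y_true, y_pred))                  # what jaccard_similarity_score used to report
    print(jaccard_score(y_true, y_pred, average="macro"))  # the per-class Jaccard index stored now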
diff -r b1eda492f063 -r 118e230e85ce test-data/keras01.json
--- a/test-data/keras01.json Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/keras01.json Wed Aug 09 13:40:06 2023 +0000
@@ -1,12 +1,25 @@
 {
   "class_name": "Sequential",
   "config": {
-    "name": "sequential_1",
+    "name": "sequential",
     "layers": [
       {
+        "class_name": "InputLayer",
+        "config": {
+          "batch_input_shape": [
+            null,
+            784
+          ],
+          "dtype": "float32",
+          "sparse": false,
+          "ragged": false,
+          "name": "dense_input"
+        }
+      },
+      {
         "class_name": "Dense",
         "config": {
-          "name": "dense_1",
+          "name": "dense",
           "trainable": true,
           "batch_input_shape": [
             null,
@@ -17,11 +30,43 @@
           "activation": "linear",
           "use_bias": true,
           "kernel_initializer": {
-            "class_name": "VarianceScaling",
+            "class_name": "GlorotUniform",
             "config": {
-              "scale": 1.0,
-              "mode": "fan_avg",
-              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "relu"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 10,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
               "seed": null
             }
           },
@@ -42,49 +87,11 @@
           "name": "activation_1",
           "trainable": true,
           "dtype": "float32",
-          "activation": "relu"
-        }
-      },
-      {
-        "class_name": "Dense",
-        "config": {
-          "name": "dense_2",
-          "trainable": true,
-          "dtype": "float32",
-          "units": 10,
-          "activation": "linear",
-          "use_bias": true,
-          "kernel_initializer": {
-            "class_name": "VarianceScaling",
-            "config": {
-              "scale": 1.0,
-              "mode": "fan_avg",
-              "distribution": "uniform",
-              "seed": null
-            }
-          },
-          "bias_initializer": {
-            "class_name": "Zeros",
-            "config": {}
-          },
-          "kernel_regularizer": null,
-          "bias_regularizer": null,
-          "activity_regularizer": null,
-          "kernel_constraint": null,
-          "bias_constraint": null
-        }
-      },
-      {
-        "class_name": "Activation",
-        "config": {
-          "name": "activation_2",
-          "trainable": true,
-          "dtype": "float32",
           "activation": "softmax"
         }
       }
     ]
   },
-  "keras_version": "2.3.1",
+  "keras_version": "2.10.0",
   "backend": "tensorflow"
 }
\ No newline at end of file
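The keras01.json rewrite shows how Keras 2.10 serialises a Sequential model: an explicit InputLayer appears, default layer names lose their _1 offsets, and GlorotUniform replaces the expanded VarianceScaling initializer config. A hedged reconstruction; the first layer's unit count is not visible in the hunk, so the 64 below is a placeholder:

    # Sketch only: a model whose to_json() output has the shape of the
    # updated keras01.json under TensorFlow/Keras 2.10.
    from tensorflow import keras
    from tensorflow.keras import layers

    model = keras.Sequential([
        layers.Dense(64, input_shape=(784,)),  # unit count assumed
        layers.Activation("relu"),
        layers.Dense(10),
        layers.Activation("softmax"),
    ])
    # Under TF 2.10 this serialises an explicit InputLayer and
    # GlorotUniform initializers, matching the new expectation.
    print(model.to_json())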
diff -r b1eda492f063 -r 118e230e85ce test-data/keras02.json
--- a/test-data/keras02.json Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/keras02.json Wed Aug 09 13:40:06 2023 +0000
[Hunk truncated in the original listing and rendered as an escaped byte string. Recoverable changes to keras02.json: "class_name" switches from "Model" to "Functional"; the model is renamed "model_1" -> "model" and layer names drop their suffixes ("embedding_1" -> "embedding", "lstm_1" -> "lstm", "dense_1".."dense_5" -> "dense".."dense_4"), with inbound_nodes and output_layers updated to match; each layer's "name" key moves after its "config"; InputLayer configs gain "ragged": false; the LSTM config gains "time_major": false; "VarianceScaling" initializers become "GlorotUniform" with "shared_object_id" entries; "keras_version" is bumped from "2.3.1" to "2.10.0".]
diff -r b1eda492f063 -r 118e230e85ce test-data/keras03.json
--- a/test-data/keras03.json Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/keras03.json Wed Aug 09 13:40:06 2023 +0000
@@ -1,1 +1,90 @@
-{"class_name": "Sequential", "config": {"name": "sequential_1", "layers": [{"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "batch_input_shape": [null, 17], "dtype": "float32", "units": 100, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": 0}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.1, "noise_shape": null, "seed": 0}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": 0}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "keras_version": "2.2.4", "backend": "tensorflow"}
\ No newline at end of file
+{
+  "class_name": "Sequential",
+  "config": {
+    "name": "sequential",
+    "layers": [
+      {
+        "class_name": "InputLayer",
+        "config": {
+          "batch_input_shape": [
+            null,
+            17
+          ],
+          "dtype": "float32",
+          "sparse": false,
+          "ragged": false,
+          "name": "dense_input"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense",
+          "trainable": true,
+          "batch_input_shape": [
+            null,
+            17
+          ],
+          "dtype": "float32",
+          "units": 100,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Dropout",
+        "config": {
+          "name": "dropout",
+          "trainable": true,
+          "dtype": "float32",
+          "rate": 0.1,
+          "noise_shape": null,
+          "seed": null
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 1,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      }
+    ]
+  },
+  "keras_version": "2.10.0",
+  "backend": "tensorflow"
+}
\ No newline at end of file
diff -r b1eda492f063 -r 118e230e85ce test-data/keras04.json
--- a/test-data/keras04.json Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/keras04.json Wed Aug 09 13:40:06 2023 +0000
@@ -1,12 +1,25 @@
 {
   "class_name": "Sequential",
   "config": {
-    "name": "sequential_1",
+    "name": "sequential",
     "layers": [
       {
+        "class_name": "InputLayer",
+        "config": {
+          "batch_input_shape": [
+            null,
+            17
+          ],
+          "dtype": "float32",
+          "sparse": false,
+          "ragged": false,
+          "name": "dense_input"
+        }
+      },
+      {
         "class_name": "Dense",
         "config": {
-          "name": "dense_1",
+          "name": "dense",
           "trainable": true,
           "batch_input_shape": [
             null,
@@ -17,11 +30,43 @@
           "activation": "linear",
           "use_bias": true,
           "kernel_initializer": {
-            "class_name": "VarianceScaling",
+            "class_name": "GlorotUniform",
             "config": {
-              "scale": 1.0,
-              "mode": "fan_avg",
-              "distribution": "uniform",
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Activation",
+        "config": {
+          "name": "activation",
+          "trainable": true,
+          "dtype": "float32",
+          "activation": "linear"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 1,
+          "activation": "linear",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
               "seed": null
             }
           },
@@ -44,47 +89,9 @@
           "dtype": "float32",
           "activation": "linear"
         }
-      },
-      {
-        "class_name": "Dense",
-        "config": {
-          "name": "dense_2",
-          "trainable": true,
-          "dtype": "float32",
-          "units": 1,
-          "activation": "linear",
-          "use_bias": true,
-          "kernel_initializer": {
-            "class_name": "VarianceScaling",
-            "config": {
-              "scale": 1.0,
-              "mode": "fan_avg",
-              "distribution": "uniform",
-              "seed": null
-            }
-          },
-          "bias_initializer": {
-            "class_name": "Zeros",
-            "config": {}
-          },
-          "kernel_regularizer": null,
-          "bias_regularizer": null,
-          "activity_regularizer": null,
-          "kernel_constraint": null,
-          "bias_constraint": null
-        }
-      },
-      {
-        "class_name": "Activation",
-        "config": {
-          "name": "activation_2",
-          "trainable": true,
-          "dtype": "float32",
-          "activation": "linear"
-        }
       }
     ]
   },
-  "keras_version": "2.3.1",
+  "keras_version": "2.10.0",
   "backend": "tensorflow"
 }
\ No newline at end of file
diff -r b1eda492f063 -r 118e230e85ce test-data/keras05.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/keras05.json Wed Aug 09 13:40:06 2023 +0000
@@ -0,0 +1,243 @@
+{
+  "class_name": "Sequential",
+  "config": {
+    "name": "sequential",
+    "layers": [
+      {
+        "class_name": "InputLayer",
+        "config": {
+          "batch_input_shape": [
+            null,
+            32,
+            32,
+            3
+          ],
+          "dtype": "float32",
+          "sparse": false,
+          "ragged": false,
+          "name": "conv2d_input"
+        }
+      },
+      {
+        "class_name": "Conv2D",
+        "config": {
+          "name": "conv2d",
+          "trainable": true,
+          "batch_input_shape": [
+            null,
+            32,
+            32,
+            3
+          ],
+          "dtype": "float32",
+          "filters": 32,
+          "kernel_size": [
+            3,
+            3
+          ],
+          "strides": [
+            1,
+            1
+          ],
+          "padding": "same",
+          "data_format": "channels_last",
+          "dilation_rate": [
+            1,
+            1
+          ],
+          "groups": 1,
+          "activation": "relu",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "MaxPooling2D",
+        "config": {
+          "name": "max_pooling2d",
+          "trainable": true,
+          "dtype": "float32",
+          "pool_size": [
+            2,
+            2
+          ],
+          "padding": "valid",
+          "strides": [
+            2,
+            2
+          ],
+          "data_format": "channels_last"
+        }
+      },
+      {
+        "class_name": "Dropout",
+        "config": {
+          "name": "dropout",
+          "trainable": true,
+          "dtype": "float32",
+          "rate": 0.25,
+          "noise_shape": null,
+          "seed": null
+        }
+      },
+      {
+        "class_name": "Conv2D",
+        "config": {
+          "name": "conv2d_1",
+          "trainable": true,
+          "dtype": "float32",
+          "filters": 64,
+          "kernel_size": [
+            3,
+            3
+          ],
+          "strides": [
+            1,
+            1
+          ],
+          "padding": "same",
+          "data_format": "channels_last",
+          "dilation_rate": [
+            1,
+            1
+          ],
+          "groups": 1,
+          "activation": "relu",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "MaxPooling2D",
+        "config": {
+          "name": "max_pooling2d_1",
+          "trainable": true,
+          "dtype": "float32",
+          "pool_size": [
+            2,
+            2
+          ],
+          "padding": "valid",
+          "strides": [
+            2,
+            2
+          ],
+          "data_format": "channels_last"
+        }
+      },
+      {
+        "class_name": "Dropout",
+        "config": {
+          "name": "dropout_1",
+          "trainable": true,
+          "dtype": "float32",
+          "rate": 0.25,
+          "noise_shape": null,
+          "seed": null
+        }
+      },
+      {
+        "class_name": "Flatten",
+        "config": {
+          "name": "flatten",
+          "trainable": true,
+          "dtype": "float32",
+          "data_format": "channels_last"
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 512,
+          "activation": "relu",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      },
+      {
+        "class_name": "Dropout",
+        "config": {
+          "name": "dropout_2",
+          "trainable": true,
+          "dtype": "float32",
+          "rate": 0.5,
+          "noise_shape": null,
+          "seed": null
+        }
+      },
+      {
+        "class_name": "Dense",
+        "config": {
+          "name": "dense_1",
+          "trainable": true,
+          "dtype": "float32",
+          "units": 10,
+          "activation": "softmax",
+          "use_bias": true,
+          "kernel_initializer": {
+            "class_name": "GlorotUniform",
+            "config": {
+              "seed": null
+            }
+          },
+          "bias_initializer": {
+            "class_name": "Zeros",
+            "config": {}
+          },
+          "kernel_regularizer": null,
+          "bias_regularizer": null,
+          "activity_regularizer": null,
+          "kernel_constraint": null,
+          "bias_constraint": null
+        }
+      }
+    ]
+  },
+  "keras_version": "2.10.0",
+  "backend": "tensorflow"
+}
\ No newline at end of file
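keras05.json is a new expectation describing a small CIFAR-style convolutional network. A hedged reconstruction of the architecture it serialises; under TensorFlow/Keras 2.10, to_json() for this model produces an equivalent config:

    # Sketch of the Sequential CNN encoded in keras05.json.
    from tensorflow import keras
    from tensorflow.keras import layers

    model = keras.Sequential([
        layers.Conv2D(32, (3, 3), padding="same", activation="relu",
                      input_shape=(32, 32, 3)),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Dropout(0.25),
        layers.Conv2D(64, (3, 3), padding="same", activation="relu"),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Dropout(0.25),
        layers.Flatten(),
        layers.Dense(512, activation="relu"),
        layers.Dropout(0.5),
        layers.Dense(10, activation="softmax"),
    ])
    print(model.to_json())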
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_batch_model01
Binary file test-data/keras_batch_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_batch_model02
Binary file test-data/keras_batch_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_batch_model03
Binary file test-data/keras_batch_model03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_batch_model04
Binary file test-data/keras_batch_model04 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_batch_model05
Binary file test-data/keras_batch_model05 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_model01
Binary file test-data/keras_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_model02
Binary file test-data/keras_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/keras_model04
Binary file test-data/keras_model04 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/lda_model01
Binary file test-data/lda_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/lda_model02
Binary file test-data/lda_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/lda_prediction_result01.tabular
--- a/test-data/lda_prediction_result01.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/lda_prediction_result01.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,6 @@
-3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+0 1 2 3 0
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/lda_prediction_result02.tabular
--- a/test-data/lda_prediction_result02.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/lda_prediction_result02.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,6 @@
-3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+0 1 2 3 0
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/lgb_class_model.txt
--- a/test-data/lgb_class_model.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/lgb_class_model.txt Wed Aug 09 13:40:06 2023 +0000
@@ -7,7 +7,7 @@
 objective=binary sigmoid:1
 feature_names=Column_0 Column_1 Column_2 Column_3
 feature_infos=none none none none
-tree_sizes=228
+tree_sizes=239
 
 Tree=0
 num_leaves=1
@@ -18,18 +18,19 @@
 decision_type=
 left_child=
 right_child=
-leaf_value=-0.40546510810816427
+leaf_value=-0.2876820724517809
 leaf_weight=
 leaf_count=
 internal_value=
 internal_weight=
 internal_count=
+is_linear=0
 shrinkage=1
 
 
 end of trees
 
-feature importances:
+feature_importances:
 
 parameters:
 [boosting: gbdt]
@@ -39,10 +40,14 @@
 [device_type: cpu]
 [data: ]
 [valid: ]
-[num_iterations: 100]
+[num_iterations: 10000]
 [learning_rate: 0.02]
 [num_leaves: 32]
-[num_threads: 0]
+[num_threads: 1]
+[deterministic: 0]
+[force_col_wise: 0]
+[force_row_wise: 0]
+[histogram_pool_size: -1]
 [max_depth: 8]
 [min_data_in_leaf: 20]
 [min_sum_hessian_in_leaf: 39]
@@ -54,11 +59,14 @@
 [feature_fraction: 0.9]
 [feature_fraction_bynode: 1]
 [feature_fraction_seed: 26500]
+[extra_trees: 0]
+[extra_seed: 15724]
 [early_stopping_round: 0]
 [first_metric_only: 0]
 [max_delta_step: 0]
 [lambda_l1: 0.04]
 [lambda_l2: 0.07]
+[linear_lambda: 0]
 [min_gain_to_split: 0.02]
 [drop_rate: 0.1]
 [max_drop: 50]
@@ -75,51 +83,41 @@
 [max_cat_to_onehot: 4]
 [top_k: 20]
 [monotone_constraints: ]
+[monotone_constraints_method: basic]
+[monotone_penalty: 0]
 [feature_contri: ]
 [forcedsplits_filename: ]
-[forcedbins_filename: ]
 [refit_decay_rate: 0.9]
 [cegb_tradeoff: 1]
 [cegb_penalty_split: 0]
 [cegb_penalty_feature_lazy: ]
 [cegb_penalty_feature_coupled: ]
+[path_smooth: 0]
+[interaction_constraints: ]
 [verbosity: -1]
+[saved_feature_importance_type: 0]
+[linear_tree: 0]
 [max_bin: 255]
 [max_bin_by_feature: ]
 [min_data_in_bin: 3]
 [bin_construct_sample_cnt: 200000]
-[histogram_pool_size: -1]
 [data_random_seed: 41]
-[output_model: LightGBM_model.txt]
-[snapshot_freq: -1]
-[input_model: ]
-[output_result: LightGBM_predict_result.txt]
-[initscore_filename: ]
-[valid_data_initscores: ]
-[pre_partition: 0]
+[is_enable_sparse: 1]
 [enable_bundle: 1]
-[max_conflict_rate: 0]
-[is_enable_sparse: 1]
-[sparse_threshold: 0.8]
 [use_missing: 1]
 [zero_as_missing: 0]
+[feature_pre_filter: 1]
+[pre_partition: 0]
 [two_round: 0]
-[save_binary: 0]
 [header: 0]
 [label_column: ]
 [weight_column: ]
 [group_column: ]
 [ignore_column: ]
 [categorical_feature: ]
-[predict_raw_score: 0]
-[predict_leaf_index: 0]
-[predict_contrib: 0]
-[num_iteration_predict: -1]
-[pred_early_stop: 0]
-[pred_early_stop_freq: 10]
-[pred_early_stop_margin: 10]
-[convert_model_language: ]
-[convert_model: gbdt_prediction.cpp]
+[forcedbins_filename: ]
+[precise_float_parser: 0]
+[objective_seed: 19169]
 [num_class: 1]
 [is_unbalance: 0]
 [scale_pos_weight: 1]
@@ -130,13 +128,12 @@
 [fair_c: 1]
 [poisson_max_delta_step: 0.7]
 [tweedie_variance_power: 1.5]
-[max_position: 20]
-[lambdamart_norm: 1]
+[lambdarank_truncation_level: 30]
+[lambdarank_norm: 1]
 [label_gain: ]
-[metric_freq: 1]
-[is_provide_training_metric: 0]
 [eval_at: ]
 [multi_error_top_k: 1]
+[auc_mu_weights: ]
 [num_machines: 1]
 [local_listen_port: 12400]
 [time_out: 120]
@@ -145,6 +142,7 @@
 [gpu_platform_id: -1]
 [gpu_device_id: -1]
 [gpu_use_dp: 0]
+[num_gpu: 1]
 
 end of parameters
 
diff -r b1eda492f063 -r 118e230e85ce test-data/lgb_regr_model.txt
--- a/test-data/lgb_regr_model.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/lgb_regr_model.txt Wed Aug 09 13:40:06 2023 +0000
[Hunk truncated in the original listing and rendered as an escaped byte string. Recoverable changes to lgb_regr_model.txt: the regression booster shrinks drastically (hunk header @@ -7,180034 +7,1937 @@), its trees having been regenerated with num_leaves 4, max_depth 4, num_threads 1 and metric l2 instead of num_leaves 32, max_depth 8, num_threads 0 and an empty metric; each tree gains an is_linear=0 field; the "feature importances:" section is renamed "feature_importances:" with new values; verbosity changes from -1 to 0; and the parameter block picks up the same additions and removals as lgb_class_model.txt above (deterministic, force_col_wise/force_row_wise, extra_trees, linear_lambda, path_smooth, interaction_constraints, num_gpu and related entries added; output_model, convert_model, save_binary and the predict_* options removed).]
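Both LightGBM expectation files were regenerated with a newer LightGBM release, which adds dump fields such as is_linear, deterministic, force_col_wise/force_row_wise and num_gpu, renames the "feature importances:" section to "feature_importances:", and drops the old output_model/convert_model/predict_* bookkeeping. A hedged sketch of how such a text dump is produced for the classification case; the data and most hyper-parameters below are placeholders, not the tool's fixtures:

    # Illustrative only: train a small LightGBM classifier and write the kind
    # of text dump stored in lgb_class_model.txt.
    import numpy as np
    from lightgbm import LGBMClassifier

    rng = np.random.default_rng(42)
    X = rng.normal(size=(200, 4))
    y = (X[:, 0] + 0.5 * rng.normal(size=200) > 0).astype(int)

    clf = LGBMClassifier(
        n_estimators=100,   # the regenerated expectation records num_iterations: 10000
        learning_rate=0.02,
        num_leaves=32,
        max_depth=8,
        reg_alpha=0.04,     # lambda_l1 in the dump
        reg_lambda=0.07,    # lambda_l2 in the dump
        n_jobs=1,
        random_state=42,
    )
    clf.fit(X, y)
    clf.booster_.save_model("lgb_class_model.txt")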
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis01.html
--- a/test-data/ml_vis01.html Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ml_vis01.html Wed Aug 09 13:40:06 2023 +0000
[Hunk truncated in the original listing and rendered as an escaped byte string. Recoverable changes to ml_vis01.html: the embedded plotly.js library is bumped from v1.51.1 (Copyright 2012-2019) to v1.58.4 (Copyright 2012-2020), the surrounding <div>/<script> scaffolding is regenerated, and the figure layout/template JSON is rewritten in the newer Plotly format; the plot keeps its "Feature Importances" title.]
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis02.html
--- a/test-data/ml_vis02.html Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ml_vis02.html Wed Aug 09 13:40:06 2023 +0000
[Hunk truncated in the original listing and rendered as an escaped byte string. Recoverable changes to ml_vis02.html: the same plotly.js bump from v1.51.1 to v1.58.4 and layout/template rewrite as in ml_vis01.html; the figure keeps its "Learning Curve" title with x-axis "No. of samples" and y-axis "Performance Score".]
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis03.html
--- a/test-data/ml_vis03.html Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ml_vis03.html Wed Aug 09 13:40:06 2023 +0000
b'@@ -1,31 +1,67 @@\n <html>\n <head><meta charset="utf-8" /></head>\n <body>\n-    <div>\n-        \n-                <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n+    <div>                        <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n         <script type="text/javascript">/**\n-* plotly.js v1.51.1\n-* Copyright 2012-2019, Plotly, Inc.\n+* plotly.js v1.58.4\n+* Copyright 2012-2020, Plotly, Inc.\n * All rights reserved.\n * Licensed under the MIT license\n */\n-!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).Plotly=t()}}(function(){return function(){return function t(e,r,n){function a(o,s){if(!r[o]){if(!e[o]){var l="function"==typeof require&&require;if(!s&&l)return l(o,!0);if(i)return i(o,!0);var c=new Error("Cannot find module \'"+o+"\'");throw c.code="MODULE_NOT_FOUND",c}var u=r[o]={exports:{}};e[o][0].call(u.exports,function(t){return a(e[o][1][t]||t)},u,u.exports,t,e,r,n)}return r[o].exports}for(var i="function"==typeof require&&require,o=0;o<n.length;o++)a(n[o]);return a}}()({1:[function(t,e,r){"use strict";var n=t("../src/lib"),a={"X,X div":"direction:ltr;font-family:\'Open Sans\', verdana, arial, sans-serif;margin:0;padding:0;","X input,X button":"font-family:\'Open Sans\', verdana, arial, sans-serif;","X input:focus,X button:focus":"outline:none;","X a":"text-decoration:none;","X a:hover":"text-decoration:none;","X .crisp":"shape-rendering:crispEdges;","X .user-select-none":"-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none;","X svg":"overflow:hidden;","X svg a":"fill:#447adb;","X svg a:hover":"fill:#3c6dc5;","X .main-svg":"position:absolute;top:0;left:0;pointer-events:none;","X .main-svg .draglayer":"pointer-events:all;","X .cursor-default":"cursor:default;","X .cursor-pointer":"cursor:pointer;","X .cursor-crosshair":"cursor:crosshair;","X .cursor-move":"cursor:move;","X .cursor-col-resize":"cursor:col-resize;","X .cursor-row-resize":"cursor:row-resize;","X .cursor-ns-resize":"cursor:ns-resize;","X .cursor-ew-resize":"cursor:ew-resize;","X .cursor-sw-resize":"cursor:sw-resize;","X .cursor-s-resize":"cursor:s-resize;","X .cursor-se-resize":"cursor:se-resize;","X .cursor-w-resize":"cursor:w-resize;","X .cursor-e-resize":"cursor:e-resize;","X .cursor-nw-resize":"cursor:nw-resize;","X .cursor-n-resize":"cursor:n-resize;","X .cursor-ne-resize":"cursor:ne-resize;","X .cursor-grab":"cursor:-webkit-grab;cursor:grab;","X .modebar":"position:absolute;top:2px;right:2px;","X .ease-bg":"-webkit-transition:background-color 0.3s ease 0s;-moz-transition:background-color 0.3s ease 0s;-ms-transition:background-color 0.3s ease 0s;-o-transition:background-color 0.3s ease 0s;transition:background-color 0.3s ease 0s;","X .modebar--hover>:not(.watermark)":"opacity:0;-webkit-transition:opacity 0.3s ease 0s;-moz-transition:opacity 0.3s ease 0s;-ms-transition:opacity 0.3s ease 0s;-o-transition:opacity 0.3s ease 0s;transition:opacity 0.3s ease 0s;","X:hover .modebar--hover .modebar-group":"opacity:1;","X .modebar-group":"float:left;display:inline-block;box-sizing:border-box;padding-left:8px;position:relative;vertical-align:middle;white-space:nowrap;","X .modebar-btn":"position:relative;font-size:16px;padding:3px 
4px;height:22px;cursor:pointer;line-height:normal;box-sizing:border-box;","X .modebar-btn svg":"position:relative;top:2px;","X .modebar.vertical":"display:flex;flex-direction:column;flex-wrap:wrap;align-content:flex-end;max-height:100%;","X .modebar.vertical svg":"top:-1px;","X .modebar.vertical .modebar-group":"display:block;float:none;padding-left:0px;padding-bottom:8px;","X .modebar.vertical .modebar-group .modebar-btn":"display:block;text-align:center;","X [data-title]:before,X [data-title]:after":"po'..b'orbar": {"outlinewidth": 0, "ticks": ""}}, "type": "scatterpolargl"}], "scatterternary": [{"marker": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "type": "scatterternary"}], "surface": [{"colorbar": {"outlinewidth": 0, "ticks": ""}, "colorscale": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "type": "surface"}], "table": [{"cells": {"fill": {"color": "#EBF0F8"}, "line": {"color": "white"}}, "header": {"fill": {"color": "#C8D4E3"}, "line": {"color": "white"}}, "type": "table"}]}, "layout": {"annotationdefaults": {"arrowcolor": "#2a3f5f", "arrowhead": 0, "arrowwidth": 1}, "autotypenumbers": "strict", "coloraxis": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "colorscale": {"diverging": [[0, "#8e0152"], [0.1, "#c51b7d"], [0.2, "#de77ae"], [0.3, "#f1b6da"], [0.4, "#fde0ef"], [0.5, "#f7f7f7"], [0.6, "#e6f5d0"], [0.7, "#b8e186"], [0.8, "#7fbc41"], [0.9, "#4d9221"], [1, "#276419"]], "sequential": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "sequentialminus": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]]}, "colorway": ["#636efa", "#EF553B", "#00cc96", "#ab63fa", "#FFA15A", "#19d3f3", "#FF6692", "#B6E880", "#FF97FF", "#FECB52"], "font": {"color": "#2a3f5f"}, "geo": {"bgcolor": "white", "lakecolor": "white", "landcolor": "#E5ECF6", "showlakes": true, "showland": true, "subunitcolor": "white"}, "hoverlabel": {"align": "left"}, "hovermode": "closest", "mapbox": {"style": "light"}, "paper_bgcolor": "white", "plot_bgcolor": "#E5ECF6", "polar": {"angularaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "bgcolor": "#E5ECF6", "radialaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "scene": {"xaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "yaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "zaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}}, "shapedefaults": {"line": {"color": "#2a3f5f"}}, "ternary": {"aaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "baxis": 
{"gridcolor": "white", "linecolor": "white", "ticks": ""}, "bgcolor": "#E5ECF6", "caxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "title": {"x": 0.05}, "xaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}, "yaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}}}, "title": {"text": "Precision-Recall Curve", "x": 0.5, "xanchor": "center", "y": 0.92, "yanchor": "top"}, "xaxis": {"linecolor": "lightslategray", "linewidth": 1, "title": {"text": "Recall"}}, "yaxis": {"linecolor": "lightslategray", "linewidth": 1, "title": {"text": "Precision"}}},                        {"responsive": true}                    )                };                            </script>        </div>\n </body>\n </html>\n\\ No newline at end of file\n'
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis04.html
--- a/test-data/ml_vis04.html Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ml_vis04.html Wed Aug 09 13:40:06 2023 +0000
b'@@ -1,31 +1,67 @@\n <html>\n <head><meta charset="utf-8" /></head>\n <body>\n-    <div>\n-        \n-                <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n+    <div>                        <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n         <script type="text/javascript">/**\n-* plotly.js v1.51.1\n-* Copyright 2012-2019, Plotly, Inc.\n+* plotly.js v1.58.4\n+* Copyright 2012-2020, Plotly, Inc.\n * All rights reserved.\n * Licensed under the MIT license\n */\n-!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).Plotly=t()}}(function(){return function(){return function t(e,r,n){function a(o,s){if(!r[o]){if(!e[o]){var l="function"==typeof require&&require;if(!s&&l)return l(o,!0);if(i)return i(o,!0);var c=new Error("Cannot find module \'"+o+"\'");throw c.code="MODULE_NOT_FOUND",c}var u=r[o]={exports:{}};e[o][0].call(u.exports,function(t){return a(e[o][1][t]||t)},u,u.exports,t,e,r,n)}return r[o].exports}for(var i="function"==typeof require&&require,o=0;o<n.length;o++)a(n[o]);return a}}()({1:[function(t,e,r){"use strict";var n=t("../src/lib"),a={"X,X div":"direction:ltr;font-family:\'Open Sans\', verdana, arial, sans-serif;margin:0;padding:0;","X input,X button":"font-family:\'Open Sans\', verdana, arial, sans-serif;","X input:focus,X button:focus":"outline:none;","X a":"text-decoration:none;","X a:hover":"text-decoration:none;","X .crisp":"shape-rendering:crispEdges;","X .user-select-none":"-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none;","X svg":"overflow:hidden;","X svg a":"fill:#447adb;","X svg a:hover":"fill:#3c6dc5;","X .main-svg":"position:absolute;top:0;left:0;pointer-events:none;","X .main-svg .draglayer":"pointer-events:all;","X .cursor-default":"cursor:default;","X .cursor-pointer":"cursor:pointer;","X .cursor-crosshair":"cursor:crosshair;","X .cursor-move":"cursor:move;","X .cursor-col-resize":"cursor:col-resize;","X .cursor-row-resize":"cursor:row-resize;","X .cursor-ns-resize":"cursor:ns-resize;","X .cursor-ew-resize":"cursor:ew-resize;","X .cursor-sw-resize":"cursor:sw-resize;","X .cursor-s-resize":"cursor:s-resize;","X .cursor-se-resize":"cursor:se-resize;","X .cursor-w-resize":"cursor:w-resize;","X .cursor-e-resize":"cursor:e-resize;","X .cursor-nw-resize":"cursor:nw-resize;","X .cursor-n-resize":"cursor:n-resize;","X .cursor-ne-resize":"cursor:ne-resize;","X .cursor-grab":"cursor:-webkit-grab;cursor:grab;","X .modebar":"position:absolute;top:2px;right:2px;","X .ease-bg":"-webkit-transition:background-color 0.3s ease 0s;-moz-transition:background-color 0.3s ease 0s;-ms-transition:background-color 0.3s ease 0s;-o-transition:background-color 0.3s ease 0s;transition:background-color 0.3s ease 0s;","X .modebar--hover>:not(.watermark)":"opacity:0;-webkit-transition:opacity 0.3s ease 0s;-moz-transition:opacity 0.3s ease 0s;-ms-transition:opacity 0.3s ease 0s;-o-transition:opacity 0.3s ease 0s;transition:opacity 0.3s ease 0s;","X:hover .modebar--hover .modebar-group":"opacity:1;","X .modebar-group":"float:left;display:inline-block;box-sizing:border-box;padding-left:8px;position:relative;vertical-align:middle;white-space:nowrap;","X .modebar-btn":"position:relative;font-size:16px;padding:3px 
4px;height:22px;cursor:pointer;line-height:normal;box-sizing:border-box;","X .modebar-btn svg":"position:relative;top:2px;","X .modebar.vertical":"display:flex;flex-direction:column;flex-wrap:wrap;align-content:flex-end;max-height:100%;","X .modebar.vertical svg":"top:-1px;","X .modebar.vertical .modebar-group":"display:block;float:none;padding-left:0px;padding-bottom:8px;","X .modebar.vertical .modebar-group .modebar-btn":"display:block;text-align:center;","X [data-title]:before,X [data-title]:after":"po'..b'ype": "scatterpolargl"}], "scatterternary": [{"marker": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "type": "scatterternary"}], "surface": [{"colorbar": {"outlinewidth": 0, "ticks": ""}, "colorscale": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "type": "surface"}], "table": [{"cells": {"fill": {"color": "#EBF0F8"}, "line": {"color": "white"}}, "header": {"fill": {"color": "#C8D4E3"}, "line": {"color": "white"}}, "type": "table"}]}, "layout": {"annotationdefaults": {"arrowcolor": "#2a3f5f", "arrowhead": 0, "arrowwidth": 1}, "autotypenumbers": "strict", "coloraxis": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "colorscale": {"diverging": [[0, "#8e0152"], [0.1, "#c51b7d"], [0.2, "#de77ae"], [0.3, "#f1b6da"], [0.4, "#fde0ef"], [0.5, "#f7f7f7"], [0.6, "#e6f5d0"], [0.7, "#b8e186"], [0.8, "#7fbc41"], [0.9, "#4d9221"], [1, "#276419"]], "sequential": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "sequentialminus": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]]}, "colorway": ["#636efa", "#EF553B", "#00cc96", "#ab63fa", "#FFA15A", "#19d3f3", "#FF6692", "#B6E880", "#FF97FF", "#FECB52"], "font": {"color": "#2a3f5f"}, "geo": {"bgcolor": "white", "lakecolor": "white", "landcolor": "#E5ECF6", "showlakes": true, "showland": true, "subunitcolor": "white"}, "hoverlabel": {"align": "left"}, "hovermode": "closest", "mapbox": {"style": "light"}, "paper_bgcolor": "white", "plot_bgcolor": "#E5ECF6", "polar": {"angularaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "bgcolor": "#E5ECF6", "radialaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "scene": {"xaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "yaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "zaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}}, "shapedefaults": {"line": {"color": "#2a3f5f"}}, "ternary": {"aaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "baxis": {"gridcolor": "white", "linecolor": "white", 
"ticks": ""}, "bgcolor": "#E5ECF6", "caxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "title": {"x": 0.05}, "xaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}, "yaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}}}, "title": {"text": "Receiver Operating Characteristic (ROC) Curve", "x": 0.5, "xanchor": "center", "y": 0.92, "yanchor": "top"}, "xaxis": {"linecolor": "lightslategray", "linewidth": 1, "title": {"text": "False Positive Rate"}}, "yaxis": {"linecolor": "lightslategray", "linewidth": 1, "title": {"text": "True Positive Rate"}}},                        {"responsive": true}                    )                };                            </script>        </div>\n </body>\n </html>\n\\ No newline at end of file\n'
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis05.html
--- a/test-data/ml_vis05.html Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ml_vis05.html Wed Aug 09 13:40:06 2023 +0000
b'@@ -1,31 +1,67 @@\n <html>\n <head><meta charset="utf-8" /></head>\n <body>\n-    <div>\n-        \n-                <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n+    <div>                        <script type="text/javascript">window.PlotlyConfig = {MathJaxConfig: \'local\'};</script>\n         <script type="text/javascript">/**\n-* plotly.js v1.51.1\n-* Copyright 2012-2019, Plotly, Inc.\n+* plotly.js v1.58.4\n+* Copyright 2012-2020, Plotly, Inc.\n * All rights reserved.\n * Licensed under the MIT license\n */\n-!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).Plotly=t()}}(function(){return function(){return function t(e,r,n){function a(o,s){if(!r[o]){if(!e[o]){var l="function"==typeof require&&require;if(!s&&l)return l(o,!0);if(i)return i(o,!0);var c=new Error("Cannot find module \'"+o+"\'");throw c.code="MODULE_NOT_FOUND",c}var u=r[o]={exports:{}};e[o][0].call(u.exports,function(t){return a(e[o][1][t]||t)},u,u.exports,t,e,r,n)}return r[o].exports}for(var i="function"==typeof require&&require,o=0;o<n.length;o++)a(n[o]);return a}}()({1:[function(t,e,r){"use strict";var n=t("../src/lib"),a={"X,X div":"direction:ltr;font-family:\'Open Sans\', verdana, arial, sans-serif;margin:0;padding:0;","X input,X button":"font-family:\'Open Sans\', verdana, arial, sans-serif;","X input:focus,X button:focus":"outline:none;","X a":"text-decoration:none;","X a:hover":"text-decoration:none;","X .crisp":"shape-rendering:crispEdges;","X .user-select-none":"-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none;","X svg":"overflow:hidden;","X svg a":"fill:#447adb;","X svg a:hover":"fill:#3c6dc5;","X .main-svg":"position:absolute;top:0;left:0;pointer-events:none;","X .main-svg .draglayer":"pointer-events:all;","X .cursor-default":"cursor:default;","X .cursor-pointer":"cursor:pointer;","X .cursor-crosshair":"cursor:crosshair;","X .cursor-move":"cursor:move;","X .cursor-col-resize":"cursor:col-resize;","X .cursor-row-resize":"cursor:row-resize;","X .cursor-ns-resize":"cursor:ns-resize;","X .cursor-ew-resize":"cursor:ew-resize;","X .cursor-sw-resize":"cursor:sw-resize;","X .cursor-s-resize":"cursor:s-resize;","X .cursor-se-resize":"cursor:se-resize;","X .cursor-w-resize":"cursor:w-resize;","X .cursor-e-resize":"cursor:e-resize;","X .cursor-nw-resize":"cursor:nw-resize;","X .cursor-n-resize":"cursor:n-resize;","X .cursor-ne-resize":"cursor:ne-resize;","X .cursor-grab":"cursor:-webkit-grab;cursor:grab;","X .modebar":"position:absolute;top:2px;right:2px;","X .ease-bg":"-webkit-transition:background-color 0.3s ease 0s;-moz-transition:background-color 0.3s ease 0s;-ms-transition:background-color 0.3s ease 0s;-o-transition:background-color 0.3s ease 0s;transition:background-color 0.3s ease 0s;","X .modebar--hover>:not(.watermark)":"opacity:0;-webkit-transition:opacity 0.3s ease 0s;-moz-transition:opacity 0.3s ease 0s;-ms-transition:opacity 0.3s ease 0s;-o-transition:opacity 0.3s ease 0s;transition:opacity 0.3s ease 0s;","X:hover .modebar--hover .modebar-group":"opacity:1;","X .modebar-group":"float:left;display:inline-block;box-sizing:border-box;padding-left:8px;position:relative;vertical-align:middle;white-space:nowrap;","X .modebar-btn":"position:relative;font-size:16px;padding:3px 
4px;height:22px;cursor:pointer;line-height:normal;box-sizing:border-box;","X .modebar-btn svg":"position:relative;top:2px;","X .modebar.vertical":"display:flex;flex-direction:column;flex-wrap:wrap;align-content:flex-end;max-height:100%;","X .modebar.vertical svg":"top:-1px;","X .modebar.vertical .modebar-group":"display:block;float:none;padding-left:0px;padding-bottom:8px;","X .modebar.vertical .modebar-group .modebar-btn":"display:block;text-align:center;","X [data-title]:before,X [data-title]:after":"po'..b'outlinewidth": 0, "ticks": ""}}, "type": "scatterpolar"}], "scatterpolargl": [{"marker": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "type": "scatterpolargl"}], "scatterternary": [{"marker": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "type": "scatterternary"}], "surface": [{"colorbar": {"outlinewidth": 0, "ticks": ""}, "colorscale": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "type": "surface"}], "table": [{"cells": {"fill": {"color": "#EBF0F8"}, "line": {"color": "white"}}, "header": {"fill": {"color": "#C8D4E3"}, "line": {"color": "white"}}, "type": "table"}]}, "layout": {"annotationdefaults": {"arrowcolor": "#2a3f5f", "arrowhead": 0, "arrowwidth": 1}, "autotypenumbers": "strict", "coloraxis": {"colorbar": {"outlinewidth": 0, "ticks": ""}}, "colorscale": {"diverging": [[0, "#8e0152"], [0.1, "#c51b7d"], [0.2, "#de77ae"], [0.3, "#f1b6da"], [0.4, "#fde0ef"], [0.5, "#f7f7f7"], [0.6, "#e6f5d0"], [0.7, "#b8e186"], [0.8, "#7fbc41"], [0.9, "#4d9221"], [1, "#276419"]], "sequential": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]], "sequentialminus": [[0.0, "#0d0887"], [0.1111111111111111, "#46039f"], [0.2222222222222222, "#7201a8"], [0.3333333333333333, "#9c179e"], [0.4444444444444444, "#bd3786"], [0.5555555555555556, "#d8576b"], [0.6666666666666666, "#ed7953"], [0.7777777777777778, "#fb9f3a"], [0.8888888888888888, "#fdca26"], [1.0, "#f0f921"]]}, "colorway": ["#636efa", "#EF553B", "#00cc96", "#ab63fa", "#FFA15A", "#19d3f3", "#FF6692", "#B6E880", "#FF97FF", "#FECB52"], "font": {"color": "#2a3f5f"}, "geo": {"bgcolor": "white", "lakecolor": "white", "landcolor": "#E5ECF6", "showlakes": true, "showland": true, "subunitcolor": "white"}, "hoverlabel": {"align": "left"}, "hovermode": "closest", "mapbox": {"style": "light"}, "paper_bgcolor": "white", "plot_bgcolor": "#E5ECF6", "polar": {"angularaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "bgcolor": "#E5ECF6", "radialaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "scene": {"xaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "yaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}, "zaxis": {"backgroundcolor": "#E5ECF6", "gridcolor": "white", "gridwidth": 2, "linecolor": "white", "showbackground": true, "ticks": "", "zerolinecolor": "white"}}, "shapedefaults": {"line": {"color": 
"#2a3f5f"}}, "ternary": {"aaxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "baxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}, "bgcolor": "#E5ECF6", "caxis": {"gridcolor": "white", "linecolor": "white", "ticks": ""}}, "title": {"x": 0.05}, "xaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}, "yaxis": {"automargin": true, "gridcolor": "white", "linecolor": "white", "ticks": "", "title": {"standoff": 15}, "zerolinecolor": "white", "zerolinewidth": 2}}}, "title": {"x": 0.5, "xanchor": "center", "y": 0.92, "yanchor": "top"}, "xaxis": {"title": {"text": "Number of features selected"}}, "yaxis": {"title": {"text": "Cross validation score"}}},                        {"responsive": true}                    )                };                            </script>        </div>\n </body>\n </html>\n\\ No newline at end of file\n'
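Note: the ml_vis02-ml_vis05 HTML fixtures above are self-contained Plotly pages (learning curve, precision-recall curve, ROC curve, and a feature-selection score curve); the substantive change in each diff is the embedded plotly.js bundle moving from v1.51.1 (2012-2019) to v1.58.4 (2012-2020), plus reflowed title/axis layout. As a rough, hypothetical sketch (data, trace names and the output file name are illustrative only; the fixtures themselves come from the repository's ml_visualization_ex.py tool), writing a figure with include_plotlyjs=True inlines whatever plotly.js version the installed plotly package ships, which is why a dependency bump rewrites these files wholesale:

    # Hypothetical sketch: regenerate a single-file learning-curve page like ml_vis02.html.
    # The embedded plotly.js version (1.51.1 vs 1.58.4 in the diff) is whatever bundle the
    # installed `plotly` package carries, so upgrading plotly rewrites the whole fixture.
    import plotly.graph_objects as go

    train_sizes = [17, 56, 95, 134, 174]            # illustrative sample counts
    train_scores = [0.97, 0.97, 0.97, 0.97, 0.97]   # illustrative mean train scores
    test_scores = [0.70, 0.80, 0.81, 0.80, 0.82]    # illustrative mean test scores

    fig = go.Figure()
    fig.add_trace(go.Scatter(x=train_sizes, y=train_scores, mode="lines+markers", name="Train"))
    fig.add_trace(go.Scatter(x=train_sizes, y=test_scores, mode="lines+markers", name="Test"))
    fig.update_layout(
        title=dict(text="Learning Curve", x=0.5, xanchor="center", y=0.92, yanchor="top"),
        xaxis_title="No. of samples",
        yaxis_title="Performance Score",
    )
    # include_plotlyjs=True embeds the full plotly.js bundle, producing one standalone HTML file.
    fig.write_html("ml_vis02.html", include_plotlyjs=True)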
diff -r b1eda492f063 -r 118e230e85ce test-data/ml_vis05.png
Binary file test-data/ml_vis05.png has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/model_fit01
Binary file test-data/model_fit01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/model_fit02
Binary file test-data/model_fit02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/model_fit02.h5
Binary file test-data/model_fit02.h5 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/model_pred01.tabular
--- a/test-data/model_pred01.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/model_pred01.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,262 +1,262 @@
 Predicted
-71.129364
-60.96111
-77.885765
-57.212738
-51.806957
-52.089592
-51.571884
-80.762184
-36.772987
-41.643093
-46.386948
-77.97063
-72.768776
-40.0386
-79.81385
-74.40216
-52.089592
-75.51107
-55.705868
-39.944202
-49.643826
-59.17941
-69.848915
-64.62096
-48.310116
-43.391766
-68.25893
-60.198105
-65.16974
-72.130005
-56.351482
-53.20132
-56.86578
-54.342987
-43.521133
-59.663773
-66.097626
-51.960022
-41.559486
-45.16049
-66.40008
-71.488754
-45.16049
-63.34996
-69.83631
-55.652687
-61.311596
-71.85501
-75.12588
-54.93247
-70.09855
-74.20223
-57.898273
-55.23022
-75.70524
-66.94729
-65.12762
-59.3189
-61.22922
-61.2382
-54.017147
-51.633373
-51.633373
-65.16974
-65.16873
-57.874527
-59.740753
-43.990814
-66.06423
-64.436615
-41.245773
-63.278465
-63.27533
-71.13793
-65.47819
-72.620995
-62.598015
-36.986706
-73.2002
-71.966644
-72.912926
-75.46711
-55.12616
-46.19641
-87.20736
-72.11753
-57.952766
-84.67858
-69.21688
-64.257095
-43.59384
-44.723145
-67.051605
-50.021965
-69.202095
-75.10072
-70.80699
-83.08025
-69.62026
-42.441116
-64.38655
-59.430386
-69.366035
-73.87479
-59.973484
-75.76153
-56.195892
-71.16636
-60.419106
-61.630756
-51.81593
-54.924137
-60.73048
-78.496635
-77.921555
-73.66453
-60.904953
-71.26717
-72.01454
-53.52841
-46.66952
-54.504898
-56.28563
-59.398067
-72.71433
-51.745968
-67.80466
-51.571823
-52.010742
-54.19355
-74.193825
-64.57627
-67.48214
-68.41867
-82.102806
-55.8638
-76.90198
-62.577324
-73.70229
-78.93923
-73.51925
-54.81887
-65.2422
-59.700085
-84.08965
-64.35592
-54.001873
-41.397793
-64.64837
-62.784557
-42.990005
-45.430832
-52.089592
-60.374348
-51.67288
-62.4257
-79.536285
-76.4169
-55.978775
-74.43581
-76.89248
-65.3203
-72.10233
-59.23278
-51.736633
-73.13266
-59.45746
-73.0939
-70.58273
-53.08009
-49.893116
-73.89228
-52.64392
-54.801548
-63.534626
-68.1002
-63.70472
-63.8851
-63.268097
-62.438057
-61.989746
-71.47914
-73.92875
-48.089043
-54.874943
-50.261494
-69.11724
-57.448387
-50.528027
-58.67657
-73.969376
-53.745205
-74.81751
-85.582954
-75.10767
-48.855537
-70.66616
-41.341694
-48.55276
-63.48302
-73.02358
-69.50546
-55.603634
-74.26824
-76.03213
-62.601646
-81.99045
-59.26651
-44.504597
-53.54178
-55.247334
-82.123795
-51.84111
-66.27524
-66.23033
-58.565033
-67.452
-72.54107
-49.840427
-70.26608
-62.447872
-67.045
-42.600086
-64.88309
-55.31232
-39.07865
-71.81975
-59.447086
-53.20132
-75.12621
-72.9902
-53.1043
-72.42816
-72.10233
-55.836628
-53.2467
-74.670074
-74.5721
-54.103737
-49.212822
-67.238785
-60.09495
-74.5011
-63.0043
-67.7362
-53.029213
-74.860016
-78.597946
-75.369064
-60.000134
-68.83947
-40.24504
-81.21449
-61.465557
-42.74572
-52.089592
-73.162025
-52.033802
-79.690926
-62.542553
-59.557045
+69.6
+59.1
+77.5
+57.4
+51.8
+53.7
+52.4
+81.3
+36.5
+41.3
+46.5
+78.2
+69.6
+40.0
+80.4
+76.5
+52.6
+75.2
+53.7
+41.8
+50.4
+60.6
+72.0
+66.3
+48.4
+42.1
+67.2
+61.8
+67.4
+74.3
+60.0
+52.3
+56.6
+53.8
+46.2
+58.2
+65.5
+54.1
+40.3
+45.5
+67.1
+73.5
+46.6
+65.3
+66.4
+57.4
+60.1
+71.8
+74.8
+54.8
+68.1
+75.2
+58.9
+52.9
+75.4
+67.7
+64.3
+59.6
+61.4
+59.1
+53.2
+51.9
+52.0
+65.1
+66.0
+58.7
+62.0
+42.1
+69.4
+65.7
+42.4
+66.7
+67.8
+72.9
+64.8
+70.8
+61.5
+35.7
+74.3
+72.0
+73.8
+78.3
+56.8
+44.4
+84.5
+72.8
+58.5
+82.4
+69.1
+66.2
+41.6
+42.4
+66.1
+52.4
+70.6
+78.1
+70.8
+82.3
+70.5
+43.3
+63.0
+59.3
+68.4
+73.9
+64.5
+78.2
+55.5
+73.1
+61.8
+59.0
+49.2
+54.1
+63.5
+76.3
+79.4
+73.8
+61.1
+74.8
+70.7
+55.6
+45.6
+54.4
+57.5
+60.9
+74.4
+50.9
+66.3
+54.0
+52.8
+53.2
+75.4
+66.2
+69.9
+72.1
+82.0
+57.0
+75.5
+63.0
+73.2
+79.8
+73.6
+55.1
+64.6
+58.9
+85.0
+64.8
+54.2
+42.6
+66.1
+64.1
+42.9
+42.9
+52.5
+60.3
+52.4
+60.2
+78.7
+76.1
+57.8
+74.1
+78.4
+64.9
+70.1
+57.7
+50.5
+76.6
+61.6
+72.0
+69.7
+52.4
+50.3
+77.4
+53.2
+54.3
+62.0
+70.1
+65.7
+63.7
+62.9
+63.3
+62.4
+69.8
+79.7
+48.1
+55.5
+49.3
+70.9
+55.8
+49.1
+59.1
+72.0
+50.5
+75.6
+85.3
+75.2
+47.2
+72.1
+41.6
+49.2
+60.4
+75.2
+68.4
+55.1
+75.0
+76.2
+61.7
+83.3
+58.0
+43.4
+52.4
+56.4
+82.7
+50.3
+66.1
+63.9
+59.5
+67.6
+73.2
+50.0
+69.1
+60.1
+65.3
+43.6
+66.5
+54.5
+38.9
+72.1
+59.9
+52.4
+76.8
+74.3
+49.8
+70.6
+71.8
+58.2
+53.0
+74.7
+76.7
+55.4
+50.2
+68.3
+62.0
+76.4
+62.1
+64.5
+54.3
+73.9
+82.0
+80.0
+59.7
+70.4
+41.7
+79.9
+65.2
+45.7
+52.7
+74.3
+52.4
+77.2
+61.5
+60.6
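Note: model_pred01.tabular is a single "Predicted" column; the refreshed values differ from the old ones beyond formatting and are now written with one decimal place, consistent with a retrained model whose output is rounded before saving. Purely as an assumed illustration (estimator, data and the rounding step are placeholders, not the actual configuration of model_prediction.py), a table of this shape can be produced with:

    # Hypothetical sketch: write a one-column "Predicted" tabular file like model_pred01.tabular.
    import numpy as np
    import pandas as pd
    from sklearn.ensemble import GradientBoostingRegressor

    rng = np.random.RandomState(10)
    X = rng.rand(260, 5)            # made-up feature matrix
    y = 40 + 45 * rng.rand(260)     # made-up regression target in a similar value range
    model = GradientBoostingRegressor(random_state=10).fit(X, y)

    # One decimal place, matching the formatting of the new fixture values (69.6, 59.1, ...).
    pd.DataFrame({"Predicted": model.predict(X).round(1)}).to_csv(
        "model_pred01.tabular", sep="\t", index=False
    )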
diff -r b1eda492f063 -r 118e230e85ce test-data/mv_result03.tabular
--- a/test-data/mv_result03.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/mv_result03.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,6 +1,6 @@
 train_sizes_abs mean_train_scores std_train_scores mean_test_scores std_test_scores
-17 0.9668700841937653 0.00277836829836518 0.7008862995946905 0.03857541198731935
-56 0.9730008602419361 0.006839342612121988 0.7963376762427242 0.004846330083938778
-95 0.9728783377589098 0.0037790183626530663 0.814592845745573 0.020457691766770824
-134 0.9739086338111185 0.001627343246847077 0.7985540571195479 0.03954641079310707
-174 0.9726218628287785 0.0032867750457225182 0.8152971572131146 0.04280261115004303
+20 0.9605798108391495 0.013105795110973342 0.6793566676714862 0.0542758786671521
+67 0.9747069603753047 0.004771330317769553 0.7992159150914689 0.039915706163261254
+114 0.973452438813126 0.003149742353770294 0.7990274054156885 0.04678801383406503
+161 0.9740816834566439 0.003602225449485708 0.8090624629465598 0.055518221301045424
+208 0.9735886594650832 0.002795792155754044 0.8124639568342318 0.049003565134966405
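Note: mv_result03.tabular aggregates scikit-learn's learning_curve output into absolute training sizes plus the mean/std of the train and test scores; the largest new size (208) is what 5-fold splitting of roughly 260 rows yields (4/5 of the data), although the tool's actual cv settings are not visible in this diff. A minimal sketch of assembling such a table (dataset, estimator and cv below are assumptions):

    # Hypothetical sketch: build an mv_result03-style table from sklearn.model_selection.learning_curve.
    import numpy as np
    import pandas as pd
    from sklearn.datasets import make_regression
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.model_selection import learning_curve

    X, y = make_regression(n_samples=260, n_features=10, noise=10.0, random_state=0)

    train_sizes_abs, train_scores, test_scores = learning_curve(
        RandomForestRegressor(n_estimators=50, random_state=0),
        X, y, cv=5, train_sizes=np.linspace(0.1, 1.0, 5),
    )

    pd.DataFrame({
        "train_sizes_abs": train_sizes_abs,
        "mean_train_scores": train_scores.mean(axis=1),
        "std_train_scores": train_scores.std(axis=1),
        "mean_test_scores": test_scores.mean(axis=1),
        "std_test_scores": test_scores.std(axis=1),
    }).to_csv("mv_result03.tabular", sep="\t", index=False)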
diff -r b1eda492f063 -r 118e230e85ce test-data/mv_result05.tabular
--- a/test-data/mv_result05.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/mv_result05.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,262 +1,262 @@
 Predicted
-70.16
-62.06
-83.04
-62.84
-48.63
-51.25
-54.98
-80.3
-42.84
-41.52
-43.83
-73.15
-74.22
-42.88
-74.93
-72.9
-53.74
-78.86
-59.0
-40.28
-54.52
-58.34
-62.74
-62.35
-49.15
-41.92
-65.59
-59.91
-66.49
-72.08
-60.44
-53.84
-54.82
-52.66
-42.37
+69.81
+61.35
+83.9
+65.02
+52.65
+52.98
+52.07
+77.35
+41.14
+42.74
+43.6
+75.52
+73.2
+39.02
+74.77
+72.29
+51.94
+75.38
+59.19
+41.46
+53.48
+59.42
+62.89
+61.84
+50.7
+41.44
+65.62
+62.8
+66.92
+71.85
+58.18
+52.35
+55.23
+53.09
+41.79
+59.87
+66.91
+52.78
+41.84
+48.58
+66.83
+74.34
+51.07
+68.61
+71.7
+56.09
+61.36
+72.82
+74.68
+54.08
+72.13
+74.44
+63.53
+53.48
+75.28
+67.92
+65.6
+60.06
+62.87
 61.3
-63.14
-50.62
-42.75
-47.39
-67.8
-73.58
-49.97
-67.04
-67.45
-54.67
-64.87
-77.23
-73.52
-53.55
-70.53
-77.98
-61.99
-53.08
-78.12
-66.55
-63.95
-60.57
-61.6
-60.37
-55.29
-54.31
-52.54
-65.31
-61.51
-57.3
-60.02
-43.64
-74.78
-68.26
-42.72
-61.26
-61.25
-71.58
-61.03
-70.53
-70.25
-43.4
-71.39
-72.31
-72.7
-72.11
-53.55
-43.4
-80.6
-73.72
-58.86
-76.71
-68.36
-60.26
-48.56
-38.96
-69.67
-52.9
-67.63
-75.12
-70.92
-70.89
-67.05
-43.89
-59.94
-62.98
-71.1
-79.22
-77.31
-79.06
-61.11
-66.32
-54.7
-61.1
-54.59
-58.7
-59.6
-73.79
-72.69
-81.83
-61.08
-69.21
-74.8
-54.37
-50.85
-53.07
-58.53
-55.44
+55.09
+52.1
+51.31
+67.17
+60.6
+58.52
+59.42
+39.86
+73.74
+69.04
+44.29
+60.89
+62.17
+69.75
+62.33
 72.62
-54.14
-68.12
-48.81
-50.11
-56.06
+68.17
+41.09
+73.53
+73.62
+71.88
 73.63
-63.29
-71.0
-74.87
-81.24
-54.67
-66.96
-61.37
-74.84
-76.71
-69.27
-56.53
-71.91
-58.74
-77.83
-64.57
-51.93
-42.84
-64.11
-59.47
-42.46
-43.79
-51.75
-63.98
-54.71
-64.95
-79.72
-72.12
-60.66
+53.38
+44.12
+80.25
+73.34
+59.64
+77.4
+67.36
+62.6
+46.9
+38.04
+69.07
+48.53
+66.98
+74.16
+69.35
+72.89
+67.07
+44.14
+66.28
+63.51
+72.29
+73.48
+72.88
+77.9
+59.52
+63.5
+55.96
+63.46
+53.09
+61.4
+56.0
+68.21
+77.32
+77.43
+60.58
+69.71
+74.62
+54.38
+48.23
+53.22
+56.79
+55.28
+71.08
+54.91
+69.47
+49.27
+49.65
+52.86
+75.32
+65.0
+67.91
+72.15
+80.44
+52.54
+72.24
+65.5
+75.2
+81.07
+66.65
+55.5
+70.57
+59.21
+80.63
+63.39
+52.38
+40.69
+66.8
+59.82
+42.19
+43.73
+51.48
+63.04
+50.97
+61.42
+78.74
+76.58
+60.19
+76.9
+73.98
+60.55
+74.42
+59.99
+53.45
+78.84
+58.49
+72.79
+73.1
+54.81
+48.84
+79.51
+53.23
+55.51
+63.24
+69.39
+64.01
+67.14
+61.97
+64.0
+62.21
+77.38
+68.57
+48.58
+57.18
+52.33
+78.77
+58.54
+48.64
+58.41
+76.36
+55.65
+72.32
+81.74
+76.48
+52.03
+67.93
+45.46
+48.06
+64.08
+74.58
+73.99
+59.25
+73.54
+76.76
+63.13
+67.95
+57.25
+43.26
+54.5
+55.52
 79.3
-71.26
-59.9
-74.25
-59.68
-52.37
-78.52
-58.52
-71.98
-71.77
-54.48
-48.96
-81.42
-54.08
-53.52
-64.38
-70.79
-63.95
-67.48
-61.76
-66.15
-62.1
-75.68
-69.72
-43.8
-56.27
-53.38
-81.31
+52.74
+65.73
+68.86
 57.54
-48.15
-59.47
-78.01
-56.39
-72.33
-78.8
-78.66
-52.01
-66.68
-48.56
-47.75
-65.67
-77.93
-72.68
-58.0
-77.83
-73.37
-65.39
-69.79
-55.98
-46.35
-54.31
-55.58
-79.69
-52.76
-62.62
-66.54
-60.29
-62.57
-74.86
-48.05
-65.09
-65.02
-67.84
-41.86
-62.28
-57.05
-43.68
-72.0
-63.04
-54.41
-73.37
-75.11
-42.65
-73.16
-71.68
-58.61
-53.54
-73.33
-72.16
-49.96
-54.78
-64.24
-60.13
-76.46
-61.53
-68.36
-53.1
-71.33
-76.12
-70.86
-61.35
-67.12
-43.25
-80.2
-71.16
-58.63
-52.37
-74.93
-53.34
-76.41
-63.87
-59.97
+62.97
+73.17
+49.9
+71.64
+63.74
+68.66
+41.1
+62.83
+56.87
+37.04
+73.51
+62.51
+54.73
+74.51
+76.75
+42.69
+73.35
+71.27
+60.58
+54.09
+73.89
+73.13
+49.74
+53.41
+65.36
+59.22
+76.26
+60.09
+65.77
+53.07
+72.92
+72.2
+70.79
+59.05
+66.76
+41.56
+78.89
+74.69
+52.84
+51.14
+74.48
+53.3
+69.67
+63.96
+59.85
diff -r b1eda492f063 -r 118e230e85ce test-data/named_steps.txt
--- a/test-data/named_steps.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/named_steps.txt Wed Aug 09 13:40:06 2023 +0000
@@ -1,6 +1,1 @@
-{'preprocessing_1': SelectKBest(k=10, score_func=<function f_regression at 0x11b4ba8c8>), 'estimator': XGBRegressor(base_score=0.5, booster='gbtree', colsample_bylevel=1,
-             colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
-             max_depth=3, min_child_weight=1, missing=nan, n_estimators=100,
-             n_jobs=1, nthread=None, objective='reg:linear', random_state=10,
-             reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
-             silent=True, subsample=1)}
\ No newline at end of file
+{'robustscaler': RobustScaler(), 'selectkbest': SelectKBest(), 'svr': SVR(C=1, kernel='linear')}
\ No newline at end of file
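Note: the new named_steps.txt corresponds to a pipeline assembled with sklearn's make_pipeline, which names every step after its lower-cased class ('robustscaler', 'selectkbest', 'svr'), replacing the previously hand-named SelectKBest + XGBRegressor pair. A small sketch reproducing a dictionary of that shape (the fixture's actual pipeline is defined by the tool test, not here; the exact repr also varies with the scikit-learn version):

    # Hypothetical sketch: make_pipeline auto-names steps, and .named_steps exposes them as a dict.
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import RobustScaler
    from sklearn.feature_selection import SelectKBest
    from sklearn.svm import SVR

    pipe = make_pipeline(RobustScaler(), SelectKBest(), SVR(C=1, kernel="linear"))
    print(pipe.named_steps)
    # -> {'robustscaler': RobustScaler(), 'selectkbest': SelectKBest(), 'svr': SVR(C=1, kernel='linear')}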
diff -r b1eda492f063 -r 118e230e85ce test-data/nn_model01
Binary file test-data/nn_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/nn_model02
Binary file test-data/nn_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/nn_model03
Binary file test-data/nn_model03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline01
Binary file test-data/pipeline01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline02
Binary file test-data/pipeline02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline03
Binary file test-data/pipeline03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline04
Binary file test-data/pipeline04 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline05
Binary file test-data/pipeline05 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline06
Binary file test-data/pipeline06 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline07
Binary file test-data/pipeline07 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline08
Binary file test-data/pipeline08 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline09
Binary file test-data/pipeline09 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline10
Binary file test-data/pipeline10 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline11
Binary file test-data/pipeline11 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline12
Binary file test-data/pipeline12 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline14
Binary file test-data/pipeline14 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline15
Binary file test-data/pipeline15 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline16
Binary file test-data/pipeline16 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline17
Binary file test-data/pipeline17 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/pipeline18
Binary file test-data/pipeline18 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/precision_recall_curve.txt
--- a/test-data/precision_recall_curve.txt Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/precision_recall_curve.txt Wed Aug 09 13:40:06 2023 +0000
@@ -1,2 +1,2 @@
 precision_recall_curve : 
-(array([1., 1.]), array([1., 0.]), array([1]))
+(array([0.64102564, 1.        , 1.        ]), array([1., 1., 0.]), array([0, 1]))
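Note: precision_recall_curve.txt stores the raw (precision, recall, thresholds) tuple returned by sklearn.metrics.precision_recall_curve; the update replaces a degenerate single-threshold curve with one that has a non-trivial starting precision (about 0.641) and two thresholds. A toy call producing a tuple of the same shape (labels and scores below are invented, not the test dataset):

    # Hypothetical sketch: the fixture is simply the repr of this three-array tuple.
    from sklearn.metrics import precision_recall_curve

    y_true  = [0, 0, 1, 1, 0, 1, 1]   # made-up binary labels
    y_score = [0, 1, 1, 1, 0, 1, 0]   # made-up scores / hard predictions
    precision, recall, thresholds = precision_recall_curve(y_true, y_score)
    print((precision, recall, thresholds))
    # -> (array([0.571..., 0.75, 1. ]), array([1. , 0.75, 0. ]), array([0, 1]))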
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model01
Binary file test-data/prp_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model02
Binary file test-data/prp_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model03
Binary file test-data/prp_model03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model04
Binary file test-data/prp_model04 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model05
Binary file test-data/prp_model05 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model07
Binary file test-data/prp_model07 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model08
Binary file test-data/prp_model08 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_model09
Binary file test-data/prp_model09 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_result01
--- a/test-data/prp_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/prp_result01 Wed Aug 09 13:40:06 2023 +0000
@@ -1,15 +1,15 @@
-0.34079224150348947 -0.3921227933085925 0.2490507280911941 -0.7698156258582727 -0.1701382209728926
--0.8620757555306106 -0.19048588419219253 0.24710543975009408 0.7422931346186274 -0.6790706051909926
--0.44857543757211044 0.19920312300180737 -0.812112096739406 0.2785593090771274 0.04069143168750737
-1.3342816328356895 1.6641608262566074 -3.000113357933606 -0.6701123839490727 -0.07045038775469255
-0.7615267260378895 0.9176274108888074 -1.954493327131406 -0.5675301168878727 0.10063563654750733
-0.3517077819346894 0.6351202511326074 -1.518915029366606 -0.30971697444707263 0.09957030020130735
--1.1546995581165105 -0.5289323469785927 0.7279548225941941 0.8261855855227276 -0.6127421735668926
--0.17683671467671042 -1.5830256329757926 1.8352445249339941 -1.0553955128494728 0.23777966502290743
--0.04589044764567053 0.4089694362054475 -1.1558632189207658 -0.02446696726223259 0.07501752707814739
--2.322599763463111 -1.5464662131621925 2.233148890877594 1.4052188634961276 -0.5115354482934926
-0.3359621667503495 -0.16218071845273258 -0.03556840603494589 -0.5958346262657126 -0.28461208654203257
-0.09817425011268949 -0.29803272230839256 0.18230400872239416 -0.42567750847007263 -0.2990016986016926
-0.6939725287059254 -0.046625817910626616 -0.25306728129413986 -0.9172273915573068 -0.2192857084889266
--1.8560091420543106 -0.8903352997473926 0.8320084501263939 1.0765172991949272 0.09558502193530742
-0.7235684795430894 -0.41357463008399253 0.19661484068979412 -1.2196980959976726 -0.029144264696292624
+0.4446440577398572 0.2164436344996918 0.5724807416581001 0.21202570120895203 0.0
+0.2608194397113748 0.621586173919394 0.7162173631360884 0.7868277249674587 0.0
+0.444428573199709 0.7490216543608298 0.3798608289161599 0.7310699759821288 1.0
+1.0 1.0 0.0 0.47384638089740055 1.0
+0.9148761394091602 0.9050651832652965 0.08698347911005577 0.4412843231270225 1.0
+0.8503924579791385 0.8487407117031966 0.14683639283818206 0.565673989931558 1.0
+0.1714715439549176 0.4329110104945242 0.7918143976359968 0.7946239826238215 0.0
+0.21369017947280927 0.0 0.8700670793446612 0.0 0.0
+0.7483382588565948 0.7840344926335436 0.21819066024556938 0.6397712011824479 1.0
+0.0 0.0761110081629974 1.0 0.978367472218168 0.0
+0.569185174962378 0.492742466936392 0.45781970735243327 0.2973057899528674 0.0
+0.4033286346511704 0.32863994196697094 0.6002865082332829 0.34610321804982885 0.0
+0.7785822215744896 0.5578088624025245 0.2880207681776773 0.18076402014468546 0.0
+0.08784970570405005 0.3392061471810267 0.89679124225672 1.0 1.0
+0.6431017002154876 0.1785506947384085 0.4404536363519971 0.03030170224231021 0.0
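Note: every column of the refreshed prp_result01 spans exactly [0, 1], which is the signature of min-max style preprocessing, unlike the roughly centred values of the old fixture. The actual preprocessor is whatever the test's pipeline configures; assuming a MinMaxScaler purely for illustration:

    # Hypothetical sketch: column-wise min-max scaling maps each feature onto [0, 1],
    # matching the value range seen in the new prp_result01 fixture (input data is invented).
    import numpy as np
    from sklearn.preprocessing import MinMaxScaler

    X = np.array([[4.5, 10.2, -3.0],
                  [1.0,  7.7,  0.5],
                  [9.9, 12.1, -1.2]])
    print(MinMaxScaler().fit_transform(X))   # every column now has min 0.0 and max 1.0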
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_result02
--- a/test-data/prp_result02 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/prp_result02 Wed Aug 09 13:40:06 2023 +0000
@@ -1,15 +1,15 @@
-0.5507843815769634 0.3509713593582132 0.5024983733118504 0.21181277111109376 0.0
-0.3861806964013367 0.5069547456108511 0.6109599535763046 0.8290928000628373 0.0
-0.5507375738755746 0.6540163740150353 0.4443100403766963 0.7730482551190299 1.0
-1.0 1.0 0.0 0.517244227590485 1.0
-0.8235586181451755 0.7985651943678985 0.18709221814790866 0.4963213476362478 1.0
-0.7266009913523925 0.7367833962232062 0.2805049676108317 0.5753897601225946 1.0
-0.30103611027291544 0.41809900797558924 0.6996399175984136 0.8344573213929083 0.0
-0.3312417925943893 0.0 0.7545711939364796 0.0 0.0
-0.6381134490835961 0.6925288168071413 0.36342661590035497 0.6700118165314028 1.0
+0.5507843815769633 0.3509713593582132 0.5024983733118504 0.21181277111109376 0.0
+0.38618069640133657 0.5069547456108511 0.6109599535763046 0.8290928000628373 0.0
+0.5507375738755745 0.6540163740150353 0.4443100403766963 0.7730482551190299 1.0
+0.9999999999999998 1.0 0.0 0.517244227590485 1.0
+0.8235586181451754 0.7985651943678985 0.18709221814790866 0.4963213476362478 1.0
+0.7266009913523924 0.7367833962232062 0.2805049676108317 0.5753897601225946 1.0
+0.3010361102729153 0.41809900797558924 0.6996399175984136 0.8344573213929083 0.0
+0.3312417925943892 0.0 0.7545711939364795 0.0 0.0
+0.638113449083596 0.6925288168071413 0.36342661590035497 0.6700118165314028 1.0
 0.0 0.17251430929709788 1.0 0.9803983325686505 0.0
-0.5767922296995018 0.42657716609772306 0.4660985815769355 0.29991460317209145 0.0
-0.5238014571892052 0.39991387603944323 0.5157872357238816 0.3562801111416092 0.0
-0.6462177807916674 0.4376032758632245 0.4055927537907609 0.18180023195970593 0.0
-0.2038689924106734 0.40279192286335813 0.7842991022590049 1.0 1.0
-0.6081358906411253 0.3153114383337088 0.4611172283355056 0.03134330438976468 0.0
+0.5767922296995017 0.42657716609772306 0.4660985815769355 0.29991460317209145 0.0
+0.5238014571892051 0.39991387603944323 0.5157872357238816 0.3562801111416092 0.0
+0.6462177807916673 0.4376032758632245 0.4055927537907609 0.18180023195970593 0.0
+0.2038689924106733 0.40279192286335813 0.7842991022590049 1.0 1.0
+0.6081358906411252 0.3153114383337088 0.4611172283355056 0.03134330438976468 0.0
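Note: the prp_result02 change is confined to the least-significant digits (e.g. 0.5507843815769634 becomes ...633), i.e. pure floating-point round-off rather than a different transformation. A tolerance-based check such as numpy.allclose treats the two versions as identical (the file names below are assumed local copies; the Galaxy test harness applies its own comparison rules):

    # Hypothetical sketch: verify that two fixture versions differ only by float round-off.
    import numpy as np

    old = np.loadtxt("prp_result02.old")   # assumed copy of the previous fixture
    new = np.loadtxt("prp_result02")       # the refreshed fixture
    print(np.allclose(old, new, rtol=1e-12, atol=0.0))   # True when only low-order digits moved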
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_result03
--- a/test-data/prp_result03 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/prp_result03 Wed Aug 09 13:40:06 2023 +0000
@@ -1,15 +1,15 @@
 1.0 -0.409899987374 -0.649450145317 0.510268556953 -0.229110484125 0.0 0.16801799964920539 0.26620960636548074 -0.2091590750523839 0.09391238455008853 -0.0 0.4217854912522724 -0.33139398846382173 0.14879583720862946 -0.0 0.260374000214897 -0.11690787611726697 0.0 0.05249161393599188 -0.0 0.0
-1.0 -1.10383560019 0.0611191480175 1.0172556528299999 1.79193066057 0.0 1.2184530322468177 -0.06746549143499857 -1.1228830040882731 -1.9779968562091492 -0.0 0.003735550254385074 0.06217379881695535 0.10952127528047438 0.0 1.0348090632145892 1.8228515939442282 0.0 3.2110154922908367 0.0 0.0
-1.0 -0.41009731910999997 0.7310461183329999 0.238276079462 1.60843479815 1.0 0.16817981114120914 -0.29980005327413506 -0.09771638139540752 -0.659614798684549 -0.41009731910999997 0.5344284271297466 0.17419080298230055 1.1758400157792797 0.7310461183329999 0.05677549004378134 0.38325153777343535 0.238276079462 2.5870624998998313 1.60843479815 1.0
-1.0 1.48390157074 2.30714564103 -1.83858336229 0.7709049245659999 1.0 2.2019638716446392 3.423577040650361 -2.7282767392385616 1.1439470284546884 1.48390157074 5.322921008923729 -4.241879589977655 1.7785899363610076 2.30714564103 3.3803887800896018 -1.417372968214475 -1.83858336229 0.59429440272011 0.7709049245659999 1.0
+1.0 -1.10383560019 0.0611191480175 1.01725565283 1.79193066057 0.0 1.2184530322468177 -0.06746549143499857 -1.1228830040882734 -1.9779968562091492 -0.0 0.003735550254385074 0.06217379881695537 0.10952127528047438 0.0 1.0348090632145897 1.8228515939442287 0.0 3.2110154922908367 0.0 0.0
+1.0 -0.41009731911 0.731046118333 0.238276079462 1.60843479815 1.0 0.1681798111412092 -0.29980005327413517 -0.09771638139540753 -0.6596147986845491 -0.41009731911 0.5344284271297467 0.17419080298230058 1.1758400157792799 0.731046118333 0.05677549004378134 0.38325153777343535 0.238276079462 2.5870624998998313 1.60843479815 1.0
+1.0 1.48390157074 2.30714564103 -1.83858336229 0.770904924566 1.0 2.2019638716446392 3.423577040650361 -2.7282767392385616 1.1439470284546887 1.48390157074 5.322921008923729 -4.241879589977655 1.7785899363610078 2.30714564103 3.3803887800896018 -1.4173729682144751 -1.83858336229 0.5942944027201101 0.770904924566 1.0
 1.0 0.74006063964 1.38952620136 -0.96404935579 0.702401167325 1.0 0.547689750344366 1.028333649375021 -0.7134549828904773 0.5198194571744222 0.74006063964 1.9307830642659514 -1.3395718392744338 0.9760048258639371 1.38952620136 0.929391160399114 -0.6771493928658102 -0.96404935579 0.49336739985952266 0.702401167325 1.0
 1.0 0.331307031883 1.10808437795 -0.527405721679 0.961279646112 1.0 0.10976434937512317 0.3671161463345349 -0.17473322424758106 0.3184787063629073 0.331307031883 1.2278509886568385 -0.5844100410339456 1.0651789586980116 1.10808437795 0.27815679525974685 -0.5069843854930332 -0.527405721679 0.924058558029212 0.961279646112 1.0
-1.0 -1.4627878344 -0.34365574639300006 1.43177660405 1.8094946798500002 0.0 2.139748248468642 0.5026954450453321 -2.0943853979828857 -2.646906804096103 -0.0 0.11809927202892997 -0.49203825753283764 -0.6218432447980146 -0.0 2.0499842439049503 2.5907921477621754 0.0 3.274270996405455 0.0 0.0
-1.0 -1.33544682955 -2.24827087098 1.6885444678000001 -0.922608257112 0.0 1.7834182345551466 3.0024462066198576 -2.254961356077702 1.2320942718768715 -0.0 5.054721909297167 -3.7963053413091665 2.074273269790536 -0.0 2.851182419737986 -1.5578650684930677 0.0 0.8512059960912424 -0.0 0.0
-1.0 -0.041738424574199996 0.906486336146 -0.13980113811 1.27108242642 1.0 0.001742096085936182 -0.03783531156877273 0.005835079258391552 -0.05305297798272229 -0.041738424574199996 0.821717477619399 -0.12672782147437484 1.1522188516650336 0.906486336146 0.019544358216851295 -0.17769876984513633 -0.13980113811 1.6156505347537549 1.27108242642 1.0
-1.0 -2.7318947650200003 -1.46239633785 2.83576394706 2.28732123255 0.0 7.463249007143682 3.9951128997568346 -7.747008681805666 -6.24872090112244 -0.0 2.1386030489570915 -4.147010811187605 -3.344970193967668 -0.0 8.04155716344531 6.486303086610132 0.0 5.231838420874052 0.0 0.0
+1.0 -1.4627878344 -0.343655746393 1.43177660405 1.80949467985 0.0 2.139748248468642 0.5026954450453321 -2.0943853979828857 -2.646906804096103 -0.0 0.11809927202892993 -0.4920382575328376 -0.6218432447980143 -0.0 2.0499842439049503 2.590792147762175 0.0 3.274270996405454 0.0 0.0
+1.0 -1.33544682955 -2.24827087098 1.6885444678 -0.922608257112 0.0 1.7834182345551466 3.0024462066198576 -2.2549613560777018 1.2320942718768715 -0.0 5.054721909297167 -3.796305341309166 2.074273269790536 -0.0 2.851182419737985 -1.5578650684930675 0.0 0.8512059960912424 -0.0 0.0
+1.0 -0.0417384245742 0.906486336146 -0.13980113811 1.27108242642 1.0 0.0017420960859361827 -0.037835311568772734 0.005835079258391553 -0.053052977982722294 -0.0417384245742 0.821717477619399 -0.12672782147437484 1.1522188516650336 0.906486336146 0.019544358216851295 -0.17769876984513633 -0.13980113811 1.6156505347537549 1.27108242642 1.0
+1.0 -2.73189476502 -1.46239633785 2.83576394706 2.28732123255 0.0 7.463249007143681 3.995112899756834 -7.747008681805665 -6.2487209011224385 -0.0 2.1386030489570915 -4.147010811187605 -3.344970193967668 -0.0 8.04155716344531 6.486303086610132 0.0 5.231838420874052 0.0 0.0
 1.0 -0.300256196558 -0.305034204892 0.340123288396 0.0593443810367 0.0 0.09015378357147634 0.0915884101809656 -0.1021241249345827 -0.017818518137168244 -0.0 0.09304586615409464 -0.10374923684112626 -0.01810206608433767 -0.0 0.11568385130930857 0.020184406026027626 0.0 0.0035217555606290385 0.0 0.0
-1.0 -0.523654501136 -0.42649659668799994 0.5723853152130001 0.24389111089200002 0.0 0.274214036559993 0.22333686257485638 -0.29973214669543563 -0.1277146780056551 -0.0 0.18189934698644647 -0.2441203889325326 -0.1040187287578936 -0.0 0.3276249490714854 0.1395996903855662 0.0 0.05948287397213385 0.0 0.0
-1.0 -0.007572212655529999 -0.254805682403 0.0572980350837 -0.327374762308 0.0 5.733840450056868e-05 0.0019294428129929542 -0.00043387290639779506 0.002478951318249763 -0.0 0.0649259357848585 -0.014599864929853214 0.08341694971140987 -0.0 0.003283064824452916 -0.018757930616241734 0.0 0.1071742349962195 -0.0 0.0
-1.0 -1.87242461384 -0.413385894664 1.8275030360799998 2.35149919802 1.0 3.5059739345138734 0.7740339241831431 -3.421861666623521 -4.403004977797668 -1.87242461384 0.1708878979071557 -0.755463977571107 -0.9720765997751761 -0.413385894664 3.339767346881617 4.297371923721235 1.8275030360799998 5.529548478288703 2.35149919802 1.0
-1.0 -0.16811770561099998 -0.811895938369 0.316838713275 -0.819986910541 0.0 0.028263562939906853 0.13649408235348612 -0.053266197524534487 0.1378543180312052 -0.0 0.659175014740079 -0.25724006442603264 0.6657440421839824 -0.0 0.10038677022975767 -0.25980359763815297 0.0 0.672378533458574 -0.0 0.0
+1.0 -0.523654501136 -0.426496596688 0.572385315213 0.243891110892 0.0 0.274214036559993 0.2233368625748564 -0.2997321466954356 -0.1277146780056551 -0.0 0.18189934698644653 -0.2441203889325326 -0.1040187287578936 -0.0 0.3276249490714853 0.13959969038556613 0.0 0.05948287397213384 0.0 0.0
+1.0 -0.00757221265553 -0.254805682403 0.0572980350837 -0.327374762308 0.0 5.733840450056869e-05 0.0019294428129929544 -0.0004338729063977951 0.0024789513182497634 -0.0 0.0649259357848585 -0.014599864929853214 0.08341694971140987 -0.0 0.003283064824452916 -0.018757930616241734 0.0 0.1071742349962195 -0.0 0.0
+1.0 -1.87242461384 -0.413385894664 1.82750303608 2.35149919802 1.0 3.5059739345138734 0.7740339241831431 -3.4218616666235215 -4.403004977797668 -1.87242461384 0.1708878979071557 -0.7554639775711071 -0.9720765997751761 -0.413385894664 3.339767346881618 4.297371923721235 1.82750303608 5.529548478288703 2.35149919802 1.0
+1.0 -0.168117705611 -0.811895938369 0.316838713275 -0.819986910541 0.0 0.028263562939906863 0.13649408235348615 -0.05326619752453449 0.13785431803120524 -0.0 0.659175014740079 -0.25724006442603264 0.6657440421839824 -0.0 0.10038677022975767 -0.25980359763815297 0.0 0.672378533458574 -0.0 0.0
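Note: each prp_result03 row carries 21 values beginning with a constant 1.0, i.e. the degree-2 polynomial expansion of 5 input features (1 bias + 5 linear + 15 quadratic terms); as with prp_result02, the new fixture only shifts low-order digits. A short sketch of that expansion (the input rows are placeholders):

    # Hypothetical sketch: degree-2 PolynomialFeatures on 5 columns yields
    # 1 bias + 5 linear + 15 quadratic = 21 output columns, as in prp_result03.
    import numpy as np
    from sklearn.preprocessing import PolynomialFeatures

    X = np.random.RandomState(0).randn(3, 5)   # made-up 5-feature rows
    X_poly = PolynomialFeatures(degree=2, include_bias=True).fit_transform(X)
    print(X_poly.shape)   # (3, 21)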
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_result04
--- a/test-data/prp_result04 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/prp_result04 Wed Aug 09 13:40:06 2023 +0000
@@ -1,15 +1,15 @@
 0.0 -0.25385559680817016 0.13442061387070464 -0.5602120769938709 0.0
--0.5807061112525813 0.2698773982744695 0.5349578561360192 0.571982134735025 0.0
--0.00016513310878258202 0.7636545174678359 -0.0804627978317235 0.4691871204655464 1.0
+-0.5807061112525813 0.2698773982744695 0.5349578561360193 0.5719821347350249 0.0
+-0.00016513310878262847 0.7636545174678359 -0.0804627978317235 0.46918712046554634 1.0
 1.584789882498885 1.9253361878040125 -1.7212531850763018 0.0 1.0
-0.9623215057330502 1.248994581161877 -1.0303412425843197 -0.038376040801309956 1.0
-0.6202642404230927 1.0415547572084232 -0.6853777543973235 0.1066485748494791 1.0
--0.881088095119412 -0.028466436412001278 0.862443663986116 0.5818215588435884 0.0
--0.7745253270992509 -1.4322841823191093 1.0652991072215634 -0.9487119185155306 0.0
-0.30808862594408043 0.8929646798898123 -0.37915680271103425 0.2802011596461483 1.0
--1.9431147973567746 -0.8530466232854528 1.97164195151228 0.8495016397748227 0.0
-0.09175320910447847 0.0 0.0 -0.3986186678055577 0.0
--0.0951931852237634 -0.08952520583418162 0.18349498924288923 -0.2952349539785941 0.0
-0.33667993570408733 0.03702149075186114 -0.22344167716683067 -0.6152600641516485 0.0
--1.223884424953702 -0.07986181719203675 1.1750811552867684 0.8854543571237001 1.0
-0.20233065722424093 -0.37358807403702804 -0.01839561515890641 -0.8912230866367292 0.0
+0.9623215057330502 1.248994581161877 -1.0303412425843197 -0.03837604080131002 1.0
+0.6202642404230927 1.0415547572084232 -0.6853777543973235 0.10664857484947902 1.0
+-0.881088095119412 -0.028466436412001236 0.862443663986116 0.5818215588435883 0.0
+-0.7745253270992509 -1.4322841823191093 1.0652991072215632 -0.9487119185155306 0.0
+0.30808862594408043 0.8929646798898123 -0.37915680271103425 0.2802011596461482 1.0
+-1.9431147973567742 -0.8530466232854528 1.97164195151228 0.8495016397748227 0.0
+0.09175320910447847 0.0 0.0 -0.39861866780555777 0.0
+-0.0951931852237634 -0.08952520583418166 0.18349498924288915 -0.2952349539785942 0.0
+0.33667993570408733 0.03702149075186114 -0.22344167716683067 -0.6152600641516486 0.0
+-1.223884424953702 -0.07986181719203675 1.1750811552867686 0.8854543571237001 1.0
+0.2023306572242409 -0.37358807403702804 -0.01839561515890641 -0.8912230866367292 0.0
diff -r b1eda492f063 -r 118e230e85ce test-data/prp_result07
--- a/test-data/prp_result07 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/prp_result07 Wed Aug 09 13:40:06 2023 +0000
@@ -1,15 +1,15 @@
-0.10866513901130055 -0.5565683482001781 0.01697338750768846 -0.9589623671667038 -0.816496580927726
--0.5710995365177258 0.07926796585473102 0.46542360804755395 0.9797654572160418 -0.816496580927726
-0.10847183700890116 0.6787364476581768 -0.2236147606088382 0.803743046654752 1.224744871391589
-1.9637931622636124 2.0890722453009336 -2.0606794162148767 0.00032423752699077795 1.224744871391589
-1.2351422905746392 1.2679619500798842 -1.2871193566513779 -0.06538948660301952 1.224744871391589
+0.10866513901130044 -0.5565683482001781 0.01697338750768846 -0.9589623671667038 -0.816496580927726
+-0.5710995365177259 0.07926796585473102 0.46542360804755417 0.9797654572160418 -0.816496580927726
+0.10847183700890099 0.6787364476581769 -0.2236147606088382 0.803743046654752 1.224744871391589
+1.9637931622636124 2.0890722453009336 -2.0606794162148767 0.0003242375269908845 1.224744871391589
+1.235142290574639 1.2679619500798842 -1.2871193566513779 -0.06538948660301952 1.224744871391589
 0.8347358862676002 1.0161203164819261 -0.9008907216292501 0.18294534382616373 1.224744871391589
--0.922721566734639 -0.28293538193724904 0.8320838514832234 0.9966141260199964 -0.816496580927726
--0.7979810068833711 -1.9872356829362412 1.059205224122999 -1.6242152405020795 -0.816496580927726
-0.4693084330819043 0.8357250235474191 -0.5580390743243027 0.48013042183945476 1.224744871391589
--2.1659119218220786 -1.284014236214121 2.073966413639728 1.4549796745789692 -0.816496580927726
-0.2160698816290759 -0.2483757987671466 -0.1335268595966537 -0.6822557426452339 -0.816496580927726
--0.0027663810163240663 -0.35706357942460004 0.07191812706310458 -0.5052252645629531 -0.816496580927726
-0.5027769329398427 -0.20342998011241972 -0.3836970281346616 -1.053224520491157 -0.816496580927726
--1.3239931073762934 -0.34533177433843787 1.182119596299028 1.5165437885484256 1.224744871391589
-0.3455099575735564 -0.7019291669926769 -0.15412299100336474 -1.5257734742396478 -0.816496580927726
+-0.9227215667346391 -0.282935381937249 0.8320838514832234 0.9966141260199962 -0.816496580927726
+-0.7979810068833713 -1.9872356829362412 1.0592052241229988 -1.6242152405020795 -0.816496580927726
+0.4693084330819042 0.8357250235474191 -0.5580390743243027 0.48013042183945476 1.224744871391589
+-2.165911921822078 -1.284014236214121 2.073966413639728 1.4549796745789692 -0.816496580927726
+0.21606988162907578 -0.2483757987671466 -0.1335268595966537 -0.6822557426452339 -0.816496580927726
+-0.002766381016324175 -0.3570635794246001 0.07191812706310448 -0.5052252645629531 -0.816496580927726
+0.5027769329398426 -0.20342998011241972 -0.3836970281346616 -1.053224520491157 -0.816496580927726
+-1.3239931073762936 -0.34533177433843787 1.1821195962990283 1.5165437885484256 1.224744871391589
+0.34550995757355624 -0.7019291669926769 -0.15412299100336474 -1.5257734742396478 -0.816496580927726
diff -r b1eda492f063 -r 118e230e85ce test-data/pw_metric02.tabular
--- a/test-data/pw_metric02.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/pw_metric02.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,4 +1,4 @@
-0.0 6.991989327202 4.700302055636 5.583279679695999
-6.991989327202 0.0 2.2916872715660004 5.558713150412
+0.0 6.991989327202001 4.700302055636 5.583279679696
+6.991989327202001 0.0 2.2916872715660004 5.558713150412
 4.700302055636 2.2916872715660004 0.0 4.078323200938
-5.583279679695999 5.558713150412 4.078323200938 0.0
+5.583279679696 5.558713150412 4.078323200938 0.0
diff -r b1eda492f063 -r 118e230e85ce test-data/qda_model01
Binary file test-data/qda_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/qda_prediction_result01.tabular
--- a/test-data/qda_prediction_result01.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/qda_prediction_result01.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,5 +1,6 @@
+0 1 2 3 0
 3.68258022948 2.82110345641 -3.990140724 -1.9523364774 0
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 0
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 0
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 0
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 0
diff -r b1eda492f063 -r 118e230e85ce test-data/ranking_.tabular
--- a/test-data/ranking_.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/ranking_.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,18 +1,18 @@
 ranking_
-17
-7
-4
-5
+10
 2
 1
-9
-6
-8
+1
+1
+1
+1
+1
+1
+1
 3
-10
-15
-14
-11
-13
-12
-16
+6
+5
+9
+8
+4
+7
diff -r b1eda492f063 -r 118e230e85ce test-data/rfc_model01
Binary file test-data/rfc_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/rfc_result01
--- a/test-data/rfc_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/rfc_result01 Wed Aug 09 13:40:06 2023 +0000
@@ -1,6 +1,6 @@
 0 1 2 3 predicted
-3.68258022948 2.82110345641 -3.9901407239999998 -1.9523364774 1
-0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
-2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 1
-1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 1
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r b1eda492f063 -r 118e230e85ce test-data/rfr_model01
Binary file test-data/rfr_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/rfr_result01
--- a/test-data/rfr_result01 Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/rfr_result01 Wed Aug 09 13:40:06 2023 +0000
@@ -1,6 +1,6 @@
 0 1 2 3 4 predicted
-86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6686209127804698
-91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 1.0374491367850487
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.16198314840411981
-61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 1.1603837128651284
--206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -0.6710618307873705
+86.9702122735 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.7551606160069697
+91.2021798817 -0.621522971207 1.11914889596 0.390012184498 1.28956938152 1.002450640353639
+-47.4101632272 -0.638416457964 -0.732777468453 -0.864026104978 -1.06109770116 -0.2186821961683199
+61.7128046302 -1.09994800577 -0.739679672932 0.585657963012 1.48906827536 1.1688374943709285
+-206.998295124 0.130238853011 0.70574123041 1.33206565264 -1.33220923738 -0.6745330836494304
diff -r b1eda492f063 -r 118e230e85ce test-data/searchCV01
Binary file test-data/searchCV01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/searchCV02
Binary file test-data/searchCV02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/searchCV03
Binary file test-data/searchCV03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/svc_model01
Binary file test-data/svc_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/svc_model02
Binary file test-data/svc_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/svc_model03
Binary file test-data/svc_model03 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/svc_prediction_result03.tabular
--- a/test-data/svc_prediction_result03.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/svc_prediction_result03.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,3 +1,4 @@
+0 1 2 3 0
 0 58 56 -67 0
 0 44 64 -76 0
 0 51 48 -73 0
@@ -33,7 +34,7 @@
 2 -99 26 -108 1
 2 -81 19 -110 0
 2 -108 21 -108 1
-2 -92 27 -106 1
+2 -92 27 -106 0
 2 -88 2 -106 3
 2 -88 15 -103 3
 3 54 -74 4 3
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_eval_model01
Binary file test-data/train_test_eval_model01 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_eval_model02
Binary file test-data/train_test_eval_model02 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_eval_weights01.h5
Binary file test-data/train_test_eval_weights01.h5 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_eval_weights02.h5
Binary file test-data/train_test_eval_weights02.h5 has changed
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_split_test02.tabular
--- a/test-data/train_test_split_test02.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/train_test_split_test02.tabular Wed Aug 09 13:40:06 2023 +0000
b'@@ -1,201 +1,201 @@\n--1.3022497239876525\t0.41162245619920174\t0.3850631031897158\t-1.065301842496646\t-0.6940008550138481\t2.2608403458600925\t3.622204434814536\t-0.3183465181327487\t-1.410027169684386\t-0.6307904628990526\t2.809174035044597\t0.7840390953413314\t-0.032913359309272236\t0.1269040356918228\t-0.7038487276500461\t-1.5433857418796189\t-0.2658388398378144\t-1.204125138751038\t-0.4106305941465671\t-2.1530032168711024\n--0.4107989913365759\t0.9675376475353166\t0.09374211379388764\t1.7143886101095047\t-0.11156554775507473\t1.6257337330303492\t5.671063244915109\t-0.3775968070412295\t0.8772742813833009\t-0.2249373445476654\t3.541130040089443\t0.7064690478674034\t0.3274452454361061\t0.4095309780710557\t-0.04020259217468653\t0.3999351212624621\t-0.4789427070381956\t-0.8383398308678357\t-0.7084990898469742\t-3.5921789270343747\n--1.0046430489468259\t-0.2475198782602121\t1.8722558073924007\t-2.050734120852677\t0.223218415351888\t0.9972967022037826\t0.21687494749301134\t0.6815453371376522\t-1.2369792180109709\t-1.7937590177703913\t-0.595814082168741\t-0.3714655242486308\t0.8054558366241785\t0.707291290265989\t0.0026761403473940892\t0.6858925338135025\t1.0460915051165451\t-1.05529607831364\t-0.8524278739013349\t-1.0937845388370384\n--0.6601752137721719\t-0.11000001206134824\t-2.1153815467792265\t0.7939530261454807\t0.14074473863377998\t3.3552079891275923\t-0.8369407002892686\t-0.5714820686564377\t-0.37412481389886265\t0.16669033299410288\t-3.6319951227966674\t-0.6639361788987586\t0.5554669721932757\t0.7479717178718552\t-0.016560794142802523\t0.19859811525823087\t-1.9152321429437595\t-0.4582315336475037\t-2.2285961423670955\t-3.4228140259065998\n-0.7866152217561416\t-0.2291058850235269\t-0.3527520240499313\t0.6723966958156411\t-1.6682659534205586\t2.7789914613781272\t1.906164582945605\t1.0761421124464927\t0.09690167407822936\t1.6513613104097675\t2.2258330065926084\t-0.8734144600762542\t-1.0066865968249934\t-0.13471591695058407\t0.015184991621273526\t0.41810514195584253\t-0.3760878884398714\t2.2903405971801156\t1.0522116184673187\t-0.9159796436696128\n-0.2814798326149793\t0.5875101493421397\t0.21729777590682087\t-1.485801637332555\t-0.7259055545195056\t2.3934625979413915\t2.795967841759341\t0.1748287231468569\t0.7064308999942802\t0.3497777551584115\t2.225996647861514\t1.6301969056059509\t0.07651250932855069\t-2.0342494286984243\t-0.8883453790706329\t-0.7345168234009436\t1.5287683026280032\t-0.4421021715011357\t-0.5779836284098872\t-1.8023368901730872\n-0.023561266296767996\t0.01327469130218088\t0.9878045214079304\t0.5750648387066529\t0.4047426855593061\t2.730429552257033\t1.0141221327309589\t-0.0010397698579166187\t1.2950034987670118\t-1.805850216908488\t1.6388229124609937\t0.9286520099757948\t-0.34109406603463605\t-0.02757550682732839\t-1.2286674947471106\t0.8011744540858317\t0.8424403652177841\t-0.14115310456128674\t-0.44894002007093775\t-0.4406268508179094\n-0.2456307272179787\t0.5943091746736674\t-1.273655669405128\t0.16873404654912996\t0.005752441478044986\t0.5666353702678641\t4.842127705182824\t0.698622620435285\t1.2592032824188062\t-1.3867865971369038\t2.0103146282963\t0.25453278665231965\t1.037764245051936\t-0.14900969999222113\t-1.3508991449570242\t-0.6347960472728013\t0.01478239489509124\t0.1237920700532843\t-0.8008367439748938\t-3.7595616099202216\n--1.4928016688154506\t0.6922526483668314\t0.7340706436196134\t0.3473096338667893\t-0.2626210985357605\t3.4791405788113354\t1.805377038112414\t1.3002542896922045\t-0.9818090439589664\t-1.983507863053584\t3.1109989936861995\t-1.51671307
56726412\t2.115406032275567\t-0.06319774436121431\t0.31045881394126296\t1.5773205208380376\t0.11953451934790252\t-0.3678585275873511\t-0.6436336614328086\t-0.1923418873135878\n--1.1092740315883938\t-0.9086267440397304\t-0.9317250076628589\t0.10305857018240576\t0.569614735498199\t3.3180899169801226\t-0.12789255109919928\t-0.225656531827112\t-0.6679424977863244\t0.4743665910531477\t-1.90983381933296\t-0.015442113772508715\t0.7947216167107651\t0.8564724155111614\t0.7221596369993102\t-0.9866727547841551\t0.8360620842096383\t0.6950101534147096\t0.04441865129686528\t-2.6156995904444718\n-1.0098923348657989\t-0.3404395572391499\t0.28768679961742755'..b'65\t0.4229830359699106\t1.7754032762538372\t-1.4816394916943092\t-0.0898673177344088\t1.227196153928395\t0.9070135114981376\t-0.4301867214198333\t-1.4492302926076932\n+-0.0661581696020389\t2.009979529130306\t0.3713735532042358\t-0.5487484003197485\t2.354415943408788\t1.8572881651916524\t3.323002263140202\t0.3735478888166094\t-0.8598539493190498\t0.7274541656791573\t2.205532939957485\t0.2975855303668845\t0.8972227445878997\t-0.5747601621532991\t-0.2127621916795853\t0.040064364498694\t0.5849058397345099\t0.8758434197897889\t0.4663260930810837\t-2.254363887228946\n+0.1854349821398681\t3.0513112038934844\t-2.642401530692195\t0.8764021246988886\t-0.3953153229944254\t1.9075565797529936\t1.4218322330290696\t-0.5195408321168391\t0.5455073292906822\t0.6246218548016428\t0.9584355772452136\t-2.2635771383414567\t-0.6561863207944872\t0.8486496057693781\t-0.5966266151068456\t-0.6006020054228821\t2.0603605160777265\t0.1160223057446743\t0.4886550176001555\t-1.2835462572257614\n+-0.1582698552315506\t-0.0804834699025315\t-2.148011786893936\t2.047644705860473\t0.7947162744855929\t3.242804563537072\t3.1537786543701785\t0.5402497023814611\t0.4272506159045248\t-0.6354699283615589\t3.262065129084129\t-0.2292960421354582\t0.7154856008886161\t-0.2042624800307618\t-0.2578743486811803\t0.1366193834599442\t0.4553653167841668\t-0.667051904499571\t-2.0893270217727435\t-1.499879266505479\n+-0.8484574739703497\t1.3067865576457078\t0.25715573889589\t-0.5778920236798556\t1.2522052635779308\t2.5540397800380448\t3.62109581483752\t-0.3278268826487843\t0.7393667994651832\t-0.2837573726327204\t3.182336120597001\t0.6388288113204441\t0.6913878844603908\t-0.4201373516698137\t0.1445696954158848\t1.797278428886632\t-1.3269163979305345\t-0.5374183207933991\t-1.1487633221563704\t-1.8939359370372515\n+-2.130317782257829\t0.6944206556053942\t-0.5187934367784872\t0.4910182874266096\t0.9821391691462148\t1.594712581464469\t4.651398959285967\t-0.4079668226972564\t-0.7617607267021139\t0.3720022354031997\t2.9925378597902497\t0.3213832180477288\t-1.8009468379200382\t0.0228737675663929\t-0.5948190671258752\t-0.1814257358676153\t1.0527453107966451\t-0.791437621835658\t-1.202390030067397\t-2.9428283401869946\n+0.6749106319022494\t-0.1410001132490149\t0.9696745674485816\t-0.6012318064205764\t0.9706395894078412\t2.0205295534128647\t-0.5705109230704828\t1.107471162440306\t-0.2333200858753318\t0.5489383517969392\t-2.331823083983417\t0.524187537611793\t-1.607427755534678\t1.2124152543792104\t0.2564484145413819\t0.5333111287645858\t-1.7715901663386604\t0.7643998152072085\t-1.088005122340949\t-2.120248490613845\n+1.0784246103336974\t0.6750275474270194\t0.883320881578071\t0.6851873084466028\t0.2463794964155742\t1.6240981608723588\t3.909303507340842\t0.2591824998427575\t-1.6014038225855325\t1.1801464748015662\t2.4755532139585203\t0.7995931657601443\t1.6483349264511815\t-1.269517021279204\t0.7198065388081868\t-0.367
1739224800498\t-0.7364785132472684\t-0.6205826123141913\t1.708837288406762\t-2.594756018144528\n+0.0100359871993886\t0.2446441667110395\t1.245919130033156\t0.8854157890056191\t-1.573923287330914\t2.8875386799155955\t-0.513386992362383\t0.4013578576162002\t0.5076563896403061\t-0.2023935750158571\t-2.560644060182517\t-0.1450215571363124\t0.5199643185069369\t0.6728828829265034\t1.5303075053292063\t-0.9794419968244896\t0.3655133608469971\t-1.327131896650437\t-1.904372466358065\t-2.6555099509371605\n+-0.2984991804837332\t-1.6426421983629622\t-1.0099344497295062\t-0.2068306325948078\t1.7371391385934103\t1.9175803121382835\t2.5305082449767884\t0.6198917597202278\t-0.5024984291905042\t0.6767881974129\t1.569111670968616\t-0.8206492678463314\t-0.3511969916778679\t1.0578552660085534\t-1.0111524265487517\t1.503872093145261\t-0.7474037040854009\t0.6582529782133406\t0.7064620422956671\t-1.969356801153876\n+-0.6512454621212219\t-1.37373475613224\t0.3008590666620012\t0.0797497766512836\t-2.195376961647302\t1.132356514093129\t5.6861294740324535\t-0.1068624210733533\t0.4255497794528917\t-0.1410668722642891\t2.6052434613346884\t-0.0193472593916205\t1.0454590995696535\t-0.8660690232570448\t-1.29000104081957\t0.1081990001477609\t0.7755088867812867\t0.6015079687881466\t0.955602538442458\t-4.328064444458374\n'
diff -r b1eda492f063 -r 118e230e85ce test-data/train_test_split_train02.tabular
--- a/test-data/train_test_split_train02.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/train_test_split_train02.tabular Wed Aug 09 13:40:06 2023 +0000
b'@@ -1,800 +1,800 @@\n-0.13074624395513548\t-0.469611133626014\t-0.5710665790468505\t0.03279113352421141\t2.003536501469461\t2.3320994929619165\t2.5655773908930333\t-1.8172726174227096\t0.31252740842018656\t0.4183877613375451\t2.3746178626049312\t-0.6930727012865296\t-0.013183556173275029\t1.1098774440423256\t1.4603607557778286\t0.5412632236853618\t0.6061667777690624\t0.4212995019384291\t0.14980350057199987\t-1.3870421561971842\n-0.1025284847407583\t-2.6554352025337806\t-0.71518541502396\t-1.1404916299860086\t1.1910205067228126\t2.113153625179661\t2.9349032443668133\t-1.2362775613386645\t-0.05861748263617049\t-0.12932403608468454\t2.217536166240706\t-1.19338504289619\t0.08517274490563755\t0.8749991601378865\t0.03823939811250166\t0.7007347847223218\t0.6221756436475849\t-1.9582697041316883\t0.1486878915218063\t-1.8828047461722932\n--0.3579496672192898\t0.5691405803600221\t-0.3135941251222193\t0.6099240993754877\t-0.21623755767016947\t1.2273086492959706\t1.6963625000374265\t0.4917445652599018\t1.51820010664321\t-0.6179648499957521\t0.4424061323382702\t0.37607271963750777\t0.0955642147899332\t1.1428211235733463\t1.3792380662910433\t0.8392247210016273\t-1.3784520073608069\t0.6806565402268875\t-0.4079706906458002\t-1.8670081757072128\n--0.4813193986666376\t-0.1886485401626124\t0.048734923506973636\t-0.20004930206569047\t-1.0585699644909594\t2.7625383995667336\t1.8863896126660609\t0.8214112065844242\t-0.4384103073465777\t-0.3211449191911812\t2.19052189921114\t-1.59109564541547\t1.3097995624508914\t1.5592201449464334\t-0.3552421947179116\t-0.4128075508328489\t0.5596595170526524\t-1.176294355286121\t0.16888633455190946\t-0.9214884435605952\n--0.2572336609353247\t0.29438982269850145\t-0.06531975450102831\t1.5097968126742924\t-0.7755962651137243\t2.4354435421606127\t0.38216873050665007\t1.1239051747279731\t-0.2442436451866952\t0.12718619074952095\t0.9026611100653392\t-1.803720014048137\t1.2266258763633622\t0.22899043555447016\t-0.6493009189318991\t0.21750122466449906\t-0.4382663216525586\t-0.2972087114804226\t-1.5229655091814298\t-0.3225053056087868\n-1.4069915349949509\t0.3654232815183534\t-1.097052189453232\t-0.5389149543134537\t-1.5728963747716522\t1.6783401449848374\t0.9288455507296128\t-0.4925716601774063\t1.0392596016586455\t-0.2847157775591438\t0.5210189577500189\t-2.65782453257402\t-1.67318496169606\t0.4719725602155527\t-1.0949050649335628\t0.08630539086516381\t1.016831070562736\t-0.9944516912574556\t-0.6752082767957616\t-1.0649707211089758\n--0.1186989836246748\t1.7836421698313514\t-0.7750775352454679\t-1.6293416755674714\t-0.6843986506548367\t1.6772721667636452\t5.61626113564464\t0.2921048965669931\t-0.03311146686259204\t-0.20216240643483607\t3.174632106697607\t1.3260918422916352\t-1.4169867073972098\t1.1177286442516994\t1.1442261013773558\t2.2927637054906245\t-1.1696635334539611\t0.9572219962948342\t-0.99260262548243\t-3.88516570201557\n--1.6188277521439098\t-0.6049258835366146\t-2.1216868938554883\t0.6816156489035747\t-0.3909183237429715\t1.8546492624641897\t3.5484612828339506\t0.8719065415632481\t2.758577973437618\t1.6571275711005302\t2.2964938011707874\t-1.3015552984330785\t0.6517060330634804\t0.5957551049011494\t1.7890274248449136\t-0.7415803218575354\t-0.005766275627966389\t-0.15804411491961362\t0.13620848005420536\t-2.4231894996131182\n--0.8844255979781576\t-1.067022557954717\t0.4268970268412451\t-0.4792374662006493\t0.8774697010725497\t2.031228226698857\t4.956071644421575\t0.3213541753652649\t-0.8329849287815198\t-2.9127670891791504\t3.303547980533676\t0.6551018446390298\t0.56012402
39650124\t1.9378083839436648\t0.6510057852005603\t0.5434997376470951\t-0.16431466813504966\t-1.2255895916041704\t-0.6701271433847471\t-3.1283762290921477\n--0.30746702969320694\t-0.8391679152764611\t-0.1179283406215597\t-0.426295494661604\t-1.691982298012858\t2.8901125627044437\t2.0602489643699675\t0.9458180233686614\t0.793907788630693\t-1.364580463112297\t2.4726804852199185\t0.8429876604473175\t0.2306659754164001\t2.228388534591572\t0.3261200509781369\t0.23298923486173995\t-1.5934373922813216\t0.3574092709432904\t-1.8018244078785832\t-0.8941426836775552\n--0.03042402302151745\t0.5533032756826644\t-0.4112837804349074\t-0.8355476515317032\t-0.2'..b'4037\t-0.877785782447869\t1.5635577470569406\t-0.9206250746598924\t0.1845809406149484\t0.2312626005625568\t0.5086324430299911\t-1.2655949713688883\n+2.4366892748151594\t-0.5693156806025699\t-1.7218141143792118\t-0.7636370379358908\t1.381242841429633\t0.8734261792585589\t3.6993297964062575\t-0.2510229748899681\t-0.2572996499581653\t1.0939573204735948\t1.4250691293913331\t-0.6234909491978371\t0.8946129186610708\t0.1134885034206386\t-0.8171226347069339\t0.4036243685718015\t1.2492832667321032\t-0.1655992472538439\t0.0501069876968286\t-3.1064820228464267\n+-0.6496553421679686\t-1.4224279723935236\t2.3012734316107286\t-1.6307384651011865\t0.7899921830677415\t1.5784780783388637\t1.5937350935854364\t0.2033287108801171\t0.0348586673136675\t0.6478279768265606\t0.5072168351442272\t-1.6486585166575147\t-0.3823982996033502\t2.3256408720316006\t-0.9273509613624984\t0.6528468905997087\t0.8314107815153837\t1.2344031799078437\t-0.2712026087680339\t-1.7964285078767936\n+1.556971762459764\t-1.2439952121813922\t-0.4229414892042001\t1.250912354503068\t-0.04525686050637\t1.8102334072756012\t4.330921368106597\t0.4369341397955196\t1.7090276790490326\t-1.3105903617385728\t2.6507931144960315\t0.9560232948982376\t0.9264898048764156\t1.27342213352265\t-0.1775463778209161\t-0.5020139494158932\t1.0777715747655348\t-1.5004727301982392\t-0.8712982816000493\t-2.962814957918756\n+0.9217291089973372\t-1.388586324225548\t0.2542353391148201\t0.1168834752581415\t0.3075246148086876\t2.583752655948304\t1.868779214202141\t-1.5598686552244263\t-0.4374234835713509\t-2.067455238116786\t2.100424767731529\t0.592164188729302\t-0.4145039221243959\t0.8609838368049071\t-0.7423945821145248\t1.546996722395656\t0.4044604320792881\t-1.3908367691435546\t-0.1938267900587888\t-0.9316346070105346\n+-0.5219973387100996\t0.9905632425118324\t-1.2688367548190436\t-1.3062113291308677\t1.2638138110709067\t1.8660691408757044\t0.544578722144265\t1.4116584346018954\t-0.5641770654580077\t-0.3012039021140541\t0.226832738868361\t-0.8279588610356573\t-0.6522929057307618\t-0.2060367785065769\t-0.135516011514525\t1.027502980770911\t-0.1971805711985108\t-0.9413847947787156\t0.1960821773331954\t-0.9608113047816084\n+0.4424052465929388\t0.943928936525626\t1.738397490506961\t-0.1216112264138329\t0.1547572872518768\t1.8624246245418483\t3.2762488723359144\t-0.4270106111994435\t0.1528975135659881\t0.4771953229726214\t2.315577412539543\t1.3689173890211586\t0.7770702960925243\t-1.4296307560984765\t0.7923063752623205\t0.2514409708101872\t1.1840866916876511\t0.8951950393049203\t-0.5737280626680346\t-2.1013927221698583\n+0.769368091793121\t0.0422521992671298\t0.920578733178434\t1.2609933412881686\t-0.9009957896033098\t3.4649606386186127\t-0.0964160403896523\t-1.4408423082558597\t-1.3370985919131873\t-2.909342960508076\t1.3996034179270973\t1.107134834593895\t0.6373319894768134\t-0.2057630833315292\t0.5627232979887723\t1.24468900174
40848\t0.1454247655053584\t-0.2729346201818952\t-0.0871837836013387\t0.3686229650559225\n+0.7427620511228765\t-1.5580462215214408\t1.4680352994852566\t-0.7508175656670606\t0.6363631862918148\t3.1644775950816646\t1.8594024439897647\t-0.4499136700983101\t0.6875433245937749\t0.4124013786469115\t2.179503463347244\t0.8484523669327337\t-0.546863836293962\t0.1744144634114788\t0.2404538407459919\t-1.228725137426046\t0.7554095521777582\t-0.0301346466145987\t-0.4835932968055189\t-1.021435051734048\n+2.0468935191072437\t-0.7226970302245961\t-0.4839561868483981\t-2.222915078471478\t0.3459880131172701\t1.1324497189504088\t1.4912587172048224\t0.3411839598264167\t0.6715382471375413\t-0.3651029407087692\t0.0323308793516845\t-0.5081627405589572\t0.0020753178518641\t-0.0794449797460891\t-0.1380562260161878\t0.4878193412223996\t-0.3974492638991908\t0.3347669895977678\t0.9512754223441522\t-1.987373538202905\n+-1.785494148707842\t1.3285224891343512\t-0.5279590208716799\t2.675167568819385\t1.5490279490427394\t1.9850254692433156\t-0.4538705494124088\t0.2596309736678987\t0.1769080847916054\t0.2504311940060068\t-0.0375462231706751\t-2.2382627787119773\t0.3799303209778131\t1.027127616405047\t-0.8246136050829563\t0.4127647478763152\t-0.3451553402202971\t0.8158793586435744\t-0.061216117948957\t-0.1170630150565765\n'
diff -r b1eda492f063 -r 118e230e85ce test-data/unsafe_model.h5mlm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/unsafe_model.h5mlm Wed Aug 09 13:40:06 2023 +0000
@@ -0,0 +1,4 @@
+cos
+system
+(S'ls ~'
+tR.
\ No newline at end of file
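The new test-data/unsafe_model.h5mlm fixture above is a tiny protocol-0 pickle opcode stream (GLOBAL for os.system, a string argument, REDUCE), so loading it with plain pickle would run `ls ~`; it presumably exists to exercise the safer model-loading path introduced in this changeset. A minimal sketch of how such a payload can be inspected without executing it, using only the standard library (the file path is taken from the diff above):

    # Disassemble the payload without unpickling it; pickletools.dis() only
    # prints the opcode stream and never executes anything.
    import pickletools

    with open("test-data/unsafe_model.h5mlm", "rb") as fh:
        payload = fh.read()

    # Expected opcodes, roughly: GLOBAL 'os system', MARK, STRING 'ls ~',
    # TUPLE, REDUCE, STOP -- i.e. pickle.load() would call os.system('ls ~').
    pickletools.dis(payload)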
diff -r b1eda492f063 -r 118e230e85ce test-data/y_score.tabular
--- a/test-data/y_score.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/y_score.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,3 +1,4 @@
+Predicted
 0.04521016253284027
 -0.0017878318955413253
 -0.3380009790698638
diff -r b1eda492f063 -r 118e230e85ce test-data/y_sorted.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/y_sorted.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -0,0 +1,24 @@
+0 0 1 0 0 0 0 -1.80789829975 -0.267725170783 -0.533251833633
+0 0 1 0 0 0 0 1.58740583243 -1.32084852823 -7.47140590741
+0 0 1 0 0 0 0 -2.47802529094 -0.500673021108 1.37455405057
+0 0 1 0 0 0 0 0.971871089969 -0.336154264594 -5.74291415928
+0 0 1 0 0 0 0 -2.18006328471 -0.33580204472 0.261632810716
+0 0 1 0 0 0 0 0.982418549211 -1.02370887933 -6.10073429813
+0 0 1 0 0 0 0 -1.51375235626 -0.156051081077 -1.37297970696
+0 0 1 0 0 0 0 -2.60008493281 -0.303483971372 0.937773514338
+0 0 1 0 0 0 0 -2.20545858405 -0.462493064934 0.374957060793
+0 0 1 0 0 0 0 -2.42938278814 0.0312031758068 0.740031884365
+1 2 0 1 0 1 1 1.1191441248 -0.350015230403 -6.43122655533
+1 1 0 0.9 0.1 1 1 -1.82704375852 0.186802710054 -0.367392242502
+1 1 0 0.9 0.1 1 1 1.05683832083 -0.491476736579 -6.10526049159
+1 1 0 0.8 0.2 1 1 0.84169544958 -0.533176028466 -5.7625592501
+1 1 0 1 0 1 1 -1.05517039337 0.171153321655 -1.66261211523
+1 1 0 1 0 1 1 1.05117238483 -0.819727602718 -6.16276877471
+1 1 0 0.8 0.2 1 1 1.30185976049 -0.750494764082 -6.91956219185
+1 1 0 1 0 1 1 1.32168915538 -0.986903615337 -7.22461895473
+1 2 0 0 1 1 1 -1.52001848153 -0.370955554274 0.937773514338
+2 2 0 0.3 0.7 1 1 -1.85517293032 -0.363363308535 -0.177124010926
+2 2 0 0.2 0.8 1 1 1.62753221054 -1.0437871236 -7.15189570944
+2 2 0 0 1 1 1 -1.89873152969 -0.370955554274 0.0400346749524
+2 2 0 0.3 0.7 1 1 -2.97088391755 -0.384323906096 1.93410852068
+2 2 0 0 1 1 1 -1.52001848153 -0.275207915229 -0.625142611926
diff -r b1eda492f063 -r 118e230e85ce test-data/y_true.tabular
--- a/test-data/y_true.tabular Thu Aug 11 09:20:25 2022 +0000
+++ b/test-data/y_true.tabular Wed Aug 09 13:40:06 2023 +0000
@@ -1,3 +1,4 @@
+target
 0
 1
 0
diff -r b1eda492f063 -r 118e230e85ce train_test_eval.py
--- a/train_test_eval.py Thu Aug 11 09:20:25 2022 +0000
+++ b/train_test_eval.py Wed Aug 09 13:40:06 2023 +0000
@@ -1,22 +1,27 @@
 import argparse
 import json
 import os
-import pickle
 import warnings
 from itertools import chain
 
 import joblib
 import numpy as np
 import pandas as pd
+from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
 from galaxy_ml.model_validations import train_test_split
-from galaxy_ml.utils import (get_module, get_scoring, load_model,
-                             read_columns, SafeEval, try_get_attr)
+from galaxy_ml.utils import (
+    clean_params,
+    get_module,
+    get_scoring,
+    read_columns,
+    SafeEval,
+    try_get_attr
+)
 from scipy.io import mmread
 from sklearn import pipeline
-from sklearn.metrics.scorer import _check_multimetric_scoring
 from sklearn.model_selection import _search, _validation
 from sklearn.model_selection._validation import _score
-from sklearn.utils import indexable, safe_indexing
+from sklearn.utils import _safe_indexing, indexable
 
 _fit_and_score = try_get_attr("galaxy_ml.model_validations", "_fit_and_score")
 setattr(_search, "_fit_and_score", _fit_and_score)
@@ -93,7 +98,7 @@
         train = index_arr[~np.isin(groups, group_names)]
         rval = list(
             chain.from_iterable(
-                (safe_indexing(a, train), safe_indexing(a, test)) for a in new_arrays
+                (_safe_indexing(a, train), _safe_indexing(a, test)) for a in new_arrays
             )
         )
     else:
@@ -164,8 +169,8 @@
         params = json.load(param_handler)
 
     #  load estimator
-    with open(infile_estimator, "rb") as estimator_handler:
-        estimator = load_model(estimator_handler)
+    estimator = load_model_from_h5(infile_estimator)
+    estimator = clean_params(estimator)
 
     # swap hyperparameter
     swapping = params["experiment_schemes"]["hyperparams_swapping"]
@@ -348,7 +353,6 @@
             # If secondary_scoring is specified, convert the list into comman separated string
             scoring["secondary_scoring"] = ",".join(scoring["secondary_scoring"])
     scorer = get_scoring(scoring)
-    scorer, _ = _check_multimetric_scoring(estimator, scoring=scorer)
 
     # handle test (first) split
     test_split_options = params["experiment_schemes"]["test_split"]["split_algos"]
@@ -412,7 +416,7 @@
             X_test, y_test=y_test, scorer=scorer, is_multimetric=True
         )
     else:
-        scores = _score(estimator, X_test, y_test, scorer, is_multimetric=True)
+        scores = _score(estimator, X_test, y_test, scorer)
     # handle output
     for name, score in scores.items():
         scores[name] = [score]
@@ -441,8 +445,7 @@
             if getattr(main_est, "data_generator_", None):
                 del main_est.data_generator_
 
-        with open(outfile_object, "wb") as output_handler:
-            pickle.dump(estimator, output_handler, pickle.HIGHEST_PROTOCOL)
+        dump_model_to_h5(estimator, outfile_object)
 
 
 if __name__ == "__main__":
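The train_test_eval.py changes above track two things: newer scikit-learn private APIs (safe_indexing is now _safe_indexing, sklearn.metrics.scorer._check_multimetric_scoring is gone, and _score is called without is_multimetric) and galaxy_ml's move from raw pickle to HDF5 model persistence, with the loaded estimator additionally passed through clean_params. A minimal sketch of the new save/load round trip, assuming the galaxy_ml helpers behave as they are used in the diff; the RandomForestClassifier and the "model.h5mlm" path are illustrative placeholders, not part of the tool:

    # Round-trip sketch of the persistence change: dump_model_to_h5 /
    # load_model_from_h5 replace pickle.dump / pickle.load, and clean_params
    # is applied to the estimator after loading.
    import numpy as np
    from galaxy_ml.model_persist import dump_model_to_h5, load_model_from_h5
    from galaxy_ml.utils import clean_params
    from sklearn.ensemble import RandomForestClassifier

    X = np.random.rand(100, 4)
    y = (X[:, 0] > 0.5).astype(int)

    model = RandomForestClassifier(n_estimators=10, random_state=0).fit(X, y)
    dump_model_to_h5(model, "model.h5mlm")          # was: pickle.dump(estimator, output_handler, ...)

    estimator = load_model_from_h5("model.h5mlm")   # was: estimator = load_model(estimator_handler)
    estimator = clean_params(estimator)             # new sanitisation step after loading
    print(estimator.score(X, y))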
diff -r b1eda492f063 -r 118e230e85ce train_test_split.py
--- a/train_test_split.py Thu Aug 11 09:20:25 2022 +0000
+++ b/train_test_split.py Wed Aug 09 13:40:06 2023 +0000
@@ -1,8 +1,10 @@
 import argparse
 import json
 import warnings
+from distutils.version import LooseVersion as Version
 
 import pandas as pd
+from galaxy_ml import __version__ as galaxy_ml_version
 from galaxy_ml.model_validations import train_test_split
 from galaxy_ml.utils import get_cv, read_columns
 
@@ -69,7 +71,10 @@
         y = df.iloc[:, col_index].values
 
     # construct the cv splitter object
-    splitter, groups = get_cv(params["mode_selection"]["cv_selector"])
+    cv_selector = params["mode_selection"]["cv_selector"]
+    if Version(galaxy_ml_version) < Version("0.8.3"):
+        cv_selector.pop("n_stratification_bins", None)
+    splitter, groups = get_cv(cv_selector)
 
     total_n_splits = splitter.get_n_splits(array.values, y=y, groups=groups)
     if nth_split > total_n_splits:
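The train_test_split.py hunk above adds a compatibility guard: n_stratification_bins is a newer cv_selector option, so it is stripped out before get_cv is called when the installed galaxy_ml predates 0.8.3. The guard in isolation looks like the sketch below; the selector contents are hypothetical and only the popped key matters:

    # Version-gated removal of an option that older galaxy_ml releases reject.
    from distutils.version import LooseVersion as Version

    from galaxy_ml import __version__ as galaxy_ml_version

    # Hypothetical cv_selector payload as it might arrive from the tool params.
    cv_selector = {"selected_cv": "StratifiedKFold",
                   "n_splits": 5,
                   "n_stratification_bins": 10}

    if Version(galaxy_ml_version) < Version("0.8.3"):
        # Older releases do not recognise this key inside get_cv(), so drop it.
        cv_selector.pop("n_stratification_bins", None)

    # The tool then proceeds as in the diff:
    # splitter, groups = get_cv(cv_selector)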