Commit message:
"planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit 756f8be9c3cd437e131e6410cd625c24fe078e8c" |
modified:
main_macros.xml test-data/glm_result07
added:
test-data/lgb_class_model.txt test-data/lgb_prediction_result01.tabular test-data/lgb_regr_model.txt
diff -r 2df8f5c30edc -r f9639b488779 main_macros.xml
--- a/main_macros.xml	Mon Dec 16 05:21:05 2019 -0500
+++ b/main_macros.xml	Wed Jan 22 07:44:40 2020 -0500
@@ -220,6 +220,28 @@
     </param>
   </xml>
 
+  <!-- LightGBM -->
+  <xml name="feature_fraction" token_help="LightGBM will randomly select part of the features for each iteration (tree) if feature_fraction is smaller than 1.0. For example, if you set it to 0.8, LightGBM will select 80% of features before training each tree.">
+    <param argument="feature_fraction" type="float" value="1.0" label="Proportion of features to train each tree" help="@HELP@"/>
+  </xml>
+
+  <xml name="lambda_l1" token_help=" ">
+    <param argument="lambda_l1" type="float" value="0.0" label="L1 regularization" help="@HELP@"/>
+  </xml>
+
+  <xml name="lambda_l2" token_help=" ">
+    <param argument="lambda_l2" type="float" value="0.0" label="L2 regularization" help="@HELP@"/>
+  </xml>
+
+  <xml name="min_gain_to_split" token_help=" ">
+    <param argument="min_gain_to_split" type="float" value="0.0" label="Minimal gain to perform split" help="@HELP@"/>
+  </xml>
+
+  <xml name="min_child_weight" token_help="Minimal sum hessian in one leaf. It can be used to deal with over-fitting.">
+    <param argument="min_child_weight" type="float" value="0.0" label="Minimal sum hessian in one leaf" help="@HELP@"/>
+  </xml>
+
   <!--Parameters-->
   <xml name="tol" token_default_value="0.0" token_help_text="Early stopping heuristics based on the relative center changes. Set to default (0.0) to disable this convergence detection.">
     <param argument="tol" type="float" optional="true" value="@DEFAULT_VALUE@" label="Tolerance" help="@HELP_TEXT@"/>
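Note (illustrative, not part of the changeset): the macros above only declare Galaxy form inputs, and their argument names map one-to-one onto LightGBM training parameters. A minimal sketch of passing such values to LightGBM's native Python API, assuming the lightgbm package is installed; X and y are placeholders, and the values mirror the ones recorded in test-data/lgb_class_model.txt below.

import numpy as np
import lightgbm as lgb

# Values chosen to mirror the parameters block of test-data/lgb_class_model.txt.
params = {
    "objective": "binary",
    "learning_rate": 0.02,
    "feature_fraction": 0.9,    # proportion of features used per tree
    "lambda_l1": 0.04,          # L1 regularization
    "lambda_l2": 0.07,          # L2 regularization
    "min_gain_to_split": 0.02,  # minimal gain to perform a split
    "min_child_weight": 39,     # minimal sum hessian in one leaf
}

X = np.random.rand(100, 4)        # placeholder data, 4 features
y = np.random.randint(0, 2, 100)  # placeholder binary labels
booster = lgb.train(params, lgb.Dataset(X, label=y), num_boost_round=100)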
diff -r 2df8f5c30edc -r f9639b488779 test-data/glm_result07
--- a/test-data/glm_result07	Mon Dec 16 05:21:05 2019 -0500
+++ b/test-data/glm_result07	Wed Jan 22 07:44:40 2020 -0500
@@ -1,5 +1,5 @@
 86.97021227350001 1.00532111569 -1.01739601979 -0.613139481654 0.641846874331 0.6093152833692663
 91.2021798817 -0.6215229712070001 1.11914889596 0.390012184498 1.28956938152 0.5963828164943974
--47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.07927429227257943
+-47.4101632272 -0.638416457964 -0.7327774684530001 -0.8640261049779999 -1.06109770116 -0.07927429227257948
 61.712804630200004 -1.0999480057700002 -0.739679672932 0.585657963012 1.4890682753600002 0.2621440442022235
 -206.998295124 0.130238853011 0.70574123041 1.3320656526399999 -1.3322092373799999 -1.7330414645145749
diff -r 2df8f5c30edc -r f9639b488779 test-data/lgb_class_model.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_class_model.txt	Wed Jan 22 07:44:40 2020 -0500
@@ -0,0 +1,151 @@
+tree
+version=v3
+num_class=1
+num_tree_per_iteration=1
+label_index=0
+max_feature_idx=3
+objective=binary sigmoid:1
+feature_names=Column_0 Column_1 Column_2 Column_3
+feature_infos=none none none none
+tree_sizes=228
+
+Tree=0
+num_leaves=1
+num_cat=0
+split_feature=
+split_gain=
+threshold=
+decision_type=
+left_child=
+right_child=
+leaf_value=-0.40546510810816427
+leaf_weight=
+leaf_count=
+internal_value=
+internal_weight=
+internal_count=
+shrinkage=1
+
+
+end of trees
+
+feature importances:
+
+parameters:
+[boosting: gbdt]
+[objective: binary]
+[metric: binary_logloss]
+[tree_learner: serial]
+[device_type: cpu]
+[data: ]
+[valid: ]
+[num_iterations: 100]
+[learning_rate: 0.02]
+[num_leaves: 32]
+[num_threads: 0]
+[max_depth: 8]
+[min_data_in_leaf: 20]
+[min_sum_hessian_in_leaf: 39]
+[bagging_fraction: 0.9]
+[pos_bagging_fraction: 1]
+[neg_bagging_fraction: 1]
+[bagging_freq: 0]
+[bagging_seed: 18467]
+[feature_fraction: 0.9]
+[feature_fraction_bynode: 1]
+[feature_fraction_seed: 26500]
+[early_stopping_round: 0]
+[first_metric_only: 0]
+[max_delta_step: 0]
+[lambda_l1: 0.04]
+[lambda_l2: 0.07]
+[min_gain_to_split: 0.02]
+[drop_rate: 0.1]
+[max_drop: 50]
+[skip_drop: 0.5]
+[xgboost_dart_mode: 0]
+[uniform_drop: 0]
+[drop_seed: 6334]
+[top_rate: 0.2]
+[other_rate: 0.1]
+[min_data_per_group: 100]
+[max_cat_threshold: 32]
+[cat_l2: 10]
+[cat_smooth: 10]
+[max_cat_to_onehot: 4]
+[top_k: 20]
+[monotone_constraints: ]
+[feature_contri: ]
+[forcedsplits_filename: ]
+[forcedbins_filename: ]
+[refit_decay_rate: 0.9]
+[cegb_tradeoff: 1]
+[cegb_penalty_split: 0]
+[cegb_penalty_feature_lazy: ]
+[cegb_penalty_feature_coupled: ]
+[verbosity: -1]
+[max_bin: 255]
+[max_bin_by_feature: ]
+[min_data_in_bin: 3]
+[bin_construct_sample_cnt: 200000]
+[histogram_pool_size: -1]
+[data_random_seed: 41]
+[output_model: LightGBM_model.txt]
+[snapshot_freq: -1]
+[input_model: ]
+[output_result: LightGBM_predict_result.txt]
+[initscore_filename: ]
+[valid_data_initscores: ]
+[pre_partition: 0]
+[enable_bundle: 1]
+[max_conflict_rate: 0]
+[is_enable_sparse: 1]
+[sparse_threshold: 0.8]
+[use_missing: 1]
+[zero_as_missing: 0]
+[two_round: 0]
+[save_binary: 0]
+[header: 0]
+[label_column: ]
+[weight_column: ]
+[group_column: ]
+[ignore_column: ]
+[categorical_feature: ]
+[predict_raw_score: 0]
+[predict_leaf_index: 0]
+[predict_contrib: 0]
+[num_iteration_predict: -1]
+[pred_early_stop: 0]
+[pred_early_stop_freq: 10]
+[pred_early_stop_margin: 10]
+[convert_model_language: ]
+[convert_model: gbdt_prediction.cpp]
+[num_class: 1]
+[is_unbalance: 0]
+[scale_pos_weight: 1]
+[sigmoid: 1]
+[boost_from_average: 1]
+[reg_sqrt: 0]
+[alpha: 0.9]
+[fair_c: 1]
+[poisson_max_delta_step: 0.7]
+[tweedie_variance_power: 1.5]
+[max_position: 20]
+[lambdamart_norm: 1]
+[label_gain: ]
+[metric_freq: 1]
+[is_provide_training_metric: 0]
+[eval_at: ]
+[multi_error_top_k: 1]
+[num_machines: 1]
+[local_listen_port: 12400]
+[time_out: 120]
+[machine_list_filename: ]
+[machines: ]
+[gpu_platform_id: -1]
+[gpu_device_id: -1]
+[gpu_use_dp: 0]
+
+end of parameters
+
+pandas_categorical:null
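Illustrative note: a model dump such as test-data/lgb_class_model.txt can be reloaded and scored outside Galaxy. A minimal sketch, assuming the lightgbm package; the input rows are made-up examples with the four features the file declares (max_feature_idx=3), and because the objective is binary, predict returns positive-class probabilities.

import numpy as np
import lightgbm as lgb

# Load the dumped booster from the text file added in this changeset.
booster = lgb.Booster(model_file="test-data/lgb_class_model.txt")

# Two made-up rows with four feature columns each.
X_new = np.array([[0.1, -1.2, 3.4, 0.0],
                  [2.0, 0.5, -0.7, 1.1]])
print(booster.predict(X_new))  # probabilities of the positive class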
diff -r 2df8f5c30edc -r f9639b488779 test-data/lgb_prediction_result01.tabular
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_prediction_result01.tabular	Wed Jan 22 07:44:40 2020 -0500
b'@@ -0,0 +1,262 @@\n+year\tmonth\tday\ttemp_2\ttemp_1\taverage\tforecast_noaa\tforecast_acc\tforecast_under\tfriend\tweek_Fri\tweek_Mon\tweek_Sat\tweek_Sun\tweek_Thurs\tweek_Tues\tweek_Wed\tpredicted\n+2016\t9\t19\t68\t69\t69.7\t65\t74\t71\t88\t0\t1\t0\t0\t0\t0\t0\t71.89319490976423\n+2016\t4\t14\t60\t59\t58.1\t57\t63\t58\t66\t0\t0\t0\t0\t1\t0\t0\t59.01499037390416\n+2016\t7\t30\t85\t88\t77.3\t75\t79\t77\t70\t0\t0\t1\t0\t0\t0\t0\t75.7624470867011\n+2016\t5\t15\t82\t65\t64.7\t63\t69\t64\t58\t0\t0\t0\t1\t0\t0\t0\t57.569131115445174\n+2016\t1\t18\t54\t50\t47.5\t44\t48\t49\t58\t0\t1\t0\t0\t0\t0\t0\t53.09785655110459\n+2016\t1\t25\t48\t51\t48.2\t45\t51\t49\t63\t0\t1\t0\t0\t0\t0\t0\t53.51723077599964\n+2016\t11\t25\t49\t52\t48.6\t45\t52\t47\t41\t1\t0\t0\t0\t0\t0\t0\t51.95292617354113\n+2016\t7\t20\t73\t78\t76.7\t75\t78\t77\t66\t0\t0\t0\t0\t0\t0\t1\t80.03391243189029\n+2016\t12\t17\t39\t35\t45.2\t43\t47\t46\t38\t0\t0\t1\t0\t0\t0\t0\t38.021020662843554\n+2016\t12\t8\t42\t40\t46.1\t45\t51\t47\t36\t0\t0\t0\t0\t1\t0\t0\t43.871817980640564\n+2016\t12\t28\t42\t47\t45.3\t41\t49\t44\t58\t0\t0\t0\t0\t0\t0\t1\t45.76312225952035\n+2016\t7\t17\t76\t72\t76.3\t76\t78\t77\t88\t0\t0\t0\t1\t0\t0\t0\t78.44319295537714\n+2016\t7\t7\t69\t76\t74.4\t73\t77\t74\t72\t0\t0\t0\t0\t1\t0\t0\t70.5335293567219\n+2016\t12\t15\t40\t39\t45.3\t45\t49\t47\t46\t0\t0\t0\t0\t1\t0\t0\t39.057420195088596\n+2016\t6\t27\t71\t78\t72.2\t70\t74\t72\t84\t0\t1\t0\t0\t0\t0\t0\t82.20245159198711\n+2016\t5\t31\t64\t71\t67.3\t63\t72\t68\t85\t0\t0\t0\t0\t0\t1\t0\t78.30191181424183\n+2016\t1\t20\t54\t48\t47.7\t44\t52\t49\t61\t0\t0\t0\t0\t0\t0\t1\t54.04964089659319\n+2016\t8\t10\t73\t72\t77.0\t77\t78\t77\t68\t0\t0\t0\t0\t0\t0\t1\t76.39576480057465\n+2016\t3\t23\t56\t57\t54.7\t50\t58\t55\t70\t0\t0\t0\t0\t0\t0\t1\t52.935205395366545\n+2016\t12\t24\t45\t40\t45.1\t44\t47\t46\t39\t0\t0\t1\t0\t0\t0\t0\t40.72928485821922\n+2016\t1\t19\t50\t54\t47.6\t47\t49\t48\t53\t0\t0\t0\t0\t0\t1\t0\t48.15143238233738\n+2016\t11\t6\t65\t58\t53.2\t52\t57\t55\t71\t0\t0\t0\t1\t0\t0\t0\t61.18233215509339\n+2016\t4\t17\t60\t68\t58.6\t58\t62\t59\t54\t0\t0\t0\t1\t0\t0\t0\t77.18078446802005\n+2016\t10\t29\t60\t65\t55.3\t55\t59\t55\t65\t0\t0\t1\t0\t0\t0\t0\t67.20288900944993\n+2016\t2\t1\t48\t47\t48.8\t46\t49\t49\t51\t0\t1\t0\t0\t0\t0\t0\t48.34602414815062\n+2016\t12\t12\t44\t44\t45.6\t43\t50\t45\t42\t0\t1\t0\t0\t0\t0\t0\t44.253448105719876\n+2016\t5\t30\t64\t64\t67.1\t64\t70\t66\t69\t0\t1\t0\t0\t0\t0\t0\t71.3927492219339\n+2016\t10\t23\t59\t62\t57.1\t57\t58\t59\t67\t0\t0\t0\t1\t0\t0\t0\t62.58006444737433\n+2016\t9\t30\t68\t66\t65.7\t64\t67\t65\t74\t1\t0\t0\t0\t0\t0\t0\t65.68744660437471\n+2016\t9\t12\t77\t70\t71.8\t67\t73\t73\t90\t0\t1\t0\t0\t0\t0\t0\t73.72156656653756\n+2016\t11\t2\t59\t57\t54.2\t54\t58\t55\t70\t0\t0\t0\t0\t0\t0\t1\t57.84328293804783\n+2016\t11\t17\t55\t50\t50.5\t46\t51\t50\t57\t0\t0\t0\t0\t1\t0\t0\t50.42632486665048\n+2016\t3\t3\t58\t55\t51.8\t49\t54\t50\t71\t0\t0\t0\t0\t1\t0\t0\t59.623494716733035\n+2016\t11\t21\t57\t55\t49.5\t46\t51\t49\t67\t0\t1\t0\t0\t0\t0\t0\t53.32237486832612\n+2016\t12\t27\t42\t42\t45.2\t41\t50\t47\t47\t0\t0\t0\t0\t0\t1\t0\t46.480428465622566\n+2016\t4\t24\t64\t65\t60.1\t57\t61\t60\t41\t0\t0\t0\t1\t0\t0\t0\t55.57021075899771\n+2016\t5\t20\t64\t63\t65.6\t63\t70\t64\t73\t1\t0\t0\t0\t0\t0\t0\t65.97337851386187\n+2016\t1\t16\t49\t48\t47.3\t45\t52\t46\t28\t0\t0\t1\t0\t0\t0\t0\t51.12832230287266\n+2016\t12\t7\t40\t42\t46.3\t44\t51\t46\t62\t0\t0\t0\t0\t0\t0\t1\t40.14107546376078\n+2016\t1\t7\t44\t51\t46.2\t45\t49\t46\t38\t0\t0\t0\t0\t1\t0\t0\t
43.30978565286583\n+2016\t9\t24\t67\t64\t68.0\t65\t71\t66\t64\t0\t0\t1\t0\t0\t0\t0\t67.6354117078402\n+2016\t8\t30\t79\t75\t74.6\t74\t76\t75\t63\t0\t0\t0\t0\t0\t1\t0\t70.28790811869037\n+2016\t1\t11\t50\t52\t46.7\t42\t48\t48\t39\t0\t1\t0\t0\t0\t0\t0\t46.11736014295371\n+2016\t6\t9\t85\t67\t68.6\t66\t73\t69\t80\t0\t0\t0\t0\t1\t0\t0\t63.20117179031277\n+2016\t9\t22\t67\t68\t68.7\t65\t70\t69\t56\t0\t0\t0\t0\t1\t0\t0\t67.0947545497616\n+2016\t3\t25\t53\t54\t55.0\t53\t57\t57\t42\t1\t0\t0\t0\t0\t0\t0\t56.770929191177046\n+2016\t10\t24\t62\t62\t56.8\t52\t61\t57\t70\t0\t1\t0\t0\t0\t0\t0\t60.93905202931022\n+2016\t7\t16\t77\t76\t76.1\t76\t78\t75\t61\t0\t0\t1\t0\t0\t0\t0\t72.66331027774964\n+2016\t7\t1\t74\t73\t73.1\t71\t75\t72\t93\t1\t0\t0\t0\t0\t0\t0\t73.83790969748735\n+2016\t11\t18\t50\t52\t50.3\t50\t53\t50\t35\t1\t0\t0\t0\t0\t0\t0\t53.62951439199429\n+2016\t9\t3\t75\t70\t73.9\t71\t75\t73\t68\t0\t0\t1\t0\t0\t0\t0\t68.25054582286273\n+2016\t8\t2\t73\t77\t77.4\t75\t80\t79\t62\t0\t0\t0\t0\t0\t1\t0\t73.40030750588237\n+2016\t4\t5\t69\t60\t56.6\t52\t58\t56\t72\t0\t0\t0\t0\t0\t1\t0\t56.524806994243974\n+2016\t3\t13\t55\t52\t53.3\t50\t55\t53\t54\t0\t0\t0\t1\t0\t0\t0\t55.040326173834494\n+2016\t8\t28\t81\t79\t75.0\t71\t77\t76\t85\t0\t0\t0\t1\t0\t0\t0\t78.6959854541002\n+2016\t4\t9\t77\t76\t57.2\t53\t61\t57\t74\t0\t0\t1\t0\t0\t0\t0\t65.6864466867755\n+2016\t5\t26\t66\t66\t66.5\t64\t70\t65\t85\t0\t0\t0\t0\t1\t0\t0\t64.55452338839596\n+2016\t10\t10\t68\t57\t61.8\t58\t64\t61\t62\t0\t1\t0\t0\t0\t0\t0\t'..b'\t0\t0\t1\t0\t0\t0\t0\t74.99121824276693\n+2016\t6\t13\t65\t70\t69.3\t66\t72\t69\t79\t0\t1\t0\t0\t0\t0\t0\t68.0732655379396\n+2016\t2\t15\t55\t58\t49.9\t46\t52\t49\t53\t0\t1\t0\t0\t0\t0\t0\t55.102004217211054\n+2016\t8\t8\t72\t72\t77.1\t76\t78\t77\t65\t0\t1\t0\t0\t0\t0\t0\t72.67136576622894\n+2016\t7\t12\t74\t74\t75.4\t74\t77\t77\t71\t0\t0\t0\t0\t0\t1\t0\t78.09245255865027\n+2016\t10\t3\t63\t65\t64.5\t63\t68\t65\t49\t0\t1\t0\t0\t0\t0\t0\t61.87759257833735\n+2016\t4\t18\t68\t77\t58.8\t55\t59\t57\t39\t0\t1\t0\t0\t0\t0\t0\t88.05552200437032\n+2016\t2\t25\t60\t59\t50.9\t49\t51\t49\t35\t0\t0\t0\t0\t1\t0\t0\t60.080453480066495\n+2016\t1\t2\t44\t45\t45.7\t41\t50\t44\t61\t0\t0\t1\t0\t0\t0\t0\t42.90260865929038\n+2016\t2\t21\t51\t53\t50.5\t49\t54\t52\t46\t0\t0\t0\t1\t0\t0\t0\t51.27916079433249\n+2016\t3\t24\t57\t53\t54.9\t54\t56\t56\t72\t0\t0\t0\t0\t1\t0\t0\t53.574452562775726\n+2016\t7\t27\t85\t79\t77.3\t73\t78\t79\t79\t0\t0\t0\t0\t0\t0\t1\t79.10426435183506\n+2016\t2\t4\t51\t49\t49.0\t44\t54\t51\t44\t0\t0\t0\t0\t1\t0\t0\t49.689040256262984\n+2016\t10\t7\t66\t63\t62.9\t62\t67\t64\t78\t1\t0\t0\t0\t0\t0\t0\t64.03671566307656\n+2016\t4\t4\t63\t69\t56.5\t54\t59\t56\t45\t0\t1\t0\t0\t0\t0\t0\t60.302013981268445\n+2016\t2\t24\t51\t60\t50.8\t47\t53\t50\t46\t0\t0\t0\t0\t0\t0\t1\t60.45570099663864\n+2016\t10\t8\t63\t64\t62.5\t60\t65\t61\t73\t0\t0\t1\t0\t0\t0\t0\t67.37545950141302\n+2016\t9\t15\t75\t79\t71.0\t66\t76\t69\t64\t0\t0\t0\t0\t1\t0\t0\t71.13704674726802\n+2016\t1\t14\t49\t55\t47.0\t43\t47\t46\t58\t0\t0\t0\t0\t1\t0\t0\t48.22108131604957\n+2016\t4\t1\t68\t73\t56.0\t54\t59\t55\t41\t1\t0\t0\t0\t0\t0\t0\t70.48625922502303\n+2016\t10\t17\t62\t60\t59.1\t57\t63\t59\t62\t0\t1\t0\t0\t0\t0\t0\t60.636430182256134\n+2016\t6\t18\t71\t67\t70.2\t67\t75\t69\t77\t0\t0\t1\t0\t0\t0\t0\t66.4433387859395\n+2016\t12\t26\t41\t42\t45.2\t45\t48\t46\t58\t0\t1\t0\t0\t0\t0\t0\t45.76312225952035\n+2016\t5\t17\t57\t60\t65.0\t62\t65\t65\t55\t0\t0\t0\t0\t0\t1\t0\t69.19684320311531\n+2016\t11\t20\t55\t57\t49.8\t47\t54\t48\t30\t0\t0\t0\t1\t0\t0\t0\t5
4.710033660556284\n+2016\t12\t18\t35\t35\t45.2\t44\t46\t46\t36\t0\t0\t0\t1\t0\t0\t0\t39.46756549798724\n+2016\t9\t17\t71\t75\t70.3\t66\t73\t70\t84\t0\t0\t1\t0\t0\t0\t0\t69.35617071475366\n+2016\t2\t26\t59\t61\t51.1\t48\t56\t53\t65\t1\t0\t0\t0\t0\t0\t0\t59.73185678845597\n+2016\t2\t22\t53\t51\t50.6\t46\t51\t50\t59\t0\t1\t0\t0\t0\t0\t0\t53.18020992777652\n+2016\t6\t26\t69\t71\t71.9\t67\t74\t72\t70\t0\t0\t0\t1\t0\t0\t0\t78.40488138310084\n+2016\t7\t11\t71\t74\t75.3\t74\t79\t75\t71\t0\t1\t0\t0\t0\t0\t0\t73.09136153134294\n+2016\t12\t30\t48\t48\t45.4\t44\t46\t44\t42\t1\t0\t0\t0\t0\t0\t0\t50.28091536128874\n+2016\t7\t9\t68\t74\t74.9\t70\t79\t76\t60\t0\t0\t1\t0\t0\t0\t0\t71.20339563359369\n+2016\t6\t21\t70\t76\t70.8\t68\t75\t71\t57\t0\t0\t0\t0\t0\t1\t0\t72.95771553550574\n+2016\t3\t2\t54\t58\t51.6\t47\t54\t52\t37\t0\t0\t0\t0\t0\t0\t1\t55.415808249340266\n+2016\t2\t20\t53\t51\t50.4\t48\t55\t51\t43\t0\t0\t1\t0\t0\t0\t0\t53.467113223494465\n+2016\t9\t9\t67\t72\t72.6\t68\t77\t71\t78\t1\t0\t0\t0\t0\t0\t0\t73.51226363759592\n+2016\t9\t26\t67\t76\t67.2\t64\t69\t69\t74\t0\t1\t0\t0\t0\t0\t0\t77.37773578267426\n+2016\t1\t22\t52\t52\t47.9\t47\t48\t48\t60\t1\t0\t0\t0\t0\t0\t0\t55.28241093592609\n+2016\t11\t27\t52\t53\t48.2\t48\t49\t49\t53\t0\t0\t0\t1\t0\t0\t0\t49.4682256394598\n+2016\t6\t12\t67\t65\t69.1\t65\t73\t70\t83\t0\t0\t0\t1\t0\t0\t0\t69.67028083708267\n+2016\t10\t20\t61\t58\t58.1\t58\t59\t58\t43\t0\t0\t0\t0\t1\t0\t0\t62.70580969857187\n+2016\t7\t13\t74\t77\t75.6\t74\t78\t76\t56\t0\t0\t0\t0\t0\t0\t1\t75.83483000433928\n+2016\t11\t7\t58\t61\t52.9\t51\t56\t51\t35\t0\t1\t0\t0\t0\t0\t0\t63.36296331230509\n+2016\t10\t1\t66\t67\t65.3\t64\t70\t64\t54\t0\t0\t1\t0\t0\t0\t0\t63.164537625242914\n+2016\t11\t22\t55\t54\t49.3\t46\t54\t49\t58\t0\t0\t0\t0\t0\t1\t0\t52.82424210029568\n+2016\t6\t1\t71\t79\t67.4\t65\t69\t66\t58\t0\t0\t0\t0\t0\t0\t1\t74.74970804919086\n+2016\t5\t13\t81\t77\t64.3\t63\t67\t66\t67\t1\t0\t0\t0\t0\t0\t0\t80.80644721526915\n+2016\t6\t3\t75\t71\t67.7\t64\t71\t66\t55\t1\t0\t0\t0\t0\t0\t0\t79.42582159310099\n+2016\t4\t12\t59\t58\t57.7\t54\t59\t57\t61\t0\t0\t0\t0\t0\t1\t0\t60.16800954211399\n+2016\t3\t31\t64\t68\t55.9\t55\t59\t56\t56\t0\t0\t0\t0\t1\t0\t0\t72.6475912037824\n+2016\t12\t14\t43\t40\t45.4\t45\t48\t45\t49\t0\t0\t0\t0\t0\t0\t1\t39.528254308940774\n+2016\t8\t5\t75\t80\t77.3\t75\t81\t78\t71\t1\t0\t0\t0\t0\t0\t0\t79.29604852292455\n+2016\t5\t4\t87\t74\t62.3\t59\t65\t64\t61\t0\t0\t0\t0\t0\t0\t1\t61.10486980813424\n+2016\t12\t31\t48\t57\t45.5\t42\t48\t47\t57\t0\t0\t1\t0\t0\t0\t0\t43.43550339863723\n+2016\t1\t21\t48\t52\t47.8\t43\t51\t46\t57\t0\t0\t0\t0\t1\t0\t0\t52.14359963846155\n+2016\t7\t10\t74\t71\t75.1\t71\t77\t76\t95\t0\t0\t0\t1\t0\t0\t0\t74.74359573124775\n+2016\t3\t15\t54\t49\t53.6\t49\t58\t52\t70\t0\t0\t0\t0\t0\t1\t0\t51.087519902003045\n+2016\t4\t19\t77\t89\t59.0\t59\t63\t59\t61\t0\t0\t0\t0\t0\t1\t0\t79.9965354251416\n+2016\t10\t14\t66\t60\t60.2\t56\t64\t60\t78\t1\t0\t0\t0\t0\t0\t0\t59.60815096341522\n+2016\t4\t15\t59\t59\t58.3\t58\t61\t60\t40\t1\t0\t0\t0\t0\t0\t0\t59.51666216464264\n' |
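Illustrative note: the table above is tab-separated with a header row; the last column, predicted, holds the model output for each of the 261 rows. A minimal sketch of inspecting such a file with pandas, which is an assumption about tooling rather than the tool's own code:

import pandas as pd

# Read the tab-separated prediction table added in this changeset.
df = pd.read_csv("test-data/lgb_prediction_result01.tabular", sep="\t")

features = df.drop(columns=["predicted"])  # 17 feature columns
print(features.shape)
print(df["predicted"].describe())          # summary of the predicted values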
diff -r 2df8f5c30edc -r f9639b488779 test-data/lgb_regr_model.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/lgb_regr_model.txt	Wed Jan 22 07:44:40 2020 -0500
b'@@ -0,0 +1,180142 @@\n+tree\n+version=v3\n+num_class=1\n+num_tree_per_iteration=1\n+label_index=0\n+max_feature_idx=16\n+objective=regression\n+feature_names=Column_0 Column_1 Column_2 Column_3 Column_4 Column_5 Column_6 Column_7 Column_8 Column_9 Column_10 Column_11 Column_12 Column_13 Column_14 Column_15 Column_16\n+feature_infos=none [1:12] [1:31] [35:89] [35:117] [45.100000000000001:77.400000000000006] [41:77] [46:81] [44:79] [28:95] [0:1] [0:1] [0:1] [0:1] [0:1] [0:1] [0:1]\n+tree_sizes=518 530 532 530 531 532 530 534 533 535 535 532 532 535 534 530 534 533 532 533 533 537 534 456 531 456 534 458 458 536 538 532 536 613 538 532 536 458 533 536 613 533 537 536 533 537 534 537 537 536 458 532 535 612 538 537 614 617 615 615 538 539 615 616 617 539 616 536 615 537 615 539 540 618 619 617 538 540 458 539 457 459 461 460 538 616 613 612 614 459 617 542 618 461 541 615 617 540 619 542 618 622 541 618 622 543 541 541 618 619 538 619 617 462 542 620 618 459 618 541 544 457 460 623 619 461 545 459 625 461 624 624 544 620 623 543 622 623 544 544 622 621 544 545 464 544 462 624 621 543 623 463 543 622 622 545 462 543 624 464 622 464 624 616 463 464 624 464 624 543 541 623 464 617 541 545 625 541 620 542 541 541 542 542 518 543 539 547 542 547 541 546 542 543 541 540 626 543 544 543 545 548 541 545 546 542 543 545 547 548 546 544 546 543 542 548 544 547 541 543 546 547 540 544 545 542 545 626 543 539 463 544 544 542 544 545 545 544 539 546 628 544 545 547 543 545 547 543 541 543 543 543 545 620 544 545 522 543 544 545 544 549 546 543 543 545 603 546 544 602 543 547 623 544 546 544 623 544 549 544 624 545 546 623 543 624 543 545 623 543 546 542 546 545 546 623 547 464 542 544 544 545 543 548 622 466 546 624 542 545 624 544 624 545 547 623 544 546 543 465 547 542 625 545 545 625 543 625 542 547 624 544 549 544 545 626 621 545 542 465 549 545 466 624 465 621 465 627 622 542 623 466 539 464 548 626 543 545 543 465 625 546 466 624 626 623 542 540 544 623 544 625 544 545 546 466 621 546 623 467 546 547 544 543 546 546 544 543 543 546 624 547 466 541 547 546 465 548 543 546 544 547 546 547 544 465 543 625 547 546 546 544 543 546 625 464 625 544 544 624 546 466 623 546 623 627 543 624 546 544 546 545 550 625 548 545 543 547 544 627 465 547 622 628 545 627 543 460 627 466 546 544 544 629 624 628 623 545 543 624 547 544 625 545 544 548 544 543 466 549 547 627 465 629 623 544 467 548 545 622 550 546 548 627 621 548 548 466 625 464 544 624 546 544 626 465 630 545 546 547 546 547 630 619 548 628 547 624 545 626 546 627 549 544 468 547 542 545 549 548 627 628 549 628 546 549 546 544 621 624 544 628 547 545 546 465 629 626 546 546 545 545 545 625 546 464 549 545 545 628 546 550 467 544 630 628 547 547 543 628 544 543 624 546 548 548 465 631 545 624 546 623 467 546 626 467 543 548 630 549 545 547 548 549 547 545 629 466 548 624 549 548 540 630 468 546 542 544 546 546 627 630 469 549 543 547 548 467 625 629 627 547 548 546 545 540 544 549 625 541 546 467 548 546 548 631 544 627 468 550 622 629 543 544 548 548 547 467 630 467 549 628 468 547 468 549 627 467 546 546 548 546 630 543 468 544 626 546 545 627 627 548 545 549 548 625 626 631 626 546 625 547 629 547 469 548 545 548 547 469 546 629 547 545 468 549 549 548 544 548 630 545 547 549 467 547 543 630 544 547 467 547 548 627 623 626 465 469 547 547 547 546 629 548 548 546 546 547 545 548 627 467 550 545 548 548 546 545 552 548 546 629 546 547 543 625 548 548 550 545 547 543 545 548 548 548 549 466 547 549 543 548 547 548 628 469 626 546 626 547 545 550 628 549 
544 623 552 544 549 468 552 539 546 467 548 630 544 548 545 627 548 549 624 551 551 545 468 549 468 544 548 548 550 546 548 549 628 548 549 628 548 547 548 550 546 546 550 550 548 550 629 547 547 551 551 546 468 550 466 546 547 545 550 549 628 552 550 545 549 551 549 630 630 467 547 549 551 545 624 468 552 547 549 469 548 543 549 546 548 546 624 546 625 469 552 549 630 544 549 551 547 548 543 550 628 545 551 549 549 545 549 551 550 545 468 547 550 550 '..b'98 -0.0264908\n+internal_weight=0 166 98 95\n+internal_count=261 166 98 95\n+shrinkage=0.02\n+\n+\n+Tree=9998\n+num_leaves=5\n+num_cat=0\n+split_feature=6 9 2 1\n+split_gain=0.0814517 0.423897 0.49947 0.560638\n+threshold=69.500000000000014 71.500000000000014 13.500000000000002 4.5000000000000009\n+decision_type=2 2 2 2\n+left_child=1 2 -1 -4\n+right_child=-2 -3 3 -5\n+leaf_value=0.0015287226576175053 0.00082953833355114027 -0.0021162448255171214 0.0011383155735161057 -0.0019402952165041626\n+leaf_weight=73 48 39 41 60\n+leaf_count=73 48 39 41 60\n+internal_value=0 -0.00938209 0.0120356 -0.0341362\n+internal_weight=0 213 174 101\n+internal_count=261 213 174 101\n+shrinkage=0.02\n+\n+\n+Tree=9999\n+num_leaves=5\n+num_cat=0\n+split_feature=5 5 4 9\n+split_gain=0.0787592 0.317423 0.245512 0.291236\n+threshold=70.000000000000014 64.200000000000003 60.500000000000007 45.500000000000007\n+decision_type=2 2 2 2\n+left_child=1 2 3 -1\n+right_child=-2 -3 -4 -5\n+leaf_value=0.00095524065220772586 0.00069620116702481252 -0.001859639079375311 0.0015142907661079197 -0.0011629911884827296\n+leaf_weight=46 62 40 44 69\n+leaf_count=46 62 40 44 69\n+internal_value=0 -0.0108758 0.00956023 -0.015434\n+internal_weight=0 199 159 115\n+internal_count=261 199 159 115\n+shrinkage=0.02\n+\n+\n+end of trees\n+\n+feature importances:\n+Column_9=8322\n+Column_2=6585\n+Column_5=5272\n+Column_4=4915\n+Column_6=4114\n+Column_3=3831\n+Column_1=3507\n+Column_7=2717\n+Column_8=2340\n+\n+parameters:\n+[boosting: gbdt]\n+[objective: regression]\n+[metric: ]\n+[tree_learner: serial]\n+[device_type: cpu]\n+[data: ]\n+[valid: ]\n+[num_iterations: 100]\n+[learning_rate: 0.02]\n+[num_leaves: 32]\n+[num_threads: 0]\n+[max_depth: 8]\n+[min_data_in_leaf: 20]\n+[min_sum_hessian_in_leaf: 39]\n+[bagging_fraction: 0.9]\n+[pos_bagging_fraction: 1]\n+[neg_bagging_fraction: 1]\n+[bagging_freq: 0]\n+[bagging_seed: 18467]\n+[feature_fraction: 0.9]\n+[feature_fraction_bynode: 1]\n+[feature_fraction_seed: 26500]\n+[early_stopping_round: 0]\n+[first_metric_only: 0]\n+[max_delta_step: 0]\n+[lambda_l1: 0.04]\n+[lambda_l2: 0.07]\n+[min_gain_to_split: 0.02]\n+[drop_rate: 0.1]\n+[max_drop: 50]\n+[skip_drop: 0.5]\n+[xgboost_dart_mode: 0]\n+[uniform_drop: 0]\n+[drop_seed: 6334]\n+[top_rate: 0.2]\n+[other_rate: 0.1]\n+[min_data_per_group: 100]\n+[max_cat_threshold: 32]\n+[cat_l2: 10]\n+[cat_smooth: 10]\n+[max_cat_to_onehot: 4]\n+[top_k: 20]\n+[monotone_constraints: ]\n+[feature_contri: ]\n+[forcedsplits_filename: ]\n+[forcedbins_filename: ]\n+[refit_decay_rate: 0.9]\n+[cegb_tradeoff: 1]\n+[cegb_penalty_split: 0]\n+[cegb_penalty_feature_lazy: ]\n+[cegb_penalty_feature_coupled: ]\n+[verbosity: -1]\n+[max_bin: 255]\n+[max_bin_by_feature: ]\n+[min_data_in_bin: 3]\n+[bin_construct_sample_cnt: 200000]\n+[histogram_pool_size: -1]\n+[data_random_seed: 41]\n+[output_model: LightGBM_model.txt]\n+[snapshot_freq: -1]\n+[input_model: ]\n+[output_result: LightGBM_predict_result.txt]\n+[initscore_filename: ]\n+[valid_data_initscores: ]\n+[pre_partition: 0]\n+[enable_bundle: 1]\n+[max_conflict_rate: 
0]\n+[is_enable_sparse: 1]\n+[sparse_threshold: 0.8]\n+[use_missing: 1]\n+[zero_as_missing: 0]\n+[two_round: 0]\n+[save_binary: 0]\n+[header: 0]\n+[label_column: ]\n+[weight_column: ]\n+[group_column: ]\n+[ignore_column: ]\n+[categorical_feature: ]\n+[predict_raw_score: 0]\n+[predict_leaf_index: 0]\n+[predict_contrib: 0]\n+[num_iteration_predict: -1]\n+[pred_early_stop: 0]\n+[pred_early_stop_freq: 10]\n+[pred_early_stop_margin: 10]\n+[convert_model_language: ]\n+[convert_model: gbdt_prediction.cpp]\n+[num_class: 1]\n+[is_unbalance: 0]\n+[scale_pos_weight: 1]\n+[sigmoid: 1]\n+[boost_from_average: 1]\n+[reg_sqrt: 0]\n+[alpha: 0.9]\n+[fair_c: 1]\n+[poisson_max_delta_step: 0.7]\n+[tweedie_variance_power: 1.5]\n+[max_position: 20]\n+[lambdamart_norm: 1]\n+[label_gain: ]\n+[metric_freq: 1]\n+[is_provide_training_metric: 0]\n+[eval_at: ]\n+[multi_error_top_k: 1]\n+[num_machines: 1]\n+[local_listen_port: 12400]\n+[time_out: 120]\n+[machine_list_filename: ]\n+[machines: ]\n+[gpu_platform_id: -1]\n+[gpu_device_id: -1]\n+[gpu_use_dp: 0]\n+\n+end of parameters\n+\n+pandas_categorical:null\n' |
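Illustrative note: the [parameters:] block above records the settings used for this regression test model (learning_rate 0.02, num_leaves 32, max_depth 8, min_sum_hessian_in_leaf 39, bagging_fraction 0.9, feature_fraction 0.9, lambda_l1 0.04, lambda_l2 0.07, min_gain_to_split 0.02), while the dump itself contains trees up to Tree=9999, so the actual run used more boosting rounds than the recorded num_iterations of 100. A minimal sketch of producing such a text dump through LightGBM's scikit-learn wrapper, with placeholder data and the alias names that wrapper expects:

import numpy as np
import lightgbm as lgb

# Placeholder data shaped like the dump describes: 261 rows, 17 features.
X = np.random.rand(261, 17)
y = np.random.rand(261)

reg = lgb.LGBMRegressor(
    n_estimators=100,       # num_iterations
    learning_rate=0.02,
    num_leaves=32,
    max_depth=8,
    min_child_weight=39,    # min_sum_hessian_in_leaf
    subsample=0.9,          # bagging_fraction
    colsample_bytree=0.9,   # feature_fraction
    reg_alpha=0.04,         # lambda_l1
    reg_lambda=0.07,        # lambda_l2
    min_split_gain=0.02,    # min_gain_to_split
    random_state=42,        # arbitrary seed, not taken from the dump
)
reg.fit(X, y)
reg.booster_.save_model("lgb_regr_model.txt")  # writes a text dump like the one above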