Repository 'sklearn_numeric_clustering'
hg clone https://toolshed.g2.bx.psu.edu/repos/bgruening/sklearn_numeric_clustering

Changeset 5:c3b36f4c7e4e (2018-04-10)
Previous changeset: 4:681825b036f3 (2018-03-22) | Next changeset: 6:8fc64b67bb7f (2018-04-12)
Commit message:
planemo upload for repository https://github.com/bgruening/galaxytools/tools/sklearn commit 35fa73d6e9ba8f0789ddfb743d893d950a68af02
modified:
main_macros.xml
added:
test-data/gbc_model01
test-data/gbc_result01
test-data/gbr_model01
test-data/gbr_prediction_result01.tabular
test-data/regression_X.tabular
test-data/regression_test_X.tabular
test-data/regression_y.tabular
diff -r 681825b036f3 -r c3b36f4c7e4e main_macros.xml
--- a/main_macros.xml Thu Mar 22 13:46:32 2018 -0400
+++ b/main_macros.xml Tue Apr 10 15:18:33 2018 -0400
@@ -66,6 +66,7 @@
         <when value="load">
             <param name="infile_model" type="data" format="@MODEL@" label="Models" help="Select a model file."/>
             <param name="infile_data" type="data" format="@DATA@" label="Data (tabular)" help="Select the dataset you want to classify."/>
+            <param name="header" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="false" label="Does the dataset contain a header?" />
             <conditional name="prediction_options">
                 <param name="prediction_option" type="select" label="Select the type of prediction">
                     <option value="predict">Predict class labels</option>
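The header option added above toggles whether the first row of the tabular input is read as column names. A minimal sketch of the intended behavior in Python, assuming pandas is used for loading (the tool's actual loading code is not part of this diff):

import pandas as pd

def load_table(path, header=False):
    # Treat row 0 as column names only when the header flag is set
    return pd.read_csv(path, sep='\t', header=0 if header else None)

X = load_table('test-data/regression_test_X.tabular', header=True)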
@@ -174,12 +175,12 @@
     <param argument="max_depth" type="integer" optional="true" value="@DEFAULT_VALUE@" label="Maximum depth of the tree" help="@HELP@"/>
   </xml>
 
-  <xml name="min_samples_split" token_default_value="2" token_help=" ">
-    <param argument="min_samples_split" type="integer" optional="true" value="@DEFAULT_VALUE@" label="Minimum number of samples required to split an internal node" help="@HELP@"/>
+  <xml name="min_samples_split" token_type="integer" token_default_value="2" token_help=" ">
+    <param argument="min_samples_split" type="@TYPE@" optional="true" value="@DEFAULT_VALUE@" label="Minimum number of samples required to split an internal node" help="@HELP@"/>
   </xml>
 
-  <xml name="min_samples_leaf" token_default_value="1" token_help=" ">
-    <param argument="min_samples_leaf" type="integer" optional="true" value="@DEFAULT_VALUE@" label="Minimum number of samples in newly created leaves" help="@HELP@"/>
+  <xml name="min_samples_leaf" token_type="integer" token_default_value="1" token_label="Minimum number of samples in newly created leaves" token_help=" ">
+    <param argument="min_samples_leaf" type="@TYPE@" optional="true" value="@DEFAULT_VALUE@" label="@LABEL@" help="@HELP@"/>
   </xml>
 
   <xml name="min_weight_fraction_leaf" token_default_value="0.0" token_help=" ">
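The new token_type token lets callers expose min_samples_split and min_samples_leaf as either an integer or a float, mirroring scikit-learn's own semantics: an int is an absolute sample count, a float is a fraction of n_samples. An illustrative sketch with the standard sklearn API (not part of this changeset):

from sklearn.tree import DecisionTreeClassifier

# int: a node is split only if it holds at least 4 samples
clf_abs = DecisionTreeClassifier(min_samples_split=4, min_samples_leaf=2)

# float: interpreted as a fraction of the training set size
clf_frac = DecisionTreeClassifier(min_samples_split=0.05, min_samples_leaf=0.01)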
@@ -190,6 +191,10 @@
     <param argument="max_leaf_nodes" type="integer" optional="true" value="@DEFAULT_VALUE@" label="Maximum number of leaf nodes in best-first method" help="@HELP@"/>
   </xml>
 
+  <xml name="min_impurity_decrease" token_default_value="0" token_help=" ">
+    <param argument="min_impurity_decrease" type="float" value="@DEFAULT_VALUE@" optional="true" label="Minimum impurity decrease required to split a node" help="@HELP@"/>
+  </xml>
+
   <xml name="bootstrap" token_checked="true" token_help=" ">
    <param argument="bootstrap" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="@CHECKED@" label="Use bootstrap samples for building trees." help="@HELP@"/>
   </xml>
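The min_impurity_decrease macro added above maps to the scikit-learn parameter of the same name: a node is split only if the split reduces the weighted impurity by at least the given value. A hedged sketch (the estimator choice here is illustrative):

from sklearn.ensemble import RandomForestClassifier

# Splits that reduce weighted impurity by less than 0.01 are not made
model = RandomForestClassifier(min_impurity_decrease=0.01, bootstrap=True)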
@@ -202,18 +207,57 @@
     </param>
   </xml>
 
+  <xml name="criterion2" token_help="">
+    <param argument="criterion" type="select" label="Function to measure the quality of a split">
+      <option value="mse">mse - mean squared error</option>
+      <option value="mae">mae - mean absolute error</option>
+      <yield/>
+    </param>
+  </xml>
+
   <xml name="oob_score" token_checked="false" token_help=" ">
     <param argument="oob_score" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="@CHECKED@" label="Use out-of-bag samples to estimate the generalization error" help="@HELP@"/>
   </xml>
 
-  <xml name="max_features" token_default_value="auto" token_help="This could be an integer, float, string, or None. For more information please refer to help. ">
-    <param argument="max_features" type="text" optional="true" value="@DEFAULT_VALUE@" label="Number of features for finding the best split" help="@HELP@"/>
+  <xml name="max_features">
+    <conditional name="select_max_features">
+      <param argument="max_features" type="select" label="max_features">
+        <option value="auto" selected="true">auto - max_features=n_features</option>
+        <option value="sqrt">sqrt - max_features=sqrt(n_features)</option>
+        <option value="log2">log2 - max_features=log2(n_features)</option>
+        <option value="number_input">Enter a number manually (or leave blank for None)</option>
+      </param>
+      <when value="auto">
+      </when>
+      <when value="sqrt">
+      </when>
+      <when value="log2">
+      </when>
+      <when value="number_input">
+        <param name="num_max_features" type="float" value="" optional="true" label="Enter the max_features value:" help="An integer is taken as the number of features to consider at each split; a float is taken as a fraction, so that int(max_features * n_features) features are considered at each split. Leave blank for None."/>
+      </when>
+    </conditional>
+  </xml>
+
+  <xml name="verbose" token_default_value="0" token_help="If 1 then it prints progress and performance once in a while. If greater than 1 then it prints progress and performance for every tree.">
+    <param argument="verbose" type="integer" value="@DEFAULT_VALUE@" optional="true" label="Enable verbose output" help="@HELP@"/>
   </xml>
 
   <xml name="learning_rate" token_default_value="1.0" token_help=" ">
     <param argument="learning_rate" type="float" optional="true" value="@DEFAULT_VALUE@" label="Learning rate" help="@HELP@"/>
   </xml>
 
+  <xml name="subsample" token_help=" ">
+    <param argument="subsample" type="float" value="1.0" optional="true" label="The fraction of samples to be used for fitting the individual base learners" help="@HELP@"/>
+  </xml>
+
+  <xml name="presort">
+    <param argument="presort" type="select" label="Whether to presort the data to speed up finding the best splits during fitting">
+      <option value="auto" selected="true">auto</option>
+      <option value="true">true</option>
+      <option value="false">false</option>
+    </param>
+  </xml>
 
   <!--Parameters-->
   <xml name="tol" token_default_value="0.0" token_help_text="Early stopping heuristics based on the relative center changes. Set to default (0.0) to disable this convergence detection.">
@@ -228,6 +272,10 @@
     <param argument="fit_intercept" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="@CHECKED@" label="Estimate the intercept" help="If false, the data is assumed to be already centered."/>
   </xml>
 
+  <xml name="n_jobs" token_default_value="1" token_label="The number of jobs to run in parallel for both fit and predict">
+    <param argument="n_jobs" type="integer" value="@DEFAULT_VALUE@" optional="true" label="@LABEL@" help="If -1, then the number of jobs is set to the number of cores"/>
+  </xml>
+
   <xml name="n_iter" token_default_value="5" token_help_text="The number of passes over the training data (aka epochs). ">
     <param argument="n_iter" type="integer" optional="true" value="@DEFAULT_VALUE@" label="Number of iterations" help="@HELP_TEXT@"/>
   </xml>
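Taken together, the macros added in this hunk (criterion2, max_features, verbose, subsample, presort, n_jobs) surface standard scikit-learn ensemble options. A sketch of how a wrapped estimator might receive them, valid for the scikit-learn versions current to this changeset; the wiring is illustrative, not the tool's actual driver code:

from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor

# Gradient boosting: criterion, subsample, presort and verbose map directly
gbr = GradientBoostingRegressor(
    criterion='mse',      # or 'mae' (the criterion2 macro)
    max_features='sqrt',  # 'auto', 'log2', an int, a float fraction, or None
    subsample=0.8,        # fraction of samples used to fit each base learner
    presort='auto',       # let sklearn decide whether presorting pays off
    verbose=0,            # >=1 prints progress during fitting
)

# n_jobs applies to estimators with parallel fit/predict, e.g. random forests
rfr = RandomForestRegressor(n_jobs=-1)  # -1 uses all available cores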
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/gbc_model01
Binary file test-data/gbc_model01 has changed
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/gbc_result01
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gbc_result01 Tue Apr 10 15:18:33 2018 -0400
@@ -0,0 +1,6 @@
+0 1 2 3 0
+3.68258022948 2.82110345641 -3.990140724 -1.9523364774 1
+0.015942057224 -0.711958594347 0.125502976978 -0.972218263337 0
+2.08690768825 0.929399321468 -2.12924084484 -1.99714022188 1
+1.41321052084 0.523750660422 -1.4210539291 -1.49298569451 1
+0.76831404394 1.38267855169 -0.989045048734 0.649504257894 1
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/gbr_model01
Binary file test-data/gbr_model01 has changed
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/gbr_prediction_result01.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/gbr_prediction_result01.tabular Tue Apr 10 15:18:33 2018 -0400
@@ -0,0 +1,88 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed 0
+2016 9 29 69 68 66.1 63 71 68 57 0 0 0 0 1 0 0 69.8047715468
+2016 4 27 59 60 60.7 59 65 60 50 0 0 0 0 0 0 1 62.3940847433
+2016 11 28 53 48 48.0 46 48 49 44 0 1 0 0 0 0 0 51.1656331745
+2016 10 12 60 62 61.0 60 63 63 52 0 0 0 0 0 0 1 60.7602326565
+2016 6 19 67 65 70.4 69 73 70 58 0 0 0 1 0 0 0 66.2416657667
+2016 5 7 68 77 63.0 61 65 63 83 0 0 1 0 0 0 0 71.7162060939
+2016 7 25 75 80 77.1 75 82 76 81 0 1 0 0 0 0 0 78.6168727393
+2016 8 15 90 83 76.6 76 79 75 70 0 1 0 0 0 0 0 77.9015583717
+2016 10 28 58 60 55.6 52 56 55 52 1 0 0 0 0 0 0 61.4191796096
+2016 6 5 80 81 68.0 64 70 66 54 0 0 0 1 0 0 0 74.4136969328
+2016 3 19 58 63 54.2 54 59 54 62 0 0 1 0 0 0 0 60.9589968112
+2016 6 7 92 86 68.3 67 69 70 58 0 0 0 0 0 1 0 75.5031094008
+2016 12 10 41 36 45.9 44 48 44 65 0 0 1 0 0 0 0 38.5555100028
+2016 4 23 73 64 59.9 56 63 59 57 0 0 1 0 0 0 0 64.0035135524
+2016 6 24 75 68 71.5 67 73 73 65 1 0 0 0 0 0 0 74.5305649268
+2016 2 9 51 57 49.4 45 52 49 57 0 0 0 0 0 1 0 57.0110982119
+2016 11 10 71 65 52.2 52 54 51 38 0 0 0 0 1 0 0 61.876179905
+2016 3 21 61 55 54.5 52 56 55 52 0 1 0 0 0 0 0 56.0732986026
+2016 2 28 60 57 51.3 48 56 53 66 0 0 0 1 0 0 0 56.9672058242
+2016 6 28 78 85 72.4 72 76 74 67 0 0 0 0 0 1 0 78.4438620045
+2016 10 6 63 66 63.3 62 67 63 55 0 0 0 0 1 0 0 63.9639842609
+2016 2 17 55 56 50.0 45 51 49 46 0 0 0 0 0 0 1 54.149464399
+2016 6 15 66 60 69.7 65 73 71 69 0 0 0 0 0 0 1 66.1043951877
+2016 10 15 60 60 59.9 59 62 59 46 0 0 1 0 0 0 0 61.6791270097
+2016 3 26 54 57 55.2 53 57 55 54 0 0 1 0 0 0 0 60.2367595132
+2016 1 26 51 54 48.3 44 53 50 61 0 0 0 0 0 1 0 52.9547372573
+2016 5 23 59 66 66.1 63 68 68 66 0 1 0 0 0 0 0 64.6813560623
+2016 1 10 48 50 46.5 45 48 48 49 0 0 0 1 0 0 0 45.1415524342
+2016 5 22 66 59 65.9 62 66 65 80 0 0 0 1 0 0 0 59.8874932366
+2016 7 15 75 77 76.0 74 80 78 75 1 0 0 0 0 0 0 82.9044308458
+2016 4 22 81 73 59.7 59 64 60 59 1 0 0 0 0 0 0 74.8537745899
+2016 4 29 61 64 61.2 61 65 61 49 1 0 0 0 0 0 0 65.3872817114
+2016 1 23 52 57 48.0 45 49 50 37 0 0 1 0 0 0 0 51.8565179701
+2016 8 16 83 84 76.5 72 78 78 90 0 0 0 0 0 1 0 83.6982049493
+2016 8 1 76 73 77.4 76 78 79 65 0 1 0 0 0 0 0 72.4140203449
+2016 2 27 61 60 51.2 51 53 53 61 0 0 1 0 0 0 0 60.839700499
+2016 2 12 56 55 49.6 49 52 48 33 1 0 0 0 0 0 0 54.9702164699
+2016 1 31 52 48 48.7 47 52 49 61 0 0 0 1 0 0 0 49.8435633428
+2016 9 5 67 68 73.5 71 75 73 54 0 1 0 0 0 0 0 69.325684558
+2016 12 20 39 46 45.1 45 49 45 62 0 0 0 0 0 1 0 43.4575487159
+2016 5 1 61 68 61.6 60 65 60 75 0 0 0 1 0 0 0 65.0535826144
+2016 3 28 59 51 55.5 55 57 55 47 0 1 0 0 0 0 0 57.5541221212
+2016 4 21 81 81 59.4 55 61 59 55 0 0 0 0 1 0 0 76.9948007001
+2016 1 6 40 44 46.1 43 49 48 40 0 0 0 0 0 0 1 41.3862075834
+2016 10 21 58 62 57.8 56 60 59 44 1 0 0 0 0 0 0 61.0523769432
+2016 5 2 68 77 61.9 60 66 61 59 0 1 0 0 0 0 0 74.2435105222
+2016 3 1 53 54 51.5 48 56 50 53 0 0 0 0 0 1 0 54.3306325137
+2016 7 21 78 82 76.8 73 81 78 84 0 0 0 0 1 0 0 81.2097724662
+2016 3 17 51 53 53.9 49 58 52 62 0 0 0 0 1 0 0 52.1836048796
+2016 12 6 46 40 46.4 44 50 45 56 0 0 0 0 0 1 0 42.2019357209
+2016 12 21 46 51 45.1 44 50 46 39 0 0 0 0 0 0 1 45.9011800782
+2016 1 4 44 41 45.9 44 48 46 53 0 1 0 0 0 0 0 41.1820761074
+2016 10 2 67 63 64.9 62 69 66 82 0 0 0 1 0 0 0 61.3727414202
+2016 5 28 65 64 66.8 64 69 65 64 0 0 1 0 0 0 0 65.5895934942
+2016 9 11 74 77 72.1 69 75 71 70 0 0 0 1 0 0 0 74.3381013887
+2016 10 25 62 61 56.5 53 60 55 70 0 0 0 0 0 1 0 61.2657495686
+2016 2 18 56 57 50.1 47 55 49 34 0 0 0 0 1 0 0 55.5571516621
+2016 11 1 117 59 54.5 51 59 55 61 0 0 0 0 0 1 0 60.8285501381
+2016 3 16 49 51 53.7 52 54 55 65 0 0 0 0 0 0 1 54.4944109202
+2016 4 26 55 59 60.5 56 61 62 75 0 0 0 0 0 1 0 61.8372077373
+2016 6 10 67 65 68.8 67 71 67 73 1 0 0 0 0 0 0 63.9222528587
+2016 2 3 46 51 48.9 48 49 50 40 0 0 0 0 0 0 1 48.8811572638
+2016 3 7 64 60 52.4 49 57 53 71 0 1 0 0 0 0 0 62.8822601273
+2016 9 18 75 68 70.0 66 73 71 90 0 0 0 1 0 0 0 71.4706106408
+2016 3 20 63 61 54.3 51 56 55 50 0 0 0 1 0 0 0 59.7324860951
+2016 4 6 60 57 56.8 53 59 57 64 0 0 0 0 0 0 1 58.9890626595
+2016 7 2 73 76 73.3 70 77 73 84 0 0 1 0 0 0 0 71.2799971324
+2016 7 5 71 68 74.0 72 77 74 62 0 0 0 0 0 1 0 68.9560415136
+2016 7 19 80 73 76.6 76 78 77 90 0 0 0 0 0 1 0 77.0157028161
+2016 12 9 40 41 46.0 43 51 44 54 1 0 0 0 0 0 0 42.1221149466
+2016 6 29 85 79 72.6 68 76 74 81 0 0 0 0 0 0 1 74.3021609896
+2016 3 22 55 56 54.6 51 55 54 64 0 0 0 0 0 1 0 57.100481947
+2016 4 3 71 63 56.3 54 61 56 64 0 0 0 1 0 0 0 60.29402298
+2016 1 17 48 54 47.4 45 51 46 47 0 0 0 1 0 0 0 50.2034551756
+2016 3 10 54 55 52.8 49 55 53 50 0 0 0 0 1 0 0 55.1100177804
+2016 5 9 82 63 63.4 59 66 62 64 0 1 0 0 0 0 0 61.9408775418
+2016 1 8 51 45 46.3 43 47 46 34 1 0 0 0 0 0 0 45.3158658848
+2016 8 11 72 76 76.9 74 81 75 80 0 0 0 0 1 0 0 74.2995087324
+2016 12 29 47 48 45.3 43 50 45 65 0 0 0 0 1 0 0 47.8575821187
+2016 11 23 54 54 49.1 48 52 49 38 0 0 0 0 0 0 1 51.5257711552
+2016 11 19 52 55 50.0 50 54 49 56 0 0 1 0 0 0 0 53.6344142464
+2016 4 7 57 68 56.9 52 61 55 38 0 0 0 0 1 0 0 66.7238759737
+2016 6 4 71 80 67.9 63 72 66 76 0 0 1 0 0 0 0 72.7073855763
+2016 6 17 67 71 70.0 66 74 69 54 1 0 0 0 0 0 0 73.4041601901
+2016 10 5 61 63 63.7 61 66 65 48 0 0 0 0 0 0 1 63.9616628787
+2016 3 4 55 59 51.9 47 56 53 45 1 0 0 0 0 0 0 58.3547591361
+2016 12 22 51 49 45.1 42 47 46 38 0 0 0 0 1 0 0 44.7834274452
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/regression_X.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_X.tabular Tue Apr 10 15:18:33 2018 -0400
@@ -0,0 +1,262 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+2016 9 19 68 69 69.7 65 74 71 88 0 1 0 0 0 0 0
+2016 4 14 60 59 58.1 57 63 58 66 0 0 0 0 1 0 0
+2016 7 30 85 88 77.3 75 79 77 70 0 0 1 0 0 0 0
+2016 5 15 82 65 64.7 63 69 64 58 0 0 0 1 0 0 0
+2016 1 18 54 50 47.5 44 48 49 58 0 1 0 0 0 0 0
[... remaining rows truncated in the original page ...]
+2016 10 14 66 60 60.2 56 64 60 78 1 0 0 0 0 0 0
+2016 4 15 59 59 58.3 58 61 60 40 1 0 0 0 0 0 0
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/regression_test_X.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_test_X.tabular Tue Apr 10 15:18:33 2018 -0400
@@ -0,0 +1,88 @@
+year month day temp_2 temp_1 average forecast_noaa forecast_acc forecast_under friend week_Fri week_Mon week_Sat week_Sun week_Thurs week_Tues week_Wed
+2016 9 29 69 68 66.1 63 71 68 57 0 0 0 0 1 0 0
+2016 4 27 59 60 60.7 59 65 60 50 0 0 0 0 0 0 1
+2016 11 28 53 48 48.0 46 48 49 44 0 1 0 0 0 0 0
+2016 10 12 60 62 61.0 60 63 63 52 0 0 0 0 0 0 1
+2016 6 19 67 65 70.4 69 73 70 58 0 0 0 1 0 0 0
+2016 5 7 68 77 63.0 61 65 63 83 0 0 1 0 0 0 0
+2016 7 25 75 80 77.1 75 82 76 81 0 1 0 0 0 0 0
+2016 8 15 90 83 76.6 76 79 75 70 0 1 0 0 0 0 0
+2016 10 28 58 60 55.6 52 56 55 52 1 0 0 0 0 0 0
+2016 6 5 80 81 68.0 64 70 66 54 0 0 0 1 0 0 0
+2016 3 19 58 63 54.2 54 59 54 62 0 0 1 0 0 0 0
+2016 6 7 92 86 68.3 67 69 70 58 0 0 0 0 0 1 0
+2016 12 10 41 36 45.9 44 48 44 65 0 0 1 0 0 0 0
+2016 4 23 73 64 59.9 56 63 59 57 0 0 1 0 0 0 0
+2016 6 24 75 68 71.5 67 73 73 65 1 0 0 0 0 0 0
+2016 2 9 51 57 49.4 45 52 49 57 0 0 0 0 0 1 0
+2016 11 10 71 65 52.2 52 54 51 38 0 0 0 0 1 0 0
+2016 3 21 61 55 54.5 52 56 55 52 0 1 0 0 0 0 0
+2016 2 28 60 57 51.3 48 56 53 66 0 0 0 1 0 0 0
+2016 6 28 78 85 72.4 72 76 74 67 0 0 0 0 0 1 0
+2016 10 6 63 66 63.3 62 67 63 55 0 0 0 0 1 0 0
+2016 2 17 55 56 50.0 45 51 49 46 0 0 0 0 0 0 1
+2016 6 15 66 60 69.7 65 73 71 69 0 0 0 0 0 0 1
+2016 10 15 60 60 59.9 59 62 59 46 0 0 1 0 0 0 0
+2016 3 26 54 57 55.2 53 57 55 54 0 0 1 0 0 0 0
+2016 1 26 51 54 48.3 44 53 50 61 0 0 0 0 0 1 0
+2016 5 23 59 66 66.1 63 68 68 66 0 1 0 0 0 0 0
+2016 1 10 48 50 46.5 45 48 48 49 0 0 0 1 0 0 0
+2016 5 22 66 59 65.9 62 66 65 80 0 0 0 1 0 0 0
+2016 7 15 75 77 76.0 74 80 78 75 1 0 0 0 0 0 0
+2016 4 22 81 73 59.7 59 64 60 59 1 0 0 0 0 0 0
+2016 4 29 61 64 61.2 61 65 61 49 1 0 0 0 0 0 0
+2016 1 23 52 57 48.0 45 49 50 37 0 0 1 0 0 0 0
+2016 8 16 83 84 76.5 72 78 78 90 0 0 0 0 0 1 0
+2016 8 1 76 73 77.4 76 78 79 65 0 1 0 0 0 0 0
+2016 2 27 61 60 51.2 51 53 53 61 0 0 1 0 0 0 0
+2016 2 12 56 55 49.6 49 52 48 33 1 0 0 0 0 0 0
+2016 1 31 52 48 48.7 47 52 49 61 0 0 0 1 0 0 0
+2016 9 5 67 68 73.5 71 75 73 54 0 1 0 0 0 0 0
+2016 12 20 39 46 45.1 45 49 45 62 0 0 0 0 0 1 0
+2016 5 1 61 68 61.6 60 65 60 75 0 0 0 1 0 0 0
+2016 3 28 59 51 55.5 55 57 55 47 0 1 0 0 0 0 0
+2016 4 21 81 81 59.4 55 61 59 55 0 0 0 0 1 0 0
+2016 1 6 40 44 46.1 43 49 48 40 0 0 0 0 0 0 1
+2016 10 21 58 62 57.8 56 60 59 44 1 0 0 0 0 0 0
+2016 5 2 68 77 61.9 60 66 61 59 0 1 0 0 0 0 0
+2016 3 1 53 54 51.5 48 56 50 53 0 0 0 0 0 1 0
+2016 7 21 78 82 76.8 73 81 78 84 0 0 0 0 1 0 0
+2016 3 17 51 53 53.9 49 58 52 62 0 0 0 0 1 0 0
+2016 12 6 46 40 46.4 44 50 45 56 0 0 0 0 0 1 0
+2016 12 21 46 51 45.1 44 50 46 39 0 0 0 0 0 0 1
+2016 1 4 44 41 45.9 44 48 46 53 0 1 0 0 0 0 0
+2016 10 2 67 63 64.9 62 69 66 82 0 0 0 1 0 0 0
+2016 5 28 65 64 66.8 64 69 65 64 0 0 1 0 0 0 0
+2016 9 11 74 77 72.1 69 75 71 70 0 0 0 1 0 0 0
+2016 10 25 62 61 56.5 53 60 55 70 0 0 0 0 0 1 0
+2016 2 18 56 57 50.1 47 55 49 34 0 0 0 0 1 0 0
+2016 11 1 117 59 54.5 51 59 55 61 0 0 0 0 0 1 0
+2016 3 16 49 51 53.7 52 54 55 65 0 0 0 0 0 0 1
+2016 4 26 55 59 60.5 56 61 62 75 0 0 0 0 0 1 0
+2016 6 10 67 65 68.8 67 71 67 73 1 0 0 0 0 0 0
+2016 2 3 46 51 48.9 48 49 50 40 0 0 0 0 0 0 1
+2016 3 7 64 60 52.4 49 57 53 71 0 1 0 0 0 0 0
+2016 9 18 75 68 70.0 66 73 71 90 0 0 0 1 0 0 0
+2016 3 20 63 61 54.3 51 56 55 50 0 0 0 1 0 0 0
+2016 4 6 60 57 56.8 53 59 57 64 0 0 0 0 0 0 1
+2016 7 2 73 76 73.3 70 77 73 84 0 0 1 0 0 0 0
+2016 7 5 71 68 74.0 72 77 74 62 0 0 0 0 0 1 0
+2016 7 19 80 73 76.6 76 78 77 90 0 0 0 0 0 1 0
+2016 12 9 40 41 46.0 43 51 44 54 1 0 0 0 0 0 0
+2016 6 29 85 79 72.6 68 76 74 81 0 0 0 0 0 0 1
+2016 3 22 55 56 54.6 51 55 54 64 0 0 0 0 0 1 0
+2016 4 3 71 63 56.3 54 61 56 64 0 0 0 1 0 0 0
+2016 1 17 48 54 47.4 45 51 46 47 0 0 0 1 0 0 0
+2016 3 10 54 55 52.8 49 55 53 50 0 0 0 0 1 0 0
+2016 5 9 82 63 63.4 59 66 62 64 0 1 0 0 0 0 0
+2016 1 8 51 45 46.3 43 47 46 34 1 0 0 0 0 0 0
+2016 8 11 72 76 76.9 74 81 75 80 0 0 0 0 1 0 0
+2016 12 29 47 48 45.3 43 50 45 65 0 0 0 0 1 0 0
+2016 11 23 54 54 49.1 48 52 49 38 0 0 0 0 0 0 1
+2016 11 19 52 55 50.0 50 54 49 56 0 0 1 0 0 0 0
+2016 4 7 57 68 56.9 52 61 55 38 0 0 0 0 1 0 0
+2016 6 4 71 80 67.9 63 72 66 76 0 0 1 0 0 0 0
+2016 6 17 67 71 70.0 66 74 69 54 1 0 0 0 0 0 0
+2016 10 5 61 63 63.7 61 66 65 48 0 0 0 0 0 0 1
+2016 3 4 55 59 51.9 47 56 53 45 1 0 0 0 0 0 0
+2016 12 22 51 49 45.1 42 47 46 38 0 0 0 0 1 0 0
diff -r 681825b036f3 -r c3b36f4c7e4e test-data/regression_y.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/regression_y.tabular Tue Apr 10 15:18:33 2018 -0400
@@ -0,0 +1,262 @@
+actual
+71
+59
+76
+57
+54
+54
+52
+82
+35
+41
+48
+80
+68
+39
+85
+79
+52
+76
+53
+41
+48
+61
+77
+68
+46
+43
+71
+62
+67
+74
+57
+52
+59
+54
+47
+55
+66
+54
+40
+45
+67
+70
+45
+65
+67
+57
+61
+72
+76
+55
+67
+73
+57
+54
+75
+66
+65
+60
+59
+58
+52
+51
+51
+64
+68
+55
+62
+44
+63
+64
+40
+68
+71
+76
+65
+71
+57
+35
+75
+71
+75
+77
+57
+49
+90
+68
+59
+87
+68
+68
+40
+46
+64
+52
+71
+79
+68
+86
+72
+41
+64
+58
+67
+74
+59
+73
+55
+75
+63
+58
+48
+51
+65
+81
+80
+73
+60
+76
+69
+56
+46
+55
+57
+64
+74
+49
+65
+55
+53
+52
+75
+66
+68
+65
+83
+60
+76
+62
+73
+79
+77
+55
+63
+60
+85
+63
+57
+42
+66
+65
+44
+45
+53
+59
+52
+59
+79
+77
+55
+72
+80
+68
+68
+58
+49
+72
+64
+71
+67
+51
+51
+71
+52
+56
+61
+68
+63
+60
+63
+59
+60
+64
+81
+50
+54
+48
+67
+56
+49
+60
+72
+50
+77
+88
+75
+46
+76
+40
+50
+60
+75
+66
+55
+73
+77
+61
+89
+61
+44
+51
+54
+83
+49
+64
+60
+59
+68
+71
+49
+71
+60
+65
+42
+71
+55
+39
+68
+60
+51
+78
+74
+57
+71
+73
+55
+53
+74
+77
+57
+48
+70
+62
+75
+63
+63
+54
+75
+82
+80
+60
+73
+39
+79
+60
+40
+52
+74
+51
+81
+60
+60