Previous changeset 20:038cecaa9e7c (2018-08-23)  Next changeset 22:2e69c6ca6e91 (2018-10-11)
Commit message:
planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit 2a058459e6daf0486871f93845f00fdb4a4eaca1
modified:
ensemble.xml main_macros.xml test-data/gbc_model01 test-data/gbr_model01 test-data/glm_model01 test-data/glm_model02 test-data/glm_model03 test-data/glm_model04 test-data/glm_model05 test-data/glm_model06 test-data/glm_model07 test-data/glm_model08 test-data/lda_model01 test-data/lda_model02 test-data/pipeline01 test-data/pipeline02 test-data/pipeline03 test-data/pipeline04 test-data/pipeline05 test-data/pipeline06 test-data/pipeline07 test-data/pipeline08 test-data/pipeline09 test-data/pipeline10 test-data/qda_model01 test-data/qda_prediction_result01.tabular test-data/rfc_model01 test-data/rfr_model01 test-data/searchCV01 test-data/svc_prediction_result03.tabular utils.py
added:
sk_whitelist.json test-data/nn_model01 test-data/nn_model02 test-data/nn_model03 test-data/pickle_blacklist test-data/searchCV02 test-data/svc_model01 test-data/svc_model02 test-data/svc_model03
removed:
sk_whitelist.py test-data/nn_model01.txt test-data/nn_model02.txt test-data/nn_model03.txt test-data/svc_model01.txt test-data/svc_model02.txt test-data/svc_model03.txt
diff -r 038cecaa9e7c -r 9ce3e347506c ensemble.xml
--- a/ensemble.xml Thu Aug 23 16:16:12 2018 -0400
+++ b/ensemble.xml Sat Sep 29 07:30:08 2018 -0400
@@ -22,8 +22,9 @@
 import pandas
 from scipy.io import mmread
-execfile("$__tool_directory__/sk_whitelist.py")
-execfile("$__tool_directory__/utils.py", globals())
+with open("$__tool_directory__/sk_whitelist.json", "r") as f:
+    sk_whitelist = json.load(f)
+exec(open("$__tool_directory__/utils.py").read(), globals())

 # Get inputs, outputs.
 input_json_path = sys.argv[1]
@@ -75,7 +76,7 @@
 else:
     with open(infile_model, 'rb') as model_handler:
-        classifier_object = SafePickler.load(model_handler)
+        classifier_object = load_model(model_handler)
     header = 'infer' if params["selected_tasks"]["header"] else None
     data = pandas.read_csv(infile_data, sep='\t', header=header, index_col=None, parse_dates=True, encoding=None, tupleize_cols=False)
     prediction = classifier_object.predict(data)
@@ -265,7 +266,7 @@
         <param name="selected_task" value="train"/>
         <param name="selected_algorithm" value="GradientBoostingRegressor"/>
         <param name="max_features" value="number_input"/>
-        <param name="num_max_features" value=""/>
+        <param name="num_max_features" value="0.5"/>
         <param name="random_state" value="42"/>
         <output name="outfile_fit" file="gbr_model01" compare="sim_size" delta="500"/>
     </test>
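The first hunk is the Python 3 port of the tool's driver script: execfile() no longer exists, and the whitelist moves from an importable module to plain JSON data. A minimal sketch of the equivalent pattern outside Galaxy, with tool_dir standing in for the $__tool_directory__ template variable (path is a hypothetical placeholder):

import json

tool_dir = "/path/to/tool"  # stand-in for $__tool_directory__

# Python 2: execfile(tool_dir + "/sk_whitelist.py") defined SK_NAMES & co. as globals.
# Python 3: the whitelist is read as data instead of being executed.
with open(tool_dir + "/sk_whitelist.json", "r") as f:
    sk_whitelist = json.load(f)

# execfile() is gone in Python 3; the closest replacement for sourcing utils.py is:
exec(open(tool_dir + "/utils.py").read(), globals())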
diff -r 038cecaa9e7c -r 9ce3e347506c main_macros.xml
--- a/main_macros.xml Thu Aug 23 16:16:12 2018 -0400
+++ b/main_macros.xml Sat Sep 29 07:30:08 2018 -0400
@@ -3,7 +3,7 @@
 <xml name="python_requirements">
     <requirements>
-        <requirement type="package" version="2.7">python</requirement>
+        <requirement type="package" version="3.6">python</requirement>
         <requirement type="package" version="0.19.1">scikit-learn</requirement>
         <requirement type="package" version="0.22.0">pandas</requirement>
         <requirement type="package" version="0.72.1">xgboost</requirement>
@@ -1408,7 +1408,7 @@
     <data format="tabular" name="outfile_predict">
         <filter>selected_tasks['selected_task'] == 'load'</filter>
     </data>
-    <data format="zip" name="outfile_fit">
+    <data format="zip" name="outfile_fit" label="${tool.name}.${selected_tasks.selected_algorithms.selected_algorithm}">
         <filter>selected_tasks['selected_task'] == 'train'</filter>
     </data>
 </outputs>
diff -r 038cecaa9e7c -r 9ce3e347506c sk_whitelist.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/sk_whitelist.json Sat Sep 29 07:30:08 2018 -0400
@@ -0,0 +1,753 @@
(new 753-line JSON whitelist with four keys: "SK_NAMES" — scikit-learn class/function names from "sklearn._ASSUME_FINITE" and "sklearn.base.BaseEstimator" through "sklearn.utils.weight_vector.WeightVector"; "SKR_NAMES" — skrebate names such as "skrebate.ReliefF", "skrebate.MultiSURF" and "skrebate.turf.TuRF"; "XGB_NAMES" — xgboost names such as "xgboost.Booster", "xgboost.XGBClassifier" and "xgboost.training.train"; "NUMPY_NAMES" — "numpy.core.multiarray._reconstruct", "numpy.ndarray", "numpy.dtype", "numpy.core.multiarray.scalar", "numpy.random.__RandomState_ctor". The file body is truncated in this view; no newline at end of file.)
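Because the whitelist is now plain data, a consumer has to merge the four lists itself before checking a global's dotted name, which is exactly what the utils.py hunk at the bottom of this changeset does. A minimal sketch of that lookup, assuming sk_whitelist.json sits in the working directory (the set() is only for illustration; the tool concatenates the lists directly):

import json

with open("sk_whitelist.json") as f:
    sk_whitelist = json.load(f)

good_names = ['copy_reg._reconstructor', '__builtin__.object']

# One flat lookup over all four sections plus the hard-coded good_names.
allowed = set(sk_whitelist['SK_NAMES'] + sk_whitelist['SKR_NAMES'] +
              sk_whitelist['XGB_NAMES'] + sk_whitelist['NUMPY_NAMES'] + good_names)

print('numpy.ndarray' in allowed)   # listed under NUMPY_NAMES, so True
print('os.system' in allowed)       # never whitelisted, so False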
diff -r 038cecaa9e7c -r 9ce3e347506c sk_whitelist.py
--- a/sk_whitelist.py Thu Aug 23 16:16:12 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,757 +0,0 @@
(removes the 757-line Python module that defined the same whitelist as tuples — SK_NAMES, SKR_NAMES, XGB_NAMES and NUMPY_NAMES, "class or function names from scikit-learn/skrebate/xgboost" — now carried by sk_whitelist.json; the file body is truncated in this view.)
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/gbc_model01
Binary file test-data/gbc_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/gbr_model01
Binary file test-data/gbr_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model01
Binary file test-data/glm_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model02
Binary file test-data/glm_model02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model03
Binary file test-data/glm_model03 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model04
Binary file test-data/glm_model04 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model05
Binary file test-data/glm_model05 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model06
Binary file test-data/glm_model06 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model07
Binary file test-data/glm_model07 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/glm_model08
Binary file test-data/glm_model08 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/lda_model01
Binary file test-data/lda_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/lda_model02
Binary file test-data/lda_model02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model01
Binary file test-data/nn_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model01.txt
--- a/test-data/nn_model01.txt Thu Aug 23 16:16:12 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
(removes the 110-line protocol-0 pickle text of a fitted sklearn.neighbors.classification.KNeighborsClassifier — n_neighbors 5, algorithm 'brute', metric 'minkowski', weights 'uniform', _sklearn_version '0.19.1', plus the pickled classes_, _fit_X and _y arrays; the dump is truncated in this view, no newline at end of file.)
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model02
Binary file test-data/nn_model02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model02.txt
--- a/test-data/nn_model02.txt Thu Aug 23 16:16:12 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,242 +0,0 @@
(removes the 242-line protocol-0 pickle text of a fitted sklearn.neighbors.classification.RadiusNeighborsClassifier — radius 1.0, algorithm 'auto', _fit_method 'kd_tree', metric 'minkowski', weights 'uniform', _sklearn_version '0.19.1', plus the pickled kd-tree and label arrays; the dump is truncated in this view, no newline at end of file.)
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model03
Binary file test-data/nn_model03 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/nn_model03.txt
--- a/test-data/nn_model03.txt Thu Aug 23 16:16:12 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,71 +0,0 @@
(removes the 71-line protocol-0 pickle text of a fitted sklearn.neighbors.nearest_centroid.NearestCentroid — centroids_ and classes_ arrays, metric 'euclidean', shrink_threshold None, _sklearn_version '0.19.1'; no newline at end of file.)
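The removed nn_model0*.txt fixtures above are protocol-0 (ASCII) pickle dumps written by Python 2's cPickle; their replacements (test-data/nn_model01–03) are binary pickles. A small illustrative sketch of the difference, using made-up data rather than the tool's test data:

import pickle
import numpy as np
from sklearn.neighbors import NearestCentroid

X = np.array([[47.0, 55.0, -77.0, 2.0], [51.0, 48.0, -73.0, 1.0], [58.0, 70.0, -49.0, 3.0]])
y = np.array([0, 1, 2])
model = NearestCentroid().fit(X, y)

ascii_dump = pickle.dumps(model, protocol=0)   # readable opcode text, roughly what the removed *.txt fixtures held
binary_dump = pickle.dumps(model)              # compact binary, like the new nn_model0* fixtures
print(ascii_dump[:40])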
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pickle_blacklist
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/pickle_blacklist Sat Sep 29 07:30:08 2018 -0400
@@ -0,0 +1,4 @@
+cos
+system
+(S'ls ~'
+tR.
\ No newline at end of file
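The new test-data/pickle_blacklist fixture is a complete four-line protocol-0 pickle whose payload resolves the global os.system and calls it with 'ls ~' — the classic unpickling code-execution trick — so it gives the tools a negative test case. A small sketch of why such input must be rejected; only the byte literal is taken from the fixture, and RefuseEverything is an illustrative stand-in for the stricter find_class in utils.py:

import io
import pickle

payload = b"cos\nsystem\n(S'ls ~'\ntR."

# pickle.loads(payload) would import os and run os.system('ls ~') -- never unpickle
# untrusted input with the stock loader.  A restricted Unpickler that overrides
# find_class (as utils.py's SafePickler does) refuses the 'os.system' global instead:

class RefuseEverything(pickle.Unpickler):
    def find_class(self, module, name):
        raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))

try:
    RefuseEverything(io.BytesIO(payload)).load()
except pickle.UnpicklingError as e:
    print(e)   # global 'os.system' is forbidden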
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline01
Binary file test-data/pipeline01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline02
Binary file test-data/pipeline02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline03
Binary file test-data/pipeline03 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline04
Binary file test-data/pipeline04 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline05
Binary file test-data/pipeline05 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline06
Binary file test-data/pipeline06 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline07
Binary file test-data/pipeline07 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline08
Binary file test-data/pipeline08 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline09
Binary file test-data/pipeline09 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/pipeline10
Binary file test-data/pipeline10 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/qda_model01
Binary file test-data/qda_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/qda_prediction_result01.tabular
--- a/test-data/qda_prediction_result01.tabular Thu Aug 23 16:16:12 2018 -0400
+++ b/test-data/qda_prediction_result01.tabular Sat Sep 29 07:30:08 2018 -0400
@@ -2,4 +2,4 @@
 0.015942057224 -0.7119585943469999 0.125502976978 -0.972218263337 0
 2.0869076882499997 0.929399321468 -2.1292408448400004 -1.9971402218799998 0
 1.4132105208399999 0.523750660422 -1.4210539291 -1.49298569451 0
-0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 1
+0.7683140439399999 1.38267855169 -0.989045048734 0.649504257894 0
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/rfc_model01
Binary file test-data/rfc_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/rfr_model01
Binary file test-data/rfr_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/searchCV01
Binary file test-data/searchCV01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/searchCV02
Binary file test-data/searchCV02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model01
Binary file test-data/svc_model01 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model01.txt
Binary file test-data/svc_model01.txt has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model02
Binary file test-data/svc_model02 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model02.txt
Binary file test-data/svc_model02.txt has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model03
Binary file test-data/svc_model03 has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_model03.txt
Binary file test-data/svc_model03.txt has changed
diff -r 038cecaa9e7c -r 9ce3e347506c test-data/svc_prediction_result03.tabular
--- a/test-data/svc_prediction_result03.tabular Thu Aug 23 16:16:12 2018 -0400
+++ b/test-data/svc_prediction_result03.tabular Sat Sep 29 07:30:08 2018 -0400
@@ -25,7 +25,7 @@
 1 -50 97 45 2
 1 -61 111 45 2
 2 -109 23 -92 2
-2 -94 20 -96 1
+2 -94 20 -96 3
 2 -85 26 -88 2
 2 -90 33 -114 0
 2 -63 9 -106 0
@@ -33,7 +33,7 @@
 2 -99 26 -108 1
 2 -81 19 -110 0
 2 -108 21 -108 1
-2 -92 27 -106 1
+2 -92 27 -106 0
 2 -88 2 -106 3
 2 -88 15 -103 3
 3 54 -74 4 3
diff -r 038cecaa9e7c -r 9ce3e347506c utils.py
--- a/utils.py Thu Aug 23 16:16:12 2018 -0400
+++ b/utils.py Sat Sep 29 07:30:08 2018 -0400
@@ -2,28 +2,27 @@
 import os
 import pandas
 import re
-import cPickle as pickle
+import pickle
 import warnings
 import numpy as np
 import xgboost
 import scipy
 import sklearn
-import ast
 from asteval import Interpreter, make_symbol_table
 from sklearn import (cluster, decomposition, ensemble, feature_extraction, feature_selection,
-                     gaussian_process, kernel_approximation, linear_model, metrics,
+                     gaussian_process, kernel_approximation, metrics,
                      model_selection, naive_bayes, neighbors, pipeline, preprocessing,
                      svm, linear_model, tree, discriminant_analysis)

-N_JOBS = int( os.environ.get('GALAXY_SLOTS', 1) )
+N_JOBS = int(os.environ.get('GALAXY_SLOTS', 1))

-class SafePickler(object):
+
+class SafePickler(pickle.Unpickler):
     """
     Used to safely deserialize scikit-learn model objects serialized by cPickle.dump
     Usage:
     eg.: SafePickler.load(pickled_file_object)
     """
-    @classmethod
     def find_class(self, module, name):

         bad_names = ('and', 'as', 'assert', 'break', 'class', 'continue',
@@ -39,11 +38,11 @@
                      '__init__', 'func_globals', 'func_code', 'func_closure',
                      'im_class', 'im_func', 'im_self', 'gi_code', 'gi_frame',
                      '__asteval__', 'f_locals', '__mro__')
-        good_names = ('copy_reg._reconstructor', '__builtin__.object')
+        good_names = ['copy_reg._reconstructor', '__builtin__.object']

         if re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):
             fullname = module + '.' + name
-            if (fullname in good_names)\
+            if (fullname in good_names)\
                 or ( ( module.startswith('sklearn.')
                        or module.startswith('xgboost.')
                        or module.startswith('skrebate.')
@@ -51,26 +50,25 @@
                        or module == 'numpy'
                    )
                    and (name not in bad_names)
-                ) :
+                ):
                 # TODO: replace with a whitelist checker
-                if fullname not in SK_NAMES + SKR_NAMES + XGB_NAMES + NUMPY_NAMES + good_names:
+                if fullname not in sk_whitelist['SK_NAMES'] + sk_whitelist['SKR_NAMES'] + sk_whitelist['XGB_NAMES'] + sk_whitelist['NUMPY_NAMES'] + good_names:
                     print("Warning: global %s is not in pickler whitelist yet and will loss support soon. Contact tool author or leave a message at github.com" % fullname)
                 mod = sys.modules[module]
                 return getattr(mod, name)

         raise pickle.UnpicklingError("global '%s' is forbidden" % fullname)

-    @classmethod
-    def load(self, file):
-        obj = pickle.Unpickler(file)
-        obj.find_global = self.find_class
-        return obj.load()
+
+def load_model(file):
+    return SafePickler(file).load()
+

 def read_columns(f, c=None, c_option='by_index_number', return_df=False, **args):
     data = pandas.read_csv(f, **args)
     if c_option == 'by_index_number':
         cols = list(map(lambda x: x - 1, c))
-        data = data.iloc[:,cols]
+        data = data.iloc[:, cols]
     if c_option == 'all_but_by_index_number':
         cols = list(map(lambda x: x - 1, c))
         data.drop(data.columns[cols], axis=1, inplace=True)
@@ -100,7 +98,7 @@
     if inputs['model_inputter']['input_mode'] == 'prefitted':
         model_file = inputs['model_inputter']['fitted_estimator']
         with open(model_file, 'rb') as model_handler:
-            fitted_estimator = SafePickler.load(model_handler)
+            fitted_estimator = load_model(model_handler)
         new_selector = selector(fitted_estimator, prefit=True, **options)
     else:
         estimator_json = inputs['model_inputter']["estimator_selector"]
@@ -108,14 +106,14 @@
         new_selector = selector(estimator, **options)

(The rest of the diff is truncated in this view. The remaining visible hunks are formatting cleanups with no behaviour change: 'input_type == "tabular"' gains spaces around the comparison, the read_columns() calls for X and y drop the spaces around keyword arguments ('c=c', 'c_option=column_option') and 'y = y.ravel()' is spaced, the SafeEval numpy.random symbol list and super().__init__ call are reindented, the '%' string formatting in the parameter-parser and CV-literal error messages is normalised, and get_scoring() indexes my_scorers without extra spaces and loses a trailing blank line.)
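The core of the utils.py change: under Python 3, pickle.Unpickler no longer exposes the find_global hook that the old classmethod-based SafePickler patched, so SafePickler becomes an Unpickler subclass overriding find_class, and load_model() is the new entry point used by ensemble.xml and the feature-selector code. A condensed sketch of that shape; it omits the bad_names/module-prefix checks and the real code's warn-only handling of names missing from the whitelist, and assumes sk_whitelist.json is in the working directory:

import json
import pickle
import sys

with open('sk_whitelist.json') as f:      # the JSON file added in this changeset
    sk_whitelist = json.load(f)

class SafePickler(pickle.Unpickler):
    """Condensed sketch: resolve only whitelisted globals while unpickling."""
    def find_class(self, module, name):
        fullname = module + '.' + name
        allowed = (sk_whitelist['SK_NAMES'] + sk_whitelist['SKR_NAMES'] +
                   sk_whitelist['XGB_NAMES'] + sk_whitelist['NUMPY_NAMES'])
        if fullname in allowed:
            # like the real code, this assumes the module is already imported
            return getattr(sys.modules[module], name)
        raise pickle.UnpicklingError("global '%s' is forbidden" % fullname)

def load_model(file):
    # replaces the old SafePickler.load() classmethod
    return SafePickler(file).load()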