Repository 'blockclust_workflow'
hg clone https://toolshed.g2.bx.psu.edu/repos/rnateam/blockclust_workflow

Changeset 0:b2040467dad2 (2014-05-06)
Next changeset 1:25afbac62e0d (2014-05-07)
Commit message:
Uploaded
added:
blockclust_workflow.ga
readme.rst
repository_dependencies.xml
diff -r 000000000000 -r b2040467dad2 blockclust_workflow.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/blockclust_workflow.ga Tue May 06 12:51:51 2014 -0400
@@ -0,0 +1,461 @@
{
    "a_galaxy_workflow": "true",
    "annotation": "",
    "format-version": "0.1",
    "name": "BlockClustWorkflow",
    "steps": {
        "0": {
            "annotation": "",
            "id": 0,
            "input_connections": {},
            "inputs": [
                {
                    "description": "",
                    "name": "Input BAM file"
                }
            ],
            "name": "Input dataset",
            "outputs": [],
            "position": {
                "left": 267,
                "top": 257.5
            },
            "tool_errors": null,
            "tool_id": null,
            "tool_state": "{\"name\": \"Input BAM file\"}",
            "tool_version": null,
            "type": "data_input",
            "user_outputs": []
        },
        "1": {
            "annotation": "",
            "id": 1,
            "input_connections": {
                "tool_mode|reads_bam": {
                    "id": 0,
                    "output_name": "output"
                }
            },
            "inputs": [],
            "name": "BlockClust",
            "outputs": [
                {
                    "name": "tags_bed",
                    "type": "bed"
                },
                {
                    "name": "hclust_plot",
                    "type": "pdf"
                },
                {
                    "name": "clusters",
                    "type": "bed"
                },
                {
                    "name": "model_based_pred_bed",
                    "type": "bed"
                },
                {
                    "name": "nearest_neighbour_pred_bed",
                    "type": "bed"
                },
                {
                    "name": "sim_tab_out",
                    "type": "tabular"
                },
                {
                    "name": "cluster_dist",
                    "type": "pdf"
                },
                {
                    "name": "cluster_hclust",
                    "type": "pdf"
                }
            ],
            "position": {
                "left": 524,
                "top": 323
            },
            "post_job_actions": {
                "HideDatasetActioncluster_dist": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "cluster_dist"
                },
                "HideDatasetActioncluster_hclust": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "cluster_hclust"
                },
                "HideDatasetActionclusters": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "clusters"
                },
                "HideDatasetActionhclust_plot": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "hclust_plot"
                },
                "HideDatasetActionmodel_based_pred_bed": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "model_based_pred_bed"
                },
                "HideDatasetActionnearest_neighbour_pred_bed": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "nearest_neighbour_pred_bed"
                },
                "HideDatasetActionsim_tab_out": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "sim_tab_out"
                },

[...]

            "tool_version": "1.1.0.2",
            "type": "tool",
            "user_outputs": []
        },
        "7": {
            "annotation": "",
            "id": 7,
            "input_connections": {
                "tool_mode|clusters_bed": {
                    "id": 4,
                    "output_name": "clusters"
                },
                "tool_mode|cmsearch_out": {
                    "id": 6,
                    "output_name": "outfile"
                },
                "tool_mode|sim_tab_in": {
                    "id": 4,
                    "output_name": "sim_tab_out"
                }
            },
            "inputs": [],
            "name": "BlockClust",
            "outputs": [
                {
                    "name": "tags_bed",
                    "type": "bed"
                },
                {
                    "name": "hclust_plot",
                    "type": "pdf"
                },
                {
                    "name": "clusters",
                    "type": "bed"
                },
                {
                    "name": "model_based_pred_bed",
                    "type": "bed"
                },
                {
                    "name": "nearest_neighbour_pred_bed",
                    "type": "bed"
                },
                {
                    "name": "sim_tab_out",
                    "type": "tabular"
                },
                {
                    "name": "cluster_dist",
                    "type": "pdf"
                },
                {
                    "name": "cluster_hclust",
                    "type": "pdf"
                }
            ],
            "position": {
                "left": 2049,
                "top": 433
            },
            "post_job_actions": {
                "HideDatasetActionclusters": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "clusters"
                },
                "HideDatasetActionhclust_plot": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "hclust_plot"
                },
                "HideDatasetActionmodel_based_pred_bed": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "model_based_pred_bed"
                },
                "HideDatasetActionnearest_neighbour_pred_bed": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "nearest_neighbour_pred_bed"
                },
                "HideDatasetActionsim_tab_out": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "sim_tab_out"
                },
                "HideDatasetActiontags_bed": {
                    "action_arguments": {},
                    "action_type": "HideDatasetAction",
                    "output_name": "tags_bed"
                }
            },
            "tool_errors": null,
            "tool_id": "testtoolshed.g2.bx.psu.edu/repos/rnateam/blockclust/blockclust/1.0.0",
            "tool_state": "{\"tool_mode\": \"{\\\"cmsearch_out\\\": null, \\\"operation\\\": \\\"post\\\", \\\"clusters_bed\\\": null, \\\"__current_case__\\\": 2, \\\"sim_tab_in\\\": null}\", \"__rerun_remap_job_id__\": null, \"mode\": \"{\\\"cmsearch_out\\\": null, \\\"operation\\\": \\\"post\\\", \\\"clusters_bed\\\": null, \\\"__current_case__\\\": 2, \\\"sim_tab_in\\\": null}\", \"__page__\": 0}",
            "tool_version": "1.0.0",
            "type": "tool",
            "user_outputs": []
        }
    }
}
\ No newline at end of file
diff -r 000000000000 -r b2040467dad2 readme.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/readme.rst Tue May 06 12:51:51 2014 -0400
b"@@ -0,0 +1,278 @@\n+\n+\n+This package is a Galaxy workflow for BlockClust pipeline.\n+\n+\n+======\n+Galaxy\n+======\n+\n+`Galaxy <http://galaxyproject.org/>`_ is an open, web-based platform for data intensive research.\n+All tools can be combined in workflows without any need of programming skills. \n+Furthermore the platform can be extended with more tools at any time.\n+Each tool has its own information about what it does and how the input is supposed to look like.\n+You can make data available for Galaxy by uploading local files or downloading online content.\n+Inputfiles, workflowsteps and results are stored in a history where you can view them or reaccess them later.\n+It is possible to share workflows and histories with other users or make the public available.\n+Saved workflows can be used with new input files or just to rerun an analyses which ensures repeatability.\n+\n+\n+\n+Getting Started\n+===============\n+\n+BlockClust can be installed on all common Unix systems. \n+However, it is developed on Linux and I don't have access to OS X. You are welcome to help improving this documentation, just contact_ me.\n+\n+For any additional information, especially cluster configuration or general Galaxy_ questions, \n+please have a look at the Galaxy Wiki.\n+\n+- http://wiki.galaxyproject.org/\n+\n+- http://wiki.galaxyproject.org/Admin/\n+\n+- http://galaxyproject.org/search/web/\n+\n+.. _contact: https://github.com/bgruening\n+.. _Galaxy: http://galaxyproject.org/\n+\n+Prerequisites::\n+\n+* Python 2.6 or 2.7\n+* standard C compiler, C++ and Fortran compiler\n+* Autotools\n+* CMake\n+* cairo development files (used for PNG depictions)\n+* python development files\n+* Java Runtime Environment (JRE, used by OPSIN and NPLS)\n+\n+To install all of the prerequisites you can run the following command, depending on your OS:\n+\n+- Debian based systems: apt-get install build-essential gfortran cmake mercurial libcairo2-dev python-dev\n+- Fedora: yum install make automake gcc gcc-c++ gcc-gfortran cmake mercurial libcairo2-devel python-devel\n+- OS X (MacPorts_): port install gcc cmake automake mercurial cairo-devel\n+\n+.. _MacPorts: http://www.macports.org/\n+\n+\n+===================\n+Galaxy installation\n+===================\n+\n+\n+0. Create a sand-boxed Python using virtualenv_ (not necessary but recommended)::\n+\n+        wget https://raw.github.com/pypa/virtualenv/master/virtualenv.py\n+\tpython ./virtualenv.py --no-site-packages galaxy_env\n+\t. ./galaxy_env/bin/activate\n+\n+.. _virtualenv: http://www.virtualenv.org/\n+\n+\n+1. Clone the latest `Galaxy platform`_::\n+\n+\thg clone https://bitbucket.org/galaxy/galaxy-central/\n+\n+.. _Galaxy platform: http://wiki.galaxyproject.org/Admin/Get%20Galaxy\n+\n+2. Navigate to the galaxy-central folder and update it::\n+\t\n+\tcd ~/galaxy-central\n+\thg pull\n+\thg update\n+   \n+   This step is not necessary if you have a fresh checkout. Anyway, it is good to know ;)\n+\n+3. Create folders for toolshed and dependencies::\n+\n+\tmkdir ~/shed_tools\n+\tmkdir ~/galaxy-central/tool_deps\n+\n+4. Create configuration file::\n+\n+\tcp ~/galaxy-central/universe_wsgi.ini.sample ~/galaxy-central/universe_wsgi.ini\n+\n+5. Open universe_wsgi.ini and change the dependencies directory::\n+\n+\tLINUX: gedit ~/galaxy-central/universe_wsgi.ini\n+\tOS X: open -a TextEdit ~/galaxy-central/universe_wsgi.ini\n+\n+6. Search for ``tool_dependency_dir = None`` and change it to ``tool_dependency_dir = ./tool_deps``, remove the ``#`` if needed\n+\n+7. 


=======================
Tool Shed configuration
=======================

[...]lysis  > blockclust_workflow
- install chemicaltoolbox

.. _admin page: http://localhost:8080/admin


===============
Troubleshooting
===============

If you have any trouble or the installation did not finish properly, do not hesitate to contact me. However, if the
installation fails during the Galaxy installation itself, have a look at the `Galaxy wiki`_. If the blockclust_workflow installation fails,
you can try to run::

	python ./scripts/api/repair_tool_shed_repository.py --api YOUR_API_KEY -l http://localhost:8080 --url http://toolshed.g2.bx.psu.edu/ -o rnateam -r e9b2400cc569 --name blockclust_workflow

That will rerun all failed installation routines. Alternatively, you can navigate to the blockclust_workflow repository in
your browser and repair it manually:
Top Panel → Admin → Manage installed tool shed repositories → blockclust_workflow → Repository Actions → Repair repository

------

On slow computers, and during the compilation of large software libraries such as R,
the Tool Shed can run into a timeout and kill the installation.
That problem is known and should be fixed in the near future.

If you encounter a timeout or a hang during the installation, you can increase ``threadpool_kill_thread_limit`` in your universe_wsgi.ini file.

------

**Database locking errors**

Please note that Galaxy uses a SQLite database by default. SQLite is not intended for production use.
With multiple users or complex components, like this workflow, you will see database locking errors.
We highly recommend using PostgreSQL for any kind of production system.

.. _Galaxy wiki: http://wiki.galaxyproject.org/


Workflows
=========

An example workflow is located in the Tool Shed::

	http://testtoolshed.g2.bx.psu.edu/view/rnateam/blockclust_workflow

You can install the workflow with the API::

	python ./scripts/api/install_tool_shed_repositories.py --api YOUR_API_KEY -l http://localhost:8080 --url http://toolshed.g2.bx.psu.edu/ -o rnateam -r e9b2400cc569 --name blockclust_workflow --tool-deps --repository-deps --panel-section-name BlockClust

or, as described above, via the web browser. You have now successfully installed the workflow;
to make it available to all your users, go to the admin panel, choose the workflow and import it.
For more information have a look at the Galaxy wiki::

	http://wiki.galaxyproject.org/ToolShedWorkflowSharing#Finding_workflows_in_tool_shed_repositories

Please **note** that Galaxy uses a SQLite database by default. SQLite is not intended for production use.
With multiple users or complex components, like this workflow, you will see database locking errors.
We highly recommend using PostgreSQL for any kind of production system.
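Once Galaxy is back up, a quick way to confirm that the repository and its dependencies were installed is to ask the Galaxy API for its installed tool shed repositories. The sketch below is illustrative only; it assumes a local Galaxy at ``http://localhost:8080``, a valid API key, and that your Galaxy version exposes the ``/api/tool_shed_repositories`` endpoint (the same API the install and repair scripts above talk to). It can be run with any Python 3 interpreter, independent of Galaxy's own Python::

    import json
    import urllib.request

    API_KEY = "YOUR_API_KEY"  # the same key used for the API scripts above
    url = "http://localhost:8080/api/tool_shed_repositories?key=" + API_KEY

    # Each entry describes one installed repository; the exact field names
    # (name, owner, status) are an assumption about the API response.
    with urllib.request.urlopen(url) as response:
        repositories = json.loads(response.read().decode("utf-8"))

    for repo in repositories:
        print(repo.get("name"), repo.get("owner"), repo.get("status"))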


Sample Data
===========


Citation
========

If you use this workflow directly, or a derivative of it, or the associated
wrappers for Galaxy, in work leading to a scientific publication,
please cite:

P. Videm et al...


Additional References
=====================


Availability
============

This workflow is available on the main Galaxy Tool Shed:

 http://testtoolshed.g2.bx.psu.edu/view/rnateam/blockclust_workflow

Development is done on GitHub:

 https://github.com/bgruening/galaxytools/tree/master/workflows/blockclust


Dependencies
============

These dependencies should be resolved automatically via the Galaxy Tool Shed:

* http://testtoolshed.g2.bx.psu.edu/view/iuc/package_samtools_0_1_19
* http://testtoolshed.g2.bx.psu.edu/view/iuc/package_r_3_0_1
* http://testtoolshed.g2.bx.psu.edu/view/rnateam/package_segemehl_0_1_6
* http://testtoolshed.g2.bx.psu.edu/view/iuc/msa_datatypes
* http://testtoolshed.g2.bx.psu.edu/view/iuc/package_infernal_1_1rc4
* http://testtoolshed.g2.bx.psu.edu/view/rnateam/blockbuster
* http://testtoolshed.g2.bx.psu.edu/view/bgruening/package_eden_1_1
* http://testtoolshed.g2.bx.psu.edu/view/iuc/package_mcl_12_135
diff -r 000000000000 -r b2040467dad2 repository_dependencies.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/repository_dependencies.xml Tue May 06 12:51:51 2014 -0400
@@ -0,0 +1,8 @@
+<?xml version="1.0"?>
+<repositories description="This workflow requires a number of different repositories.">
+    <repository changeset_revision="768a67f6ae18" name="blockclust" owner="rnateam" toolshed="http://toolshed.g2.bx.psu.edu" />
+    <repository changeset_revision="e97db054a88d" name="segemehl" owner="rnateam" toolshed="http://toolshed.g2.bx.psu.edu" />
+    <repository changeset_revision="6ae946589f04" name="blockbuster" owner="rnateam" toolshed="http://toolshed.g2.bx.psu.edu" />
+    <repository changeset_revision="55bb96edfc07" name="infernal" owner="bgruening" toolshed="http://toolshed.g2.bx.psu.edu" />
+    <!--<repository name="graphclust" owner="rnateam" />-->
+</repositories>
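The ``repository_dependencies.xml`` above is what lets the Tool Shed pull in the listed tool repositories automatically when this workflow repository is installed. A small, self-contained Python sketch (illustrative only, not shipped with the repository) for reading the manifest::

    import xml.etree.ElementTree as ET

    # Parse the dependency manifest added in this changeset.
    root = ET.parse("repository_dependencies.xml").getroot()

    print(root.get("description"))
    # Each <repository> entry pins a name, owner, changeset revision and tool shed;
    # the commented-out graphclust entry is skipped because it is an XML comment.
    for repo in root.findall("repository"):
        print(repo.get("name"), repo.get("owner"),
              repo.get("changeset_revision"), repo.get("toolshed"))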