Repository 'tool_factory_2'
hg clone https://toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2

Changeset 41:f8c1694190f0 (2020-08-16)
Previous changeset 40:51fa77152988 (2020-08-13) Next changeset 42:b938475235e3 (2020-08-16)
Commit message:
Uploaded
modified:
toolfactory/README.md
added:
toolfactory/.github/workflows/commit.yml
toolfactory/.gitignore
toolfactory/.shed.yml
toolfactory/TF_example_wf.ga
toolfactory/docker/Dockerfile
toolfactory/docker/TF_example_wf.ga
toolfactory/docker/TF_example_wf2.ga
toolfactory/docker/dockerfile.seq
toolfactory/docker/my_tool_list.yml
toolfactory/docker/post-start-actions.sh
toolfactory/docker/startgaldock.sh
toolfactory/docker/startup
toolfactory/docker/startup.sh
toolfactory/html_dir.py
removed:
toolfactory/tftesthistory.tar.gz
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/.github/workflows/commit.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.github/workflows/commit.yml Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,68 @@
+name: Galaxy Tool Linting and Tests for PR
+# run planemo on a git repository containing a single tool
+# as a github action. Also runs flake8 and planemo lint/test.
+# ross lazarus august 2020
+on: [pull_request,push]
+env:
+  GALAXY_REPO: https://github.com/galaxyproject/galaxy
+  GALAXY_RELEASE: release_20.05
+jobs:
+  setup:
+    name: setup environment and python
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [3.7]
+    steps:
+    - name: Print github context properties
+      run: |
+        echo 'event: ${{ github.event_name }}'
+        echo 'sha: ${{ github.sha }}'
+        echo 'ref: ${{ github.ref }}'
+        echo 'head_ref: ${{ github.head_ref }}'
+        echo 'base_ref: ${{ github.base_ref }}'
+        echo 'event.before: ${{ github.event.before }}'
+        echo 'event.after: ${{ github.event.after }}'
+    - uses: actions/setup-python@v1
+      with:
+        python-version: ${{ matrix.python-version }}
+    - uses: actions/checkout@v2
+      with:
+    # planemo does not seem to want to install the requirement galaxyxml 
+    # into the venv it manages at tool testing so do it the old skool way
+        repository: 'galaxyproject/galaxy'
+        path: 'galaxy'
+    - name: make venv ready for this galaxy and planemo 
+      run:  |
+        python3 -m venv $GITHUB_WORKSPACE/galaxy/.venv
+        . $GITHUB_WORKSPACE/galaxy/.venv/bin/activate
+        pip install --upgrade pip
+        pip install wheel
+        pip install -r $GITHUB_WORKSPACE/galaxy/requirements.txt
+        # pip install galaxyxml # currently includes a patched working version awaiting PR merge
+    - name: Upgrade pip
+      run: pip install --upgrade pip
+    # Install the `wheel` package so that when installing other packages which
+    # are not available as wheels, pip will build a wheel for them, which can be cached.
+    - name: Install wheel
+      run: pip install wheel
+    - name: Install Planemo and flake8
+      run: pip install planemo flake8 flake8-import-order
+    # galaxyxml temporarily removed until PR accepted
+    - uses: actions/checkout@v2
+      with:
+        fetch-depth: 1
+    - name: flake8
+      run: flake8 --ignore=E2,E3,E4,E5,W3,W505
+    - name: Planemo lint
+      run: planemo lint .
+    - name: Planemo test tool
+      run: planemo test --galaxy_root $GITHUB_WORKSPACE/galaxy --test_output tool_test_output.html --skip_venv --test_output_json tool_test_output.json --galaxy_python_version ${{ matrix.python-version }}  .
+    - name: Copy artifacts into place
+      run: |
+        mkdir upload
+        mv tool_test_output.json tool_test_output.html upload/
+    - uses: actions/upload-artifact@v2.0.1
+      with:
+        name: 'All tool test results'
+        path: upload
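The workflow above amounts to flake8 plus planemo lint and test run against a Galaxy checkout. As a rough local equivalent, assuming planemo and flake8 are installed and a Galaxy clone sits at ~/galaxy (the path is illustrative), the same checks can be run from the tool directory:

    # local sketch of the CI steps above; ~/galaxy is an assumed Galaxy clone
    flake8 --ignore=E2,E3,E4,E5,W3,W505
    planemo lint .
    planemo test --galaxy_root ~/galaxy --skip_venv \
        --test_output tool_test_output.html --test_output_json tool_test_output.json .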
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/.gitignore
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.gitignore Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,129 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/.shed.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.shed.yml Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,13 @@
+name: toolfactory
+owner: fubar
+description: ToolFactory - tool to make Galaxy tools ready for the toolshed
+homepage_url: https://github.com/fubar2/toolfactory
+long_description: |
+    ToolFactory - turn executable packages and R/python/perl/bash scripts into ordinary Galaxy tools
+
+    Creating re-usable tools from scripts: The Galaxy Tool Factory Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team 
+    Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
+remote_repository_url: https://github.com/fubar2/toolfactory
+type: tool_dependency_definition
+categories:
+- Tool Generators
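The .shed.yml above supplies the metadata planemo uses when pushing this directory to the ToolShed. A minimal, hedged sketch of the usual commands, run from the toolfactory directory and assuming ToolShed credentials are already configured in ~/.planemo.yml:

    # sketch only; the shed target name and credential setup are assumptions
    planemo shed_lint
    planemo shed_update --shed_target toolshed --check_diff .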
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/README.md
--- a/toolfactory/README.md Thu Aug 13 02:55:26 2020 -0400
+++ b/toolfactory/README.md Sun Aug 16 08:11:10 2020 -0400
@@ -1,27 +1,11 @@
 Note as at August 8 2020
 
-Until a PR is accepted, galaxyxml requires a small tweak - you will need to install
-```
-https://github.com/fubar2/galaxyxml
-```
-instead into the virtualenv used to run galaxy and when using planemo, the
-
-```
---skip_venv 
-```
-
-flag must be used.
-
 
 *WARNING before you start*
 
  Install this tool on a private Galaxy ONLY
  Please NEVER on a public or production instance
  
-Updated august 2014 by John Chilton adding citation support
-
-Updated august 8 2014 to fix bugs reported by Marius van den Beek
-
 Please cite the resource at
 http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
 if you use this tool in your published work.
@@ -38,7 +22,6 @@
 
 It can be turned into an ordinary Galaxy tool in minutes, using a Galaxy tool.
 
-
 **Automated generation of new Galaxy tools for installation into any Galaxy**
 
 A test is generated using small sample test data inputs and parameter settings you supply.
@@ -71,11 +54,6 @@
 *tgz* - as the name suggests, it's an archive ready to upload to a
 Galaxy ToolShed as a new tool repository.
 
-Until tgz is added to the galaxy distribution datatypes you will need to add the following:
-    <datatype extension="tgz" type="galaxy.datatypes.binary:Binary" subclass="true" mimetype="multipart/x-gzip" display_in_upload="true" />
-
-to::
- [galaxyroot]/config/datatypes_conf.xml
 
 Once it's in a ToolShed, it can be installed into any local Galaxy server
 from the server administrative interface.
@@ -129,7 +107,7 @@
 **What it does** 
 
 This is a tool factory for simple scripts in python, R and
-perl currently. Functional tests are automatically generated. How cool is that.
+perl currently. Functional tests are automatically generated. 
 
 LIMITED to simple scripts that read one input from the history. Optionally can
 write one new history dataset, and optionally collect any number of outputs
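For context, a minimal hypothetical example of the kind of simple script this accepts: one history input in, one new history dataset out. The argument handling and filenames here are illustrative only; the real ToolFactory passes paths according to the parameter-passing style chosen on the form (e.g. argparse or positional):

    #!/bin/bash
    # hypothetical simple script: read one input dataset, write one output dataset
    INFILE="$1"
    OUTFILE="$2"
    tac "$INFILE" > "$OUTFILE"   # reverse the lines of the input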
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/TF_example_wf.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/TF_example_wf.ga Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,458 @@
(truncated diff: adds the "TF examples" Galaxy workflow. Two input dataset steps, phiX.fasta and rgToolFactory2.py, feed rgTF2 ToolFactory steps, including a generated "planemotest" tool that unpacks a ToolShed tgz archive and runs planemo test against a local Galaxy root; each ToolFactory step emits a TF_run_report and a new_tool tgz archive.)
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/Dockerfile
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/Dockerfile Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,46 @@
+# Galaxy - Toolshed Docker image
+# Derived from bgruening stable galaxy 
+# VERSION       0.2
+# patches startup so the below will install a workflow dropped into $GALAXY_ROOT/workflows
+# use docker run -p 8080:80  -d -e GALAXY_AUTO_UPDATE_WORKFLOWS=/galaxy-central/workflows/TF_sample_wf.ga -v /home/ubuntu/galaxy_storage/:/export/  toolfactory
+FROM bgruening/galaxy-stable
+MAINTAINER Ross Lazarus ross.lazarus@gmail.com
+
+ENV GALAXY_CONFIG_BRAND=ToolFactory
+ENV GALAXY_LOGGING="full"
+
+# RUN apt-get update -y && apt-get install -y build-essential gcc apt-utils
+# needed for planemo build
+
+RUN /galaxy_venv/bin/python -m pip install --upgrade pip
+# RUN /galaxy_venv/bin/python -m pip install planemo
+
+# RUN add-tool-shed --url 'http://testtoolshed.g2.bx.psu.edu/' --name 'Test Tool Shed'
+ADD my_tool_list.yml $GALAXY_ROOT/config/toolfactory_tools.yaml
+ENV GALAXY_AUTO_UPDATE_TOOLS=$GALAXY_ROOT/config/toolfactory_tools.yaml
+
+# Add workflows to the Docker image
+RUN mkdir -p $GALAXY_ROOT/workflows
+ADD TF_example_wf.ga $GALAXY_ROOT/workflows/TF_example_wf.ga
+ADD post-start-actions.sh /export/post-start-actions.sh
+RUN chmod a+xr /export/post-start-actions.sh \
+ && chmod a+xr $GALAXY_ROOT/workflows/TF_example_wf.ga 
+
+# Add Galaxy interactive tours
+# ADD ./rna-workbench-tours/* $GALAXY_ROOT/config/plugins/tours/
+# Add data library definition file
+# ADD library_data.yaml $GALAXY_ROOT/library_data.yaml
+# cleanup dance
+RUN apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ \
+    && find $GALAXY_ROOT/ -name '*.pyc' -delete | true \
+    && find /usr/lib/ -name '*.pyc' -delete | true \
+    && find /var/log/ -name '*.log' -delete | true \
+    && find $GALAXY_VIRTUAL_ENV -name '*.pyc' -delete | true \
+    && rm -rf /tmp/* /root/.cache/ /var/cache/* $GALAXY_ROOT/client/node_modules/ $GALAXY_VIRTUAL_ENV/src/ /home/galaxy/.cache/ /home/galaxy/.npm
+ENV GALAXY_DEFAULT_ADMIN_USER=''
+VOLUME ["/export/"]
+EXPOSE :80
+EXPOSE :21
+EXPOSE :8800
+CMD ["/usr/bin/startup"]
+
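A hedged sketch of building and starting this image, following the run command used in startgaldock.sh below (the image tag and host export path are taken from this changeset and are otherwise assumptions):

    docker build -t toolfactory .
    docker run -d -p 8080:80 -v /home/ubuntu/galaxy_storage/:/export/ toolfactory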
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/TF_example_wf.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/TF_example_wf.ga Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,458 @@
(truncated diff: identical copy of the "TF examples" workflow above, bundled into the Docker image build context)
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/TF_example_wf2.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/TF_example_wf2.ga Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,500 @@
(truncated diff: variant of the "TF examples" workflow, "imported from uploaded file", whose ToolFactory steps reference the ToolShed-installed tool id toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00 at changeset 51fa77152988 instead of the local rgTF2 id)
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/dockerfile.seq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/dockerfile.seq Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,25 @@
+# Galaxy - Toolshed docker
+
+FROM quay.io/bgruening/galaxy:19.01
+
+MAINTAINER Björn A. Grüning, bjoern.gruening@gmail.com
+
+ENV GALAXY_CONFIG_BRAND ToolFactory
+ENV GALAXY_CONFIG_SANITIZE_ALL_HTML false
+
+# Install tools
+#ADD data_managers.yaml $GALAXY_ROOT/data_managers.yaml
+#RUN install-tools $GALAXY_ROOT/data_managers.yaml && \
+#    /tool_deps/_conda/bin/conda clean --tarballs && \
+#    rm /export/galaxy-central/ -rf
+ADD my_tool_list.yml $GALAXY_ROOT/tools1.yaml
+RUN install-tools $GALAXY_ROOT/tools1.yaml && \
+    /tool_deps/_conda/bin/conda clean --tarballs && \
+    rm /export/galaxy-central/ -rf
+
+ADD TF_example_wf.ga $GALAXY_HOME/workflows/TF_example_wf.ga
+
+ADD post-start-actions.sh /export/post-start-actions.sh
+RUN chmod a+x /export/post-start-actions.sh
+
+
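This alternative Dockerfile pre-installs the tool at build time with install-tools rather than at startup; a hedged sketch of building it from the docker directory (the tag is illustrative):

    docker build -f dockerfile.seq -t toolfactory-seq .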
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/my_tool_list.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/my_tool_list.yml Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,9 @@
+install_resolver_dependencies: true
+install_tool_dependencies: false
+tools:
+- name: tool_factory_2
+  owner: fubar
+  tool_panel_section_label: 'Make new Tools'
+  tool_shed_url: https://toolshed.g2.bx.psu.edu
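This tool list uses the ephemeris format consumed at container startup; a minimal sketch of installing it by hand into a running instance, mirroring the shed-tools call in the startup script (the URL and admin API key are placeholders):

    # assumes ephemeris is installed and the Galaxy instance is up
    shed-tools install -g "http://127.0.0.1" -a "<admin API key>" -t my_tool_list.yml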


diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/post-start-actions.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/post-start-actions.sh Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,5 @@
+#!/bin/bash
+# hook to install tf demo workflow
+echo "#### post start actions.sh hook happening"
+chown $GALAXY_USER $GALAXY_ROOT/workflows/TF_example_wf.ga
+workflow-install -w $GALAXY_ROOT/workflows/TF_example_wf.ga -g http://localhost -a fakekey --publish_workflows
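Per the comments in the startup script, this hook only runs if it is present on the export volume and executable; a minimal sketch, using the host path mounted as /export/ elsewhere in this changeset:

    cp post-start-actions.sh /home/ubuntu/galaxy_storage/
    chmod a+x /home/ubuntu/galaxy_storage/post-start-actions.sh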
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/startgaldock.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startgaldock.sh Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,1 @@
+docker run -d -p 8080:80 -v /home/ubuntu/galaxy_storage/:/export/  toolfactory
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/startup
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startup Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,460 @@
(truncated diff: adds the docker-galaxy-stable startup script. It migrates legacy tool_deps, sets the Galaxy handler count, applies proxy-prefix and optional HTTPS/letsencrypt configuration via ansible, optionally creates the default admin user, waits for Galaxy with galaxy-wait before running /export/post-start-actions.sh, installs any tool lists named in $GALAXY_AUTO_UPDATE_TOOLS with shed-tools, and tails the logs.)
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/docker/startup.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startup.sh Sun Aug 16 08:11:10 2020 -0400
@@ -0,0 +1,462 @@
(truncated diff: patched copy of the startup script above with a reworked post-start-actions.sh block: the hook is detected with a plain file test independent of the default-admin branch, its galaxy-wait output is logged, and a message is written to the post-start-actions log when no hook is found.)
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/html_dir.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/html_dir.py Sun Aug 16 08:11:10 2020 -0400
[
@@ -0,0 +1,180 @@
+
+class tooloutHTMLifyer(self):
+
+    def compressPDF(self,inpdf=None,thumbformat='png'):
+        """need absolute path to pdf
+           note that GS gets confoozled if no $TMP or $TEMP
+           so we set it
+        """
+        assert os.path.isfile(inpdf), "## Input %s supplied to %s compressPDF not found" % (inpdf,self.myName)
+        hlog = os.path.join(self.opts.output_dir,"compress_%s.txt" % os.path.basename(inpdf))
+        sto = open(hlog,'a')
+        our_env = os.environ.copy()
+        our_tmp = our_env.get('TMP',None)
+        if not our_tmp:
+            our_tmp = our_env.get('TEMP',None)
+        if not (our_tmp and os.path.exists(our_tmp)):
+            newtmp = os.path.join(self.opts.output_dir,'tmp')
+            try:
+                os.mkdir(newtmp)
+            except:
+                sto.write('## WARNING - cannot make %s - it may exist or permissions need fixing\n' % newtmp)
+            our_env['TEMP'] = newtmp
+            if not self.temp_warned:
+               sto.write('## WARNING - no $TMP or $TEMP!!! Please fix - using %s temporarily\n' % newtmp)
+               self.temp_warned = True
+        outpdf = '%s_compressed' % inpdf
+        cl = ["gs", "-sDEVICE=pdfwrite", "-dNOPAUSE", "-dUseCIEColor", "-dBATCH","-dPDFSETTINGS=/printer", "-sOutputFile=%s" % outpdf,inpdf]
+        x = subprocess.Popen(cl,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
+        retval1 = x.wait()
+        sto.close()
+        if retval1 == 0:
+            os.unlink(inpdf)
+            shutil.move(outpdf,inpdf)
+            os.unlink(hlog)
+        hlog = os.path.join(self.opts.output_dir,"thumbnail_%s.txt" % os.path.basename(inpdf))
+        sto = open(hlog,'w')
+        outpng = '%s.%s' % (os.path.splitext(inpdf)[0],thumbformat)
+        if self.useGM:
+            cl2 = ['gm', 'convert', inpdf, outpng]
+        else: # assume imagemagick
+            cl2 = ['convert', inpdf, outpng]
+        x = subprocess.Popen(cl2,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
+        retval2 = x.wait()
+        sto.close()
+        if retval2 == 0:
+             os.unlink(hlog)
+        retval = retval1 or retval2
+        return retval
+
+
+    def getfSize(self,fpath,outpath):
+        """
+        format a nice file size string
+        """
+        size = ''
+        fp = os.path.join(outpath,fpath)
+        if os.path.isfile(fp):
+            size = '0 B'
+            n = float(os.path.getsize(fp))
+            if n > 2**20:
+                size = '%1.1f MB' % (n/2**20)
+            elif n > 2**10:
+                size = '%1.1f KB' % (n/2**10)
+            elif n > 0:
+                size = '%d B' % (int(n))
+        return size
+
+    def makeHtml(self):
+        """ Create an HTML file content to list all the artifacts found in the output_dir
+        """
+
+        galhtmlprefix = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+        <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+        <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <meta name="generator" content="Galaxy %s tool output - see http://g2.trac.bx.psu.edu/" />
+        <title></title>
+        <link rel="stylesheet" href="/static/style/base.css" type="text/css" />
+        </head>
+        <body>
+        <div class="toolFormBody">
+        """
+        galhtmlattr = """<hr/><div class="infomessage">This tool (%s) was generated by the <a href="https://bitbucket.org/fubar/galaxytoolfactory/overview">Galaxy Tool Factory</a></div><br/>"""
+        galhtmlpostfix = """</div></body></html>\n"""
+
+        flist = os.listdir(self.opts.output_dir)
+        flist = [x for x in flist if x != 'Rplots.pdf']
+        flist.sort()
+        html = []
+        html.append(galhtmlprefix % progname)
+        html.append('<div class="infomessage">Galaxy T
[...]
+                     % (fname,fname,sfsize))
+                else:
+                    fhtml.append('<tr><td><a href="%s">%s</a></td><td>%s</td></tr>' % (fname,fname,sfsize))
+            for logfname in logfiles: # expect at least tlog - if more
+                if os.path.abspath(logfname) == os.path.abspath(self.tlog): # handled later
+                    sectionname = 'All tool run'
+                    if (len(logfiles) > 1):
+                        sectionname = 'Other'
+                    ourpdfs = pdflist
+                else:
+                    realname = os.path.basename(logfname)
+                    sectionname = os.path.splitext(realname)[0].split('_')[0] # break in case _ added to log
+                    ourpdfs = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] == sectionname]
+                    pdflist = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] != sectionname] # remove
+                nacross = 1
+                npdf = len(ourpdfs)
+
+                if npdf > 0:
+                    nacross = math.sqrt(npdf) ## int(round(math.log(npdf,2)))
+                    if int(nacross)**2 != npdf:
+                        nacross += 1
+                    nacross = int(nacross)
+                    width = min(400,int(1200/nacross))
+                    html.append('<div class="toolFormTitle">%s images and outputs</div>' % sectionname)
+                    html.append('(Click on a thumbnail image to download the corresponding original PDF image)<br/>')
+                    ntogo = nacross # counter for table row padding with empty cells
+                    html.append('<div><table class="simple" cellpadding="2" cellspacing="2">\n<tr>')
+                    for i,paths in enumerate(ourpdfs):
+                        fname,thumb = paths
+                        s= """<td><a href="%s"><img src="%s" title="Click to download a PDF of %s" hspace="5" width="%d"
+                           alt="Image called %s"/></a></td>\n""" % (fname,thumb,fname,width,fname)
+                        if ((i+1) % nacross == 0):
+                            s += '</tr>\n'
+                            ntogo = 0
+                            if i < (npdf - 1): # more to come
+                               s += '<tr>'
+                               ntogo = nacross
+                        else:
+                            ntogo -= 1
+                        html.append(s)
+                    if html[-1].strip().endswith('</tr>'):
+                        html.append('</table></div>\n')
+                    else:
+                        if ntogo > 0: # pad
+                           html.append('<td>&nbsp;</td>'*ntogo)
+                        html.append('</tr></table></div>\n')
+                logt = open(logfname,'r').readlines()
+                logtext = [x for x in logt if x.strip() > '']
+                html.append('<div class="toolFormTitle">%s log output</div>' % sectionname)
+                if len(logtext) > 1:
+                    html.append('\n<pre>\n')
+                    html += logtext
+                    html.append('\n</pre>\n')
+                else:
+                    html.append('%s is empty<br/>' % logfname)
+        if len(fhtml) > 0:
+           fhtml.insert(0,'<div><table class="colored" cellpadding="3" cellspacing="3"><tr><th>Output File Name (click to view)</th><th>Size</th></tr>\n')
+           fhtml.append('</table></div><br/>')
+           html.append('<div class="toolFormTitle">All output files available for downloading</div>\n')
+           html += fhtml # add all non-pdf files to the end of the display
+        else:
+            html.append('<div class="warningmessagelarge">### Error - %s returned no files - please confirm that parameters are sane</div>' % self.opts.interpreter)
+        html.append(galhtmlpostfix)
+        htmlf = file(self.opts.output_html,'w')
+        htmlf.write('\n'.join(html))
+        htmlf.write('\n')
+        htmlf.close()
+        self.html = html
+
+
diff -r 51fa77152988 -r f8c1694190f0 toolfactory/tftesthistory.tar.gz
Binary file toolfactory/tftesthistory.tar.gz has changed