comparison env/lib/python3.9/site-packages/planemo/galaxy/config.py @ 0:4f3585e2f14b draft default tip

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author shellac
date Mon, 22 Mar 2021 18:12:50 +0000 (2021-03-22)
1 """Abstractions for setting up a Galaxy instance."""
2 from __future__ import absolute_import
3 from __future__ import print_function
4
5 import abc
6 import contextlib
7 import os
8 import random
9 import shutil
10 import threading
11 from string import Template
12 from tempfile import mkdtemp
13
14 from galaxy.containers.docker_model import DockerVolume
15 from galaxy.tool_util.deps import docker_util
16 from galaxy.util.commands import argv_to_str
17 from pkg_resources import parse_version
18 from six import (
19 add_metaclass,
20 iteritems
21 )
22 from six.moves import shlex_quote
23
24 from planemo import git
25 from planemo.config import OptionSource
26 from planemo.deps import ensure_dependency_resolvers_conf_configured
27 from planemo.docker import docker_host_args
28 from planemo.galaxy.workflows import remote_runnable_to_workflow_id
29 from planemo.io import (
30 communicate,
31 kill_pid_file,
32 shell,
33 shell_join,
34 untar_to,
35 wait_on,
36 warn,
37 write_file,
38 )
39 from planemo.mulled import build_involucro_context
40 from planemo.shed import tool_shed_url
41 from planemo.virtualenv import DEFAULT_PYTHON_VERSION
42 from .api import (
43 DEFAULT_ADMIN_API_KEY,
44 gi,
45 user_api_key,
46 )
47 from .distro_tools import (
48 DISTRO_TOOLS_ID_TO_PATH
49 )
50 from .run import (
51 setup_common_startup_args,
52 setup_venv,
53 )
54 from .workflows import (
55 find_tool_ids,
56 import_workflow,
57 install_shed_repos,
58 )
59
60
61 NO_TEST_DATA_MESSAGE = (
62 "planemo couldn't find a target test-data directory, you should likely "
63 "create a test-data directory or pass an explicit path using --test_data."
64 )
65
66 WEB_SERVER_CONFIG_TEMPLATE = """
67 [server:${server_name}]
68 use = egg:Paste#http
69 port = ${port}
70 host = ${host}
71 use_threadpool = True
72 threadpool_kill_thread_limit = 10800
73 [app:main]
74 paste.app_factory = galaxy.web.buildapp:app_factory
75 static_dir = static/
76 """
77
78 TOOL_CONF_TEMPLATE = """<toolbox>
79 <tool file="data_source/upload.xml" />
80 ${tool_definition}
81 </toolbox>
82 """
83
84 SHED_TOOL_CONF_TEMPLATE = """<?xml version="1.0"?>
85 <toolbox tool_path="${shed_tool_path}">
86 </toolbox>
87 """
88
89 SHED_DATA_MANAGER_CONF_TEMPLATE = """<?xml version="1.0"?>
90 <data_managers>
91 </data_managers>
92 """
93
94 EMPTY_JOB_METRICS_TEMPLATE = """<?xml version="1.0"?>
95 <job_metrics>
96 </job_metrics>
97 """
98
99 TOOL_SHEDS_CONF = """<tool_sheds>
100 <tool_shed name="Target Shed" url="${shed_target_url}" />
101 </tool_sheds>
102 """
103
104 JOB_CONFIG_LOCAL = """<job_conf>
105 <plugins>
106 <plugin id="planemo_runner" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
107 </plugins>
108 <handlers>
109 <handler id="main"/>
110 </handlers>
111 <destinations default="planemo_dest">
112 <destination id="planemo_dest" runner="planemo_runner">
113 <param id="require_container">${require_container}</param>
114 <param id="docker_enabled">${docker_enable}</param>
115 <param id="docker_sudo">${docker_sudo}</param>
116 <param id="docker_sudo_cmd">${docker_sudo_cmd}</param>
117 <param id="docker_cmd">${docker_cmd}</param>
118 ${docker_host_param}
119 </destination>
120 <destination id="upload_dest" runner="planemo_runner">
121 <param id="docker_enabled">false</param>
122 </destination>
123 </destinations>
124 <tools>
125 <tool id="upload1" destination="upload_dest" />
126 </tools>
127 </job_conf>
128 """
129
130 LOGGING_TEMPLATE = """
131 ## Configure Python loggers.
132 [loggers]
133 keys = root,paste,displayapperrors,galaxydeps,galaxymasterapikey,galaxy
134
135 [handlers]
136 keys = console
137
138 [formatters]
139 keys = generic
140
141 [logger_root]
142 level = WARN
143 handlers = console
144
145 [logger_paste]
146 level = WARN
147 handlers = console
148 qualname = paste
149 propagate = 0
150
151 [logger_galaxydeps]
152 level = DEBUG
153 handlers = console
154 qualname = galaxy.tools.deps
155 propagate = 0
156
157 [logger_galaxymasterapikey]
158 level = WARN
159 handlers = console
160 qualname = galaxy.web.framework.webapp
161 propagate = 0
162
163 [logger_displayapperrors]
164 level = ERROR
165 handlers =
166 qualname = galaxy.datatypes.display_applications.application
167 propagate = 0
168
169 [logger_galaxy]
170 level = ${log_level}
171 handlers = console
172 qualname = galaxy
173 propagate = 0
174
175 [handler_console]
176 class = StreamHandler
177 args = (sys.stderr,)
178 level = DEBUG
179 formatter = generic
180
181 [formatter_generic]
182 format = %(asctime)s %(levelname)-5.5s [%(name)s] %(message)s
183 """
184
185 REFGENIE_CONFIG_TEMPLATE = """
186 config_version: 0.3
187 genome_folder: '%s'
188 genome_servers: ['http://refgenomes.databio.org']
189 genomes: null
190 """
191
192 EMPTY_TOOL_CONF_TEMPLATE = """<toolbox></toolbox>"""
193
194 DEFAULT_GALAXY_BRANCH = "master"
195 DEFAULT_GALAXY_SOURCE = "https://github.com/galaxyproject/galaxy"
196 CWL_GALAXY_SOURCE = "https://github.com/common-workflow-language/galaxy"
197
198 DATABASE_LOCATION_TEMPLATE = "sqlite:///%s?isolation_level=IMMEDIATE"
199
200 COMMAND_STARTUP_COMMAND = './scripts/common_startup.sh ${COMMON_STARTUP_ARGS}'
201
202 CLEANUP_IGNORE_ERRORS = True
203 DEFAULT_GALAXY_BRAND = 'Configured by Planemo'
204 DEFAULT_TOOL_INSTALL_TIMEOUT = 60 * 60 * 1
205 UNINITIALIZED = object()
206
207
208 @contextlib.contextmanager
209 def galaxy_config(ctx, runnables, **kwds):
210 """Set up a ``GalaxyConfig`` in an auto-cleaned context."""
211 c = local_galaxy_config
212 if kwds.get("dockerize", False):
213 c = docker_galaxy_config
214 elif kwds.get("external", False):
215 c = external_galaxy_config
216 log_thread = None
217 try:
218 with c(ctx, runnables, **kwds) as config:
219 if kwds.get('daemon'):
220 log_thread = threading.Thread(target=read_log, args=(ctx, config.log_file))
221 log_thread.daemon = True
222 log_thread.start()
223 yield config
224 finally:
225 if log_thread:
226 log_thread.join(1)
227
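# Usage sketch (editorial addition, not part of the upstream changeset):
# ``galaxy_config`` dispatches to the local, Docker or external variant based on
# kwds and yields the resulting config inside an auto-cleaned context. ``ctx``
# and ``runnables`` below are hypothetical placeholders for a planemo command
# context and parsed runnables.
#
#     with galaxy_config(ctx, runnables, port=9090, daemon=True) as config:
#         shell(config.startup_command(ctx, daemon=True), env=config.env)
#         # ... interact via config.gi / config.user_gi ...
#
# Leaving the ``with`` block joins the optional log-reader thread and removes
# any temporary configuration directory that was created.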
228
229 def read_log(ctx, log_path):
230 log_fh = None
231 e = threading.Event()
232 try:
233 while e:
234 if os.path.exists(log_path):
235 if not log_fh:
236 # Open in append so we start at the end of the log file
237 log_fh = open(log_path, 'a+')
238 log_lines = log_fh.read()
239 if log_lines:
240 ctx.log(log_lines)
241 e.wait(1)
242 finally:
243 if log_fh:
244 log_fh.close()
245
246
247 def simple_docker_volume(path):
248 path = os.path.abspath(path)
249 return DockerVolume("%s:%s:rw" % (path, path))
250
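# Illustrative note (assumption, not from the upstream file): ``DockerVolume``
# accepts a "host_path:container_path:mode" specification, so the helper above
# mounts the same absolute path read/write inside the container, e.g.
#
#     simple_docker_volume("tools/seqtk")
#     # -> DockerVolume("/abs/path/to/tools/seqtk:/abs/path/to/tools/seqtk:rw")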
251
252 @contextlib.contextmanager
253 def docker_galaxy_config(ctx, runnables, for_tests=False, **kwds):
254 """Set up a ``GalaxyConfig`` for Docker container."""
255 test_data_dir = _find_test_data(runnables, **kwds)
256
257 with _config_directory(ctx, **kwds) as config_directory:
258 def config_join(*args):
259 return os.path.join(config_directory, *args)
260
261 ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
262 _handle_job_metrics(config_directory, kwds)
263 _handle_refgenie_config(config_directory, kwds)
264
265 shed_tool_conf = "config/shed_tool_conf.xml"
266 all_tool_paths = _all_tool_paths(runnables, **kwds)
267
268 tool_directories = set([]) # Things to mount...
269 for tool_path in all_tool_paths:
270 directory = os.path.dirname(os.path.normpath(tool_path))
271 if os.path.exists(directory):
272 tool_directories.add(directory)
273
274 # TODO: remap these.
275 tool_volumes = []
276 for tool_directory in tool_directories:
277 volume = simple_docker_volume(tool_directory)
278 tool_volumes.append(volume)
279
280 empty_tool_conf = config_join("empty_tool_conf.xml")
281
282 tool_conf = config_join("tool_conf.xml")
283
284 shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
285 _ensure_directory(shed_tool_path)
286
287 sheds_config_path = _configure_sheds_config_file(
288 ctx, config_directory, **kwds
289 )
290 port = _get_port(kwds)
291 properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
292 _handle_container_resolution(ctx, kwds, properties)
293 master_api_key = _get_master_api_key(kwds)
294
295 template_args = dict(
296 shed_tool_path=shed_tool_path,
297 tool_conf=tool_conf,
298 )
299 tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
300
301 _write_tool_conf(ctx, all_tool_paths, tool_conf)
302 write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
303
304 properties.update(dict(
305 tool_config_file=tool_config_file,
306 tool_sheds_config_file=sheds_config_path,
307 migrated_tools_config=empty_tool_conf,
308 ))
309
310 server_name = "planemo%d" % random.randint(0, 100000)
311
312 # Value substitutions in Galaxy properties - for consistency with
313 # non-Dockerized version.
314 template_args = dict(
315 )
316 env = _build_env_for_galaxy(properties, template_args)
317 env["NONUSE"] = "nodejs,proftp,reports"
318 if ctx.verbose:
319 env["GALAXY_LOGGING"] = "full"
320
321 # TODO: setup FTP upload dir and disable FTP server in container.
322 _build_test_env(properties, env)
323
324 docker_target_kwds = docker_host_args(**kwds)
325 volumes = tool_volumes + [simple_docker_volume(config_directory)]
326 export_directory = kwds.get("export_directory", None)
327 if export_directory is not None:
328 volumes.append(DockerVolume("%s:/export:rw" % export_directory))
329
330 # TODO: Allow this to be real Docker volumes and allow multiple.
331 extra_volume = kwds.get("docker_extra_volume")
332 if extra_volume:
333 volumes.append(simple_docker_volume(extra_volume))
334 yield DockerGalaxyConfig(
335 ctx,
336 config_directory,
337 env,
338 test_data_dir,
339 port,
340 server_name,
341 master_api_key,
342 runnables,
343 docker_target_kwds=docker_target_kwds,
344 volumes=volumes,
345 export_directory=export_directory,
346 kwds=kwds,
347 )
348
349
350 @contextlib.contextmanager
351 def local_galaxy_config(ctx, runnables, for_tests=False, **kwds):
352 """Set up a ``GalaxyConfig`` in an auto-cleaned context."""
353 test_data_dir = _find_test_data(runnables, **kwds)
354 tool_data_table = _find_tool_data_table(
355 runnables,
356 test_data_dir=test_data_dir,
357 **kwds
358 )
359 data_manager_config_paths = [r.data_manager_conf_path for r in runnables if r.data_manager_conf_path]
360 galaxy_root = _find_galaxy_root(ctx, **kwds)
361 install_galaxy = kwds.get("install_galaxy", False)
362 if galaxy_root is not None:
363 if os.path.isdir(galaxy_root) and not os.listdir(galaxy_root):
364 os.rmdir(galaxy_root)
365 if os.path.isdir(galaxy_root) and install_galaxy:
366 raise Exception("%s is an existing non-empty directory, cannot install Galaxy again" % galaxy_root)
367
368 # Duplicate block in docker variant above.
369 if kwds.get("mulled_containers", False) and not kwds.get("docker", False):
370 if ctx.get_option_source("docker") != OptionSource.cli:
371 kwds["docker"] = True
372 else:
373 raise Exception("Specified no docker and mulled containers together.")
374
375 with _config_directory(ctx, **kwds) as config_directory:
376 def config_join(*args):
377 return os.path.join(config_directory, *args)
378
379 install_env = {}
380 if kwds.get('galaxy_skip_client_build', True):
381 install_env['GALAXY_SKIP_CLIENT_BUILD'] = '1'
382 if galaxy_root is None:
383 galaxy_root = config_join("galaxy-dev")
384 if not os.path.isdir(galaxy_root):
385 _build_eggs_cache(ctx, install_env, kwds)
386 _install_galaxy(ctx, galaxy_root, install_env, kwds)
387
388 if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
389 # on Python 3 we use gunicorn,
390 # which requires 'main' as server name
391 server_name = 'main'
392 else:
393 server_name = "planemo%d" % random.randint(0, 100000)
394 # Once we no longer have to support releases earlier than 18.01, try putting
395 # these files somewhere better than alongside Galaxy.
396 log_file = "%s.log" % server_name
397 pid_file = "%s.pid" % server_name
398 ensure_dependency_resolvers_conf_configured(ctx, kwds, os.path.join(config_directory, "resolvers_conf.xml"))
399 _handle_job_config_file(config_directory, server_name, kwds)
400 _handle_job_metrics(config_directory, kwds)
401 _handle_refgenie_config(config_directory, kwds)
402 file_path = kwds.get("file_path") or config_join("files")
403 _ensure_directory(file_path)
404
405 tool_dependency_dir = kwds.get("tool_dependency_dir") or config_join("deps")
406 _ensure_directory(tool_dependency_dir)
407
408 shed_tool_conf = kwds.get("shed_tool_conf") or config_join("shed_tools_conf.xml")
409 all_tool_paths = _all_tool_paths(runnables, **kwds)
410 empty_tool_conf = config_join("empty_tool_conf.xml")
411
412 tool_conf = config_join("tool_conf.xml")
413
414 shed_data_manager_config_file = config_join("shed_data_manager_conf.xml")
415
416 shed_tool_path = kwds.get("shed_tool_path") or config_join("shed_tools")
417 _ensure_directory(shed_tool_path)
418
419 sheds_config_path = _configure_sheds_config_file(
420 ctx, config_directory, **kwds
421 )
422
423 database_location = config_join("galaxy.sqlite")
424 master_api_key = _get_master_api_key(kwds)
425 dependency_dir = os.path.join(config_directory, "deps")
426 _ensure_directory(shed_tool_path)
427 port = _get_port(kwds)
428 template_args = dict(
429 port=port,
430 host=kwds.get("host", "127.0.0.1"),
431 server_name=server_name,
432 temp_directory=config_directory,
433 shed_tool_path=shed_tool_path,
434 database_location=database_location,
435 tool_conf=tool_conf,
436 debug=kwds.get("debug", "true"),
437 id_secret=kwds.get("id_secret", "test_secret"),
438 log_level="DEBUG" if ctx.verbose else "INFO",
439 )
440 tool_config_file = "%s,%s" % (tool_conf, shed_tool_conf)
441 # Set up both galaxy_email and the older test user test@bx.psu.edu
442 # as admins for command_line, etc...
443 properties = _shared_galaxy_properties(config_directory, kwds, for_tests=for_tests)
444 properties.update(dict(
445 server_name="main",
446 ftp_upload_dir_template="${ftp_upload_dir}",
447 ftp_upload_purge="False",
448 ftp_upload_dir=test_data_dir or os.path.abspath('.'),
449 ftp_upload_site="Test Data",
450 check_upload_content="False",
451 tool_dependency_dir=dependency_dir,
452 file_path=file_path,
453 new_file_path="${temp_directory}/tmp",
454 tool_config_file=tool_config_file,
455 tool_sheds_config_file=sheds_config_path,
456 manage_dependency_relationships="False",
457 job_working_directory="${temp_directory}/job_working_directory",
458 template_cache_path="${temp_directory}/compiled_templates",
459 citation_cache_type="file",
460 citation_cache_data_dir="${temp_directory}/citations/data",
461 citation_cache_lock_dir="${temp_directory}/citations/lock",
462 database_auto_migrate="True",
463 enable_beta_tool_formats="True",
464 id_secret="${id_secret}",
465 log_level="${log_level}",
466 debug="${debug}",
467 watch_tools="auto",
468 default_job_shell="/bin/bash", # For conda dependency resolution
469 tool_data_table_config_path=tool_data_table,
470 data_manager_config_file=",".join(data_manager_config_paths) or None, # without 'or None' may raise IOError in galaxy (see #946)
471 integrated_tool_panel_config=("${temp_directory}/"
472 "integrated_tool_panel_conf.xml"),
473 migrated_tools_config=empty_tool_conf,
474 test_data_dir=test_data_dir, # TODO: make gx respect this
475 shed_data_manager_config_file=shed_data_manager_config_file,
476 ))
477 _handle_container_resolution(ctx, kwds, properties)
478 write_file(config_join("logging.ini"), _sub(LOGGING_TEMPLATE, template_args))
479 properties["database_connection"] = _database_connection(database_location, **kwds)
480
481 _handle_kwd_overrides(properties, kwds)
482
483 # TODO: consider following property
484 # watch_tool = False
485 # datatypes_config_file = config/datatypes_conf.xml
486 # welcome_url = /static/welcome.html
487 # logo_url = /
488 # sanitize_all_html = True
489 # serve_xss_vulnerable_mimetypes = False
490 # track_jobs_in_database = None
491 # outputs_to_working_directory = False
492 # retry_job_output_collection = 0
493
494 env = _build_env_for_galaxy(properties, template_args)
495 env.update(install_env)
496 _build_test_env(properties, env)
497 env['GALAXY_TEST_SHED_TOOL_CONF'] = shed_tool_conf
498 env['GALAXY_TEST_DBURI'] = properties["database_connection"]
499
500 env["GALAXY_TEST_UPLOAD_ASYNC"] = "false"
501 env["GALAXY_TEST_LOGGING_CONFIG"] = config_join("logging.ini")
502 env["GALAXY_DEVELOPMENT_ENVIRONMENT"] = "1"
503 # Following are needed in 18.01 to prevent Galaxy from changing log and pid.
504 # https://github.com/galaxyproject/planemo/issues/788
505 env["GALAXY_LOG"] = log_file
506 env["GALAXY_PID"] = pid_file
507 web_config = _sub(WEB_SERVER_CONFIG_TEMPLATE, template_args)
508 write_file(config_join("galaxy.ini"), web_config)
509 _write_tool_conf(ctx, all_tool_paths, tool_conf)
510 write_file(empty_tool_conf, EMPTY_TOOL_CONF_TEMPLATE)
511
512 shed_tool_conf_contents = _sub(SHED_TOOL_CONF_TEMPLATE, template_args)
513 # Write a new shed_tool_conf.xml if needed.
514 write_file(shed_tool_conf, shed_tool_conf_contents, force=False)
515
516 write_file(shed_data_manager_config_file, SHED_DATA_MANAGER_CONF_TEMPLATE)
517
518 yield LocalGalaxyConfig(
519 ctx,
520 config_directory,
521 env,
522 test_data_dir,
523 port,
524 server_name,
525 master_api_key,
526 runnables,
527 galaxy_root,
528 kwds,
529 )
530
531
532 def _all_tool_paths(runnables, **kwds):
533 tool_paths = [r.path for r in runnables if r.has_tools and not r.data_manager_conf_path]
534 all_tool_paths = list(tool_paths) + list(kwds.get("extra_tools", []))
535 for runnable in runnables:
536 if runnable.type.name == "galaxy_workflow":
537 tool_ids = find_tool_ids(runnable.path)
538 for tool_id in tool_ids:
539 tool_paths = DISTRO_TOOLS_ID_TO_PATH.get(tool_id)
540 if tool_paths:
541 if isinstance(tool_paths, str):
542 tool_paths = [tool_paths]
543 all_tool_paths.extend(tool_paths)
544
545 return all_tool_paths
546
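# Sketch of what ``_all_tool_paths`` returns (paths are hypothetical): tool paths
# from runnables that have tools, any ``extra_tools`` passed in kwds, plus distro
# tool paths resolved from tool ids referenced by Galaxy workflow runnables.
#
#     _all_tool_paths(runnables, extra_tools=["tools/extra_tool.xml"])
#     # -> ["tools/my_tool.xml", "tools/extra_tool.xml", ...distro workflow tool paths]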
547
548 def _shared_galaxy_properties(config_directory, kwds, for_tests):
549 """Setup properties useful for local and Docker Galaxy instances.
550
551 Most things related to paths, etc... are very different between Galaxy
552 modalities and many taken care of internally to the container in that mode.
553 But this method sets up API stuff, tool, and job stuff that can be shared.
554 """
555 master_api_key = _get_master_api_key(kwds)
556 user_email = _user_email(kwds)
557 properties = {
558 'master_api_key': master_api_key,
559 'admin_users': "%s,test@bx.psu.edu" % user_email,
560 'expose_dataset_path': "True",
561 'cleanup_job': 'never',
562 'collect_outputs_from': "job_working_directory",
563 'allow_path_paste': "True",
564 'check_migrate_tools': "False",
565 'use_cached_dependency_manager': str(kwds.get("conda_auto_install", False)),
566 'brand': kwds.get("galaxy_brand", DEFAULT_GALAXY_BRAND),
567 'strict_cwl_validation': str(not kwds.get("non_strict_cwl", False)),
568 }
569 if kwds.get("galaxy_single_user", True):
570 properties['single_user'] = user_email
571
572 if for_tests:
573 empty_dir = os.path.join(config_directory, "empty")
574 _ensure_directory(empty_dir)
575 properties["tour_config_dir"] = empty_dir
576 properties["interactive_environment_plugins_directory"] = empty_dir
577 properties["visualization_plugins_directory"] = empty_dir
578 properties["refgenie_config_file"] = kwds.get('refgenie_config_file', '')
579 return properties
580
581
582 @contextlib.contextmanager
583 def external_galaxy_config(ctx, runnables, for_tests=False, **kwds):
584 yield BaseGalaxyConfig(
585 ctx=ctx,
586 galaxy_url=kwds.get("galaxy_url", None),
587 master_api_key=_get_master_api_key(kwds),
588 user_api_key=kwds.get("galaxy_user_key", None),
589 runnables=runnables,
590 kwds=kwds
591 )
592
593
594 def _get_master_api_key(kwds):
595 master_api_key = kwds.get("galaxy_admin_key") or DEFAULT_ADMIN_API_KEY
596 return master_api_key
597
598
599 def _get_port(kwds):
600 port = int(kwds.get("port", 9090))
601 return port
602
603
604 def _user_email(kwds):
605 user_email = kwds.get("galaxy_email")
606 return user_email
607
608
609 @contextlib.contextmanager
610 def _config_directory(ctx, **kwds):
611 config_directory = kwds.get("config_directory", None)
612 created_config_directory = False
613 if not config_directory:
614 created_config_directory = True
615 config_directory = os.path.realpath(mkdtemp())
616 ctx.vlog("Created directory for Galaxy configuration [%s]" % config_directory)
617 try:
618 yield config_directory
619 finally:
620 cleanup = not kwds.get("no_cleanup", False)
621 if created_config_directory and cleanup:
622 shutil.rmtree(config_directory)
623
624
625 @add_metaclass(abc.ABCMeta)
626 class GalaxyInterface(object):
627 """Abstraction around a Galaxy instance.
628
629 Description of a Galaxy instance and how to interact with it - this could
630 potentially be a remote, already running instance or an instance Planemo manages
631 to execute some task(s).
632 """
633
634 @abc.abstractproperty
635 def gi(self):
636 """Return an admin bioblend Galaxy instance for API interactions."""
637
638 @abc.abstractproperty
639 def user_gi(self):
640 """Return a user-backed bioblend Galaxy instance for API interactions."""
641
642 @abc.abstractmethod
643 def install_repo(self, *args, **kwds):
644 """Install specified tool shed repository."""
645
646 @abc.abstractproperty
647 def tool_shed_client(self):
648 """Return a admin bioblend tool shed client."""
649
650 @abc.abstractmethod
651 def wait_for_all_installed(self):
652 """Wait for all queued up repositories installs to complete."""
653
654 @abc.abstractmethod
655 def install_workflows(self):
656 """Install all workflows configured with these planemo arguments."""
657
658 @abc.abstractmethod
659 def workflow_id(self, path):
660 """Get installed workflow API ID for input path."""
661
662 @abc.abstractproperty
663 def version_major(self):
664 """Return target Galaxy version."""
665
666 @abc.abstractproperty
667 def user_api_config(self):
668 """Return the API indicated configuration for user session.
669
670 Calling .config.get_config() with admin GI session would yield
671 a different object (admins have different view of Galaxy's
672 configuration).
673 """
674
675 @property
676 def user_is_admin(self):
677 return self.user_api_config["is_admin_user"]
678
679
680 @add_metaclass(abc.ABCMeta)
681 class GalaxyConfig(GalaxyInterface):
682 """Specialization of GalaxyInterface for Galaxy instances Planemo manages itself.
683
684 This assumes more than an API connection is available - Planemo needs to be able to
685 start and stop the Galaxy instance, recover logs, etc... There are currently two
686 implementations - a locally executed Galaxy and one running inside a Docker containe
687 """
688
689 @abc.abstractproperty
690 def kill(self):
691 """Stop the running instance."""
692
693 @abc.abstractmethod
694 def startup_command(self, ctx, **kwds):
695 """Return a shell command used to startup this instance.
696
697 Among other common planmo kwds, this should respect the
698 ``daemon`` keyword.
699 """
700
701 @abc.abstractproperty
702 def log_contents(self):
703 """Retrieve text of log for running Galaxy instance."""
704
705 @abc.abstractmethod
706 def cleanup(self):
707 """Cleanup allocated resources to run this instance."""
708
709 @abc.abstractproperty
710 def use_path_paste(self):
711 """Use path paste to upload data.
712
713 This will only be an option if the target user key is an
714 admin user key.
715 """
716
717
718 class BaseGalaxyConfig(GalaxyInterface):
719
720 def __init__(
721 self,
722 ctx,
723 galaxy_url,
724 master_api_key,
725 user_api_key,
726 runnables,
727 kwds,
728 ):
729 self._ctx = ctx
730 self.galaxy_url = galaxy_url
731 self.master_api_key = master_api_key
732 self._user_api_key = user_api_key
733 self.runnables = runnables
734 self._kwds = kwds
735 self._workflow_ids = {}
736
737 self._target_version = UNINITIALIZED
738 self._target_user_config = UNINITIALIZED
739
740 @property
741 def gi(self):
742 assert self.galaxy_url
743 return gi(url=self.galaxy_url, key=self.master_api_key)
744
745 @property
746 def user_gi(self):
747 user_api_key = self.user_api_key
748 assert user_api_key
749 return self._gi_for_key(user_api_key)
750
751 @property
752 def user_api_key(self):
753 # TODO: thread-safe
754 if self._user_api_key is None:
755 # TODO: respect --galaxy_email - seems like a real bug
756 self._user_api_key = user_api_key(self.gi)
757
758 return self._user_api_key
759
760 def _gi_for_key(self, key):
761 assert self.galaxy_url
762 return gi(url=self.galaxy_url, key=key)
763
764 def install_repo(self, *args, **kwds):
765 self.tool_shed_client.install_repository_revision(
766 *args, **kwds
767 )
768
769 @property
770 def tool_shed_client(self):
771 return self.gi.toolShed
772
773 def wait_for_all_installed(self):
774 def status_ready(repo):
775 status = repo["status"]
776 if status in ["Installing", "New"]:
777 return None
778 if status == "Installed":
779 return True
780 raise Exception("Error installing repo status is %s" % status)
781
782 def ready():
783 repos = self.tool_shed_client.get_repositories()
784 ready = all(map(status_ready, repos))
785 return ready or None
786
787 wait_on(ready, "galaxy tool installation", timeout=DEFAULT_TOOL_INSTALL_TIMEOUT)
788
789 def install_workflows(self):
790 for runnable in self.runnables:
791 if runnable.type.name in ["galaxy_workflow", "cwl_workflow"] and not runnable.is_remote_workflow_uri:
792 self._install_workflow(runnable)
793
794 def _install_workflow(self, runnable):
795 if self._kwds.get("shed_install") and (self._kwds.get("engine") != "external_galaxy" or self._kwds.get("galaxy_admin_key")):
796 install_shed_repos(runnable,
797 self.gi,
798 self._kwds.get("ignore_dependency_problems", False),
799 self._kwds.get("install_tool_dependencies", False),
800 self._kwds.get("install_resolver_dependencies", True),
801 self._kwds.get("install_repository_dependencies", True))
802
803 default_from_path = self._kwds.get("workflows_from_path", False)
804 # TODO: Allow serialization so this doesn't need to assume a
805 # shared filesystem with Galaxy server.
806 from_path = default_from_path or (runnable.type.name == "cwl_workflow")
807 workflow = import_workflow(
808 runnable.path, admin_gi=self.gi, user_gi=self.user_gi, from_path=from_path
809 )
810 self._workflow_ids[runnable.path] = workflow["id"]
811
812 def workflow_id_for_runnable(self, runnable):
813 if runnable.is_remote_workflow_uri:
814 workflow_id = remote_runnable_to_workflow_id(runnable)
815 else:
816 workflow_id = self.workflow_id(runnable.path)
817 return workflow_id
818
819 def workflow_id(self, path):
820 return self._workflow_ids[path]
821
822 @property
823 def use_path_paste(self):
824 option = self._kwds.get("paste_test_data_paths")
825 if option is None:
826 return self.default_use_path_paste
827 else:
828 return option
829
830 @property
831 def default_use_path_paste(self):
832 return False
833
834 @property
835 def version_major(self):
836 """Return target Galaxy version."""
837 if self._target_version is UNINITIALIZED:
838 self._target_version = self.user_gi.config.get_version()["version_major"]
839 return self._target_version
840
841 @property
842 def user_api_config(self):
843 """Return the API indicated configuration for user session."""
844 if self._target_user_config is UNINITIALIZED:
845 self._target_user_config = self.user_gi.config.get_config()
846 return self._target_user_config
847
848
849 class BaseManagedGalaxyConfig(BaseGalaxyConfig):
850
851 def __init__(
852 self,
853 ctx,
854 config_directory,
855 env,
856 test_data_dir,
857 port,
858 server_name,
859 master_api_key,
860 runnables,
861 kwds,
862 ):
863 galaxy_url = "http://localhost:%d" % port
864 super(BaseManagedGalaxyConfig, self).__init__(
865 ctx=ctx,
866 galaxy_url=galaxy_url,
867 master_api_key=master_api_key,
868 user_api_key=None,
869 runnables=runnables,
870 kwds=kwds
871 )
872 self.config_directory = config_directory
873 self.env = env
874 self.test_data_dir = test_data_dir
875 self.port = port
876 self.server_name = server_name
877
878
879 class DockerGalaxyConfig(BaseManagedGalaxyConfig):
880 """A :class:`GalaxyConfig` description of a Dockerized Galaxy instance."""
881
882 def __init__(
883 self,
884 ctx,
885 config_directory,
886 env,
887 test_data_dir,
888 port,
889 server_name,
890 master_api_key,
891 runnables,
892 docker_target_kwds,
893 volumes,
894 export_directory,
895 kwds,
896 ):
897 super(DockerGalaxyConfig, self).__init__(
898 ctx,
899 config_directory,
900 env,
901 test_data_dir,
902 port,
903 server_name,
904 master_api_key,
905 runnables,
906 kwds,
907 )
908 self.docker_target_kwds = docker_target_kwds
909 self.volumes = volumes
910 self.export_directory = export_directory
911
912 def kill(self):
913 """Kill planemo container..."""
914 kill_command = docker_util.kill_command(
915 self.server_name,
916 **self.docker_target_kwds
917 )
918 return shell(kill_command)
919
920 def startup_command(self, ctx, **kwds):
921 """Return a shell command used to startup this instance.
922
923 Among other common planmo kwds, this should respect the
924 ``daemon`` keyword.
925 """
926 daemon = kwds.get("daemon", False)
927 daemon_str = "" if not daemon else " -d"
928 docker_run_extras = "-p %s:80%s" % (self.port, daemon_str)
929 env_directives = ["%s='%s'" % item for item in self.env.items()]
930 image = kwds.get("docker_galaxy_image", "bgruening/galaxy-stable")
931 run_command = docker_util.build_docker_run_command(
932 "", image,
933 interactive=False,
934 env_directives=env_directives,
935 working_directory=None,
936 name=self.server_name,
937 run_extra_arguments=docker_run_extras,
938 set_user=False,
939 volumes=self.volumes,
940 **self.docker_target_kwds
941 )
942 chmod_command = [
943 "chmod",
944 "-R",
945 "o+rwx",
946 self.config_directory,
947 ]
948 if self.export_directory:
949 chmod_command.append(self.export_directory)
950
951 return shell_join(
952 argv_to_str(chmod_command),
953 run_command,
954 )
955
956 @property
957 def log_contents(self):
958 logs_command = docker_util.logs_command(
959 self.server_name,
960 **self.docker_target_kwds
961 )
962 output, _ = communicate(
963 logs_command
964 )
965 return output
966
967 def cleanup(self):
968 shutil.rmtree(self.config_directory, CLEANUP_IGNORE_ERRORS)
969
970
971 class LocalGalaxyConfig(BaseManagedGalaxyConfig):
972 """A local, non-containerized implementation of :class:`GalaxyConfig`."""
973
974 def __init__(
975 self,
976 ctx,
977 config_directory,
978 env,
979 test_data_dir,
980 port,
981 server_name,
982 master_api_key,
983 runnables,
984 galaxy_root,
985 kwds,
986 ):
987 super(LocalGalaxyConfig, self).__init__(
988 ctx,
989 config_directory,
990 env,
991 test_data_dir,
992 port,
993 server_name,
994 master_api_key,
995 runnables,
996 kwds,
997 )
998 self.galaxy_root = galaxy_root
999
1000 def kill(self):
1001 if self._ctx.verbose:
1002 shell(["ps", "ax"])
1003 exists = os.path.exists(self.pid_file)
1004 print("Killing pid file [%s]" % self.pid_file)
1005 print("pid_file exists? [%s]" % exists)
1006 if exists:
1007 print("pid_file contents are [%s]" % open(self.pid_file, "r").read())
1008 kill_pid_file(self.pid_file)
1009
1010 def startup_command(self, ctx, **kwds):
1011 """Return a shell command used to startup this instance.
1012
1013 Among other common planemo kwds, this should respect the
1014 ``daemon`` keyword.
1015 """
1016 daemon = kwds.get("daemon", False)
1017 # TODO: Allow running dockerized Galaxy here instead.
1018 setup_venv_command = setup_venv(ctx, kwds)
1019 run_script = "%s $COMMON_STARTUP_ARGS" % shlex_quote(os.path.join(self.galaxy_root, "run.sh"))
1020 if daemon:
1021 run_script += " --daemon"
1022 self.env["GALAXY_RUN_ALL"] = "1"
1023 else:
1024 run_script += " --server-name %s" % shlex_quote(self.server_name)
1025 server_ini = os.path.join(self.config_directory, "galaxy.ini")
1026 self.env["GALAXY_CONFIG_FILE"] = server_ini
1027 if parse_version(kwds.get('galaxy_python_version') or DEFAULT_PYTHON_VERSION) >= parse_version('3'):
1028 # We need to start under gunicorn
1029 self.env['APP_WEBSERVER'] = 'gunicorn'
1030 self.env['GUNICORN_CMD_ARGS'] = "--timeout={timeout} --capture-output --bind={host}:{port} --name={server_name}".format(
1031 timeout=DEFAULT_TOOL_INSTALL_TIMEOUT,
1032 host=kwds.get('host', '127.0.0.1'),
1033 port=kwds['port'],
1034 server_name=self.server_name,
1035 )
1036 cd_to_galaxy_command = ['cd', self.galaxy_root]
1037 return shell_join(
1038 cd_to_galaxy_command,
1039 setup_venv_command,
1040 setup_common_startup_args(),
1041 run_script,
1042 )
1043
1044 @property
1045 def log_file(self):
1046 """Log file used when planemo serves this Galaxy instance."""
1047 file_name = "%s.log" % self.server_name
1048 return os.path.join(self.galaxy_root, file_name)
1049
1050 @property
1051 def pid_file(self):
1052 pid_file_name = "%s.pid" % self.server_name
1053 return os.path.join(self.galaxy_root, pid_file_name)
1054
1055 @property
1056 def log_contents(self):
1057 if not os.path.exists(self.log_file):
1058 return ""
1059 with open(self.log_file, "r") as f:
1060 return f.read()
1061
1062 def cleanup(self):
1063 shutil.rmtree(self.config_directory, CLEANUP_IGNORE_ERRORS)
1064
1065 @property
1066 def default_use_path_paste(self):
1067 # If Planemo started a local, native Galaxy instance, assume file URLs can be
1068 # pasted.
1069 return self.user_is_admin
1070
1071
1072 def _database_connection(database_location, **kwds):
1073 default_connection = DATABASE_LOCATION_TEMPLATE % database_location
1074 database_connection = kwds.get("database_connection") or default_connection
1075 return database_connection
1076
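# Worked example (values are hypothetical): with the default SQLite template the
# connection string embeds the absolute database path,
#
#     _database_connection("/tmp/planemo_cfg/galaxy.sqlite")
#     # -> "sqlite:////tmp/planemo_cfg/galaxy.sqlite?isolation_level=IMMEDIATE"
#
# while an explicit ``database_connection`` kwd (e.g. a postgresql:// URL) takes
# precedence over the generated default.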
1077
1078 def _find_galaxy_root(ctx, **kwds):
1079 root_prop = "galaxy_root"
1080 cwl = kwds.get("cwl", False)
1081 if cwl:
1082 root_prop = "cwl_galaxy_root"
1083 galaxy_root = kwds.get(root_prop, None)
1084 if galaxy_root:
1085 return galaxy_root
1086 else:
1087 par_dir = os.getcwd()
1088 while True:
1089 run = os.path.join(par_dir, "run.sh")
1090 config = os.path.join(par_dir, "config")
1091 if os.path.isfile(run) and os.path.isdir(config):
1092 return par_dir
1093 new_par_dir = os.path.dirname(par_dir)
1094 if new_par_dir == par_dir:
1095 break
1096 par_dir = new_par_dir
1097 return None
1098
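# Behaviour sketch (directory names are hypothetical): when no galaxy_root (or
# cwl_galaxy_root) kwd is supplied, the search walks upward from the current
# working directory until it finds a directory containing both ``run.sh`` and a
# ``config/`` subdirectory, i.e. a Galaxy checkout, and returns None if the
# filesystem root is reached without a match.
#
#     # cwd = /home/user/galaxy/tools/my_tool  ->  returns /home/user/galaxy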
1099
1100 def _find_test_data(runnables, **kwds):
1101 test_data_search_path = "."
1102 runnables = [r for r in runnables if r.has_tools]
1103 if len(runnables) > 0:
1104 test_data_search_path = runnables[0].test_data_search_path
1105
1106 # Find test data directory associated with path.
1107 test_data = kwds.get("test_data", None)
1108 if test_data:
1109 return os.path.abspath(test_data)
1110 else:
1111 test_data = _search_tool_path_for(test_data_search_path, "test-data")
1112 if test_data:
1113 return test_data
1114 warn(NO_TEST_DATA_MESSAGE)
1115 return None
1116
1117
1118 def _find_tool_data_table(runnables, test_data_dir, **kwds):
1119 tool_data_search_path = "."
1120 runnables = [r for r in runnables if r.has_tools]
1121 if len(runnables) > 0:
1122 tool_data_search_path = runnables[0].tool_data_search_path
1123
1124 tool_data_table = kwds.get("tool_data_table", None)
1125 if tool_data_table:
1126 return os.path.abspath(tool_data_table)
1127 else:
1128 extra_paths = [test_data_dir] if test_data_dir else []
1129 return _search_tool_path_for(
1130 tool_data_search_path,
1131 "tool_data_table_conf.xml.test",
1132 extra_paths,
1133 ) or _search_tool_path_for( # if all else fails just use sample
1134 tool_data_search_path,
1135 "tool_data_table_conf.xml.sample"
1136 )
1137
1138
1139 def _search_tool_path_for(path, target, extra_paths=None):
1140 """Check for presence of a target in different artifact directories."""
1141 if extra_paths is None:
1142 extra_paths = []
1143 if not os.path.isdir(path):
1144 tool_dir = os.path.dirname(path)
1145 else:
1146 tool_dir = path
1147 possible_dirs = [tool_dir, "."] + extra_paths
1148 for possible_dir in possible_dirs:
1149 possible_path = os.path.join(possible_dir, target)
1150 if os.path.exists(possible_path):
1151 return os.path.abspath(possible_path)
1152 return None
1153
1154
1155 def _configure_sheds_config_file(ctx, config_directory, **kwds):
1156 if "shed_target" not in kwds:
1157 kwds = kwds.copy()
1158 kwds["shed_target"] = "toolshed"
1159 shed_target_url = tool_shed_url(ctx, **kwds)
1160 contents = _sub(TOOL_SHEDS_CONF, {"shed_target_url": shed_target_url})
1161 tool_sheds_conf = os.path.join(config_directory, "tool_sheds_conf.xml")
1162 write_file(tool_sheds_conf, contents)
1163 return tool_sheds_conf
1164
1165
1166 def _tool_conf_entry_for(tool_paths):
1167 tool_definitions = ""
1168 for tool_path in tool_paths:
1169 if os.path.isdir(tool_path):
1170 tool_definitions += '''<tool_dir dir="%s" />''' % tool_path
1171 else:
1172 tool_definitions += '''<tool file="%s" />''' % tool_path
1173 return tool_definitions
1174
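# Example of the generated toolbox entries (paths are hypothetical): existing
# directories become ``<tool_dir>`` entries, files become ``<tool>`` entries, and
# the result is substituted into TOOL_CONF_TEMPLATE by ``_write_tool_conf`` below.
#
#     _tool_conf_entry_for(["tools/seqtk_seq.xml", "tools/filters"])
#     # -> '<tool file="tools/seqtk_seq.xml" /><tool_dir dir="tools/filters" />'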
1175
1176 def _install_galaxy(ctx, galaxy_root, env, kwds):
1177 if not kwds.get("no_cache_galaxy", False):
1178 _install_galaxy_via_git(ctx, galaxy_root, env, kwds)
1179 else:
1180 _install_galaxy_via_download(ctx, galaxy_root, env, kwds)
1181
1182
1183 def _install_galaxy_via_download(ctx, galaxy_root, env, kwds):
1184 branch = _galaxy_branch(kwds)
1185 untar_to("https://codeload.github.com/galaxyproject/galaxy/tar.gz/" + branch, tar_args=['-xvzf', '-', 'galaxy-' + branch], dest_dir=galaxy_root)
1186 _install_with_command(ctx, galaxy_root, env, kwds)
1187
1188
1189 def _install_galaxy_via_git(ctx, galaxy_root, env, kwds):
1190 gx_repo = _ensure_galaxy_repository_available(ctx, kwds)
1191 branch = _galaxy_branch(kwds)
1192 command = git.command_clone(ctx, gx_repo, galaxy_root, branch=branch)
1193 exit_code = shell(command, env=env)
1194 if exit_code != 0:
1195 raise Exception("Failed to glone Galaxy via git")
1196 _install_with_command(ctx, galaxy_root, env, kwds)
1197
1198
1199 def _build_eggs_cache(ctx, env, kwds):
1200 if kwds.get("no_cache_galaxy", False):
1201 return None
1202 workspace = ctx.workspace
1203 eggs_path = os.path.join(workspace, "gx_eggs")
1204 if not os.path.exists(eggs_path):
1205 os.makedirs(eggs_path)
1206 env["GALAXY_EGGS_PATH"] = eggs_path
1207
1208
1209 def _galaxy_branch(kwds):
1210 branch = kwds.get("galaxy_branch", None)
1211 if branch is None:
1212 cwl = kwds.get("cwl", False)
1213 branch = "cwl-1.0" if cwl else None
1214 if branch is None:
1215 branch = DEFAULT_GALAXY_BRANCH
1216
1217 return branch
1218
1219
1220 def _galaxy_source(kwds):
1221 source = kwds.get("galaxy_source", None)
1222 if source is None:
1223 cwl = kwds.get("cwl", False)
1224 source = CWL_GALAXY_SOURCE if cwl else None
1225 if source is None:
1226 source = DEFAULT_GALAXY_SOURCE
1227
1228 return source
1229
1230
1231 def _install_with_command(ctx, galaxy_root, env, kwds):
1232 setup_venv_command = setup_venv(ctx, kwds)
1233 install_cmd = shell_join(
1234 setup_venv_command,
1235 setup_common_startup_args(),
1236 COMMAND_STARTUP_COMMAND,
1237 )
1238 exit_code = shell(install_cmd, cwd=galaxy_root, env=env)
1239 if exit_code != 0:
1240 raise Exception("Failed to install Galaxy via command [%s]" % install_cmd)
1241 if not os.path.exists(galaxy_root):
1242 raise Exception("Failed to create Galaxy directory [%s]" % galaxy_root)
1243 if not os.path.exists(os.path.join(galaxy_root, "lib")):
1244 raise Exception("Failed to create Galaxy directory [%s], lib missing" % galaxy_root)
1245
1246
1247 def _ensure_galaxy_repository_available(ctx, kwds):
1248 workspace = ctx.workspace
1249 cwl = kwds.get("cwl", False)
1250 galaxy_source = kwds.get('galaxy_source')
1251 if galaxy_source and galaxy_source != DEFAULT_GALAXY_SOURCE:
1252 sanitized_repo_name = "".join(c if c.isalnum() else '_' for c in kwds['galaxy_source']).rstrip()[:255]
1253 gx_repo = os.path.join(workspace, "gx_repo_%s" % sanitized_repo_name)
1254 else:
1255 gx_repo = os.path.join(workspace, "gx_repo")
1256 if cwl:
1257 gx_repo += "_cwl"
1258 if os.path.exists(gx_repo):
1259 # Convert the git repository from bare to mirror, if needed
1260 shell(['git', '--git-dir', gx_repo, 'config', 'remote.origin.fetch', '+refs/*:refs/*'])
1261 shell(['git', '--git-dir', gx_repo, 'config', 'remote.origin.mirror', 'true'])
1262 # Attempt remote update - but don't fail if there is no network connection, etc...
1263 shell("git --git-dir %s remote update >/dev/null 2>&1" % gx_repo)
1264 else:
1265 remote_repo = _galaxy_source(kwds)
1266 command = git.command_clone(ctx, remote_repo, gx_repo, mirror=True)
1267 shell(command)
1268 return gx_repo
1269
1270
1271 def _build_env_for_galaxy(properties, template_args):
1272 env = {}
1273 for key, value in iteritems(properties):
1274 if value is not None: # Do not override None with empty string
1275 var = "GALAXY_CONFIG_OVERRIDE_%s" % key.upper()
1276 value = _sub(value, template_args)
1277 env[var] = value
1278 return env
1279
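# Worked example (values are hypothetical): every non-None property is exported
# as a GALAXY_CONFIG_OVERRIDE_* environment variable after ``${...}`` placeholders
# are filled in from ``template_args``.
#
#     _build_env_for_galaxy(
#         {"id_secret": "${id_secret}", "debug": "true"},
#         {"id_secret": "test_secret"},
#     )
#     # -> {"GALAXY_CONFIG_OVERRIDE_ID_SECRET": "test_secret",
#     #     "GALAXY_CONFIG_OVERRIDE_DEBUG": "true"}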
1280
1281 def _build_test_env(properties, env):
1282 # Keeping these environment variables around for a little while but
1283 # many are probably not needed as of the following commit.
1284 # https://bitbucket.org/galaxy/galaxy-central/commits/d7dd1f9
1285 test_property_variants = {
1286 'GALAXY_TEST_JOB_CONFIG_FILE': 'job_config_file',
1287 'GALAXY_TEST_MIGRATED_TOOL_CONF': 'migrated_tools_config',
1288 'GALAXY_TEST_TOOL_CONF': 'tool_config_file',
1289 'GALAXY_TEST_FILE_DIR': 'test_data_dir',
1290 'GALAXY_TOOL_DEPENDENCY_DIR': 'tool_dependency_dir',
1291 # Next line would be required for tool shed tests.
1292 # 'GALAXY_TEST_TOOL_DEPENDENCY_DIR': 'tool_dependency_dir',
1293 }
1294 for test_key, gx_key in test_property_variants.items():
1295 value = properties.get(gx_key, None)
1296 if value is not None:
1297 env[test_key] = value
1298
1299
1300 def _handle_job_config_file(config_directory, server_name, kwds):
1301 job_config_file = kwds.get("job_config_file", None)
1302 if not job_config_file:
1303 template_str = JOB_CONFIG_LOCAL
1304 job_config_file = os.path.join(
1305 config_directory,
1306 "job_conf.xml",
1307 )
1308 docker_enable = str(kwds.get("docker", False))
1309 docker_host = kwds.get("docker_host", docker_util.DEFAULT_HOST)
1310 docker_host_param = ""
1311 if docker_host:
1312 docker_host_param = """<param id="docker_host">%s</param>""" % docker_host
1313
1314 conf_contents = Template(template_str).safe_substitute({
1315 "server_name": server_name,
1316 "docker_enable": docker_enable,
1317 "require_container": "false",
1318 "docker_sudo": str(kwds.get("docker_sudo", False)),
1319 "docker_sudo_cmd": str(kwds.get("docker_sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND)),
1320 "docker_cmd": str(kwds.get("docker_cmd", docker_util.DEFAULT_DOCKER_COMMAND)),
1321 "docker_host_param": docker_host_param,
1322 })
1323 write_file(job_config_file, conf_contents)
1324 kwds["job_config_file"] = job_config_file
1325
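# Sketch of the effect (assumed default kwds): without an explicit
# job_config_file a job_conf.xml is rendered from JOB_CONFIG_LOCAL with the
# Docker-related placeholders substituted, and its path is written back into
# kwds so _handle_kwd_overrides later copies it into the Galaxy properties.
#
#     kwds = {"docker": True}
#     _handle_job_config_file("/tmp/cfg", "main", kwds)
#     # writes /tmp/cfg/job_conf.xml with docker_enabled set to "True" and sets
#     # kwds["job_config_file"] = "/tmp/cfg/job_conf.xml"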
1326
1327 def _write_tool_conf(ctx, tool_paths, tool_conf_path):
1328 tool_definition = _tool_conf_entry_for(tool_paths)
1329 tool_conf_template_kwds = dict(tool_definition=tool_definition)
1330 tool_conf_contents = _sub(TOOL_CONF_TEMPLATE, tool_conf_template_kwds)
1331 write_file(tool_conf_path, tool_conf_contents)
1332 ctx.vlog(
1333 "Writing tool_conf to path %s with contents [%s]",
1334 tool_conf_path,
1335 tool_conf_contents,
1336 )
1337
1338
1339 def _handle_container_resolution(ctx, kwds, galaxy_properties):
1340 if kwds.get("mulled_containers", False):
1341 galaxy_properties["enable_beta_mulled_containers"] = "True"
1342 involucro_context = build_involucro_context(ctx, **kwds)
1343 galaxy_properties["involucro_auto_init"] = "False" # Use planemo's
1344 galaxy_properties["involucro_path"] = involucro_context.involucro_bin
1345
1346
1347 def _handle_job_metrics(config_directory, kwds):
1348 metrics_conf = os.path.join(config_directory, "job_metrics_conf.xml")
1349 with open(metrics_conf, "w") as fh:
1350 fh.write(EMPTY_JOB_METRICS_TEMPLATE)
1351 kwds["job_metrics_config_file"] = metrics_conf
1352
1353
1354 def _handle_refgenie_config(config_directory, kwds):
1355 refgenie_dir = os.path.join(config_directory, 'refgenie')
1356 _ensure_directory(refgenie_dir)
1357 refgenie_config = os.path.join(refgenie_dir, "genome_config.yaml")
1358 with open(refgenie_config, "w") as fh:
1359 fh.write(REFGENIE_CONFIG_TEMPLATE % (refgenie_dir))
1360 kwds["refgenie_config_file"] = refgenie_config
1361
1362
1363 def _handle_kwd_overrides(properties, kwds):
1364 kwds_gx_properties = [
1365 'job_config_file',
1366 'job_metrics_config_file',
1367 'dependency_resolvers_config_file',
1368 ]
1369 for prop in kwds_gx_properties:
1370 val = kwds.get(prop, None)
1371 if val:
1372 properties[prop] = val
1373
1374
1375 def _sub(template, args):
1376 if template is None:
1377 return ''
1378 return Template(template).safe_substitute(args)
1379
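# Behaviour example: ``safe_substitute`` fills known placeholders and leaves
# unknown ones untouched instead of raising, which is why the templates above may
# contain placeholders that are only resolved later (values are illustrative).
#
#     _sub("port = ${port}, host = ${host}", {"port": 9090})
#     # -> "port = 9090, host = ${host}"
#     _sub(None, {})
#     # -> ""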
1380
1381 def _ensure_directory(path):
1382 if path is not None and not os.path.exists(path):
1383 os.makedirs(path)
1384
1385
1386 __all__ = (
1387 "DATABASE_LOCATION_TEMPLATE",
1388 "galaxy_config",
1389 )