Mercurial > repos > shellac > sam_consensus_v3
comparison env/lib/python3.9/site-packages/bioblend/galaxy/tools/__init__.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author | shellac |
---|---|
date | Mon, 22 Mar 2021 18:12:50 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:4f3585e2f14b |
---|---|
1 """ | |
2 Contains possible interaction dealing with Galaxy tools. | |
3 """ | |
4 from os.path import basename | |
5 | |
6 from bioblend.galaxy.client import Client | |
7 from bioblend.util import attach_file | |
8 | |
9 | |
class ToolClient(Client):
    """
    Client for Galaxy's ``tools`` API endpoint: querying the toolbox,
    inspecting and running tools, and uploading data to histories.
    """

    def __init__(self, galaxy_instance):
        self.module = 'tools'
        super().__init__(galaxy_instance)

    def get_tools(self, tool_id=None, name=None, trackster=None):
        """
        Get all tools, or filter to specific one(s) via ``name`` or
        ``tool_id``. Pass at most one of ``name`` and ``tool_id``.

        When filtering by ``name``, every tool whose name matches is
        returned; when filtering by ``tool_id``, at most one tool is
        returned.

        :type tool_id: str
        :param tool_id: id of the requested tool

        :type name: str
        :param name: name of the requested tool(s)

        :type trackster: bool
        :param trackster: whether to return only tools that are compatible with
          Trackster

        :rtype: list
        :return: List of tool descriptions.

        .. seealso:: bioblend.galaxy.toolshed.get_repositories()
        """
        if tool_id is not None and name is not None:
            raise ValueError('Provide only one argument between name or tool_id, but not both')
        all_tools = self._raw_get_tool(in_panel=False, trackster=trackster)
        if tool_id is not None:
            # At most one tool can match an id; empty list when none does.
            return [t for t in all_tools if t['id'] == tool_id][:1]
        if name is not None:
            return [t for t in all_tools if t['name'] == name]
        return all_tools

    def get_tool_panel(self):
        """
        Get a list of available tool elements in Galaxy's configured toolbox.

        :rtype: list
        :return: List containing tools (if not in sections) or tool sections
          with nested tool descriptions.

        .. seealso:: bioblend.galaxy.toolshed.get_repositories()
        """
        return self._raw_get_tool(in_panel=True)

    def _raw_get_tool(self, in_panel=None, trackster=None):
        # Low-level GET against the tools endpoint with the given filters.
        return self._get(params={
            'in_panel': in_panel,
            'trackster': trackster,
        })

    def requirements(self, tool_id):
        """
        Return the resolver status for a specific tool.
        This functionality is available only to Galaxy admins.

        :type tool_id: str
        :param tool_id: id of the requested tool

        :rtype: list
        :return: List containing a resolver status dict for each tool
          requirement. For example::

            [{'cacheable': False,
              'dependency_resolver': {'auto_init': True,
                                      'auto_install': False,
                                      'can_uninstall_dependencies': True,
                                      'ensure_channels': 'iuc,conda-forge,bioconda,defaults',
                                      'model_class': 'CondaDependencyResolver',
                                      'prefix': '/mnt/galaxy/tool_dependencies/_conda',
                                      'resolver_type': 'conda',
                                      'resolves_simple_dependencies': True,
                                      'use_local': False,
                                      'versionless': False},
              'dependency_type': 'conda',
              'environment_path': '/mnt/galaxy/tool_dependencies/_conda/envs/__blast@2.10.1',
              'exact': True,
              'model_class': 'MergedCondaDependency',
              'name': 'blast',
              'version': '2.10.1'}]
        """
        url = self._make_url(tool_id) + '/requirements'
        return self._get(url=url)

    def install_dependencies(self, tool_id):
        """
        Install dependencies for a given tool via a resolver.
        This works only for Conda currently.
        This functionality is available only to Galaxy admins.

        :type tool_id: str
        :param tool_id: id of the requested tool

        :rtype: dict
        :return: Tool requirement status
        """
        url = self._make_url(tool_id) + '/install_dependencies'
        return self._post(payload={}, url=url)

    def show_tool(self, tool_id, io_details=False, link_details=False):
        """
        Get details of a given tool.

        :type tool_id: str
        :param tool_id: id of the requested tool

        :type io_details: bool
        :param io_details: whether to get also input and output details

        :type link_details: bool
        :param link_details: whether to get also link details

        :rtype: dict
        :return: Information about the tool's interface
        """
        return self._get(id=tool_id, params={
            'io_details': io_details,
            'link_details': link_details,
        })

    def run_tool(self, history_id, tool_id, tool_inputs, input_format='legacy'):
        """
        Run the tool identified by ``tool_id`` in the history identified by
        ``history_id``, feeding it the inputs described in ``tool_inputs``.

        :type history_id: str
        :param history_id: encoded ID of the history in which to run the tool

        :type tool_id: str
        :param tool_id: ID of the tool to be run

        :type tool_inputs: dict
        :param tool_inputs: dictionary of input datasets and parameters
          for the tool (see below)

        :type input_format: string
        :param input_format: input format for the payload. Possible values are the
          default 'legacy' (where inputs nested inside conditionals
          or repeats are identified with e.g. '<conditional_name>|<input_name>')
          or '21.01' (where inputs inside conditionals or repeats are nested elements).

        :rtype: dict
        :return: Information about outputs and job
          For example::

            {'implicit_collections': [],
             'jobs': [{'create_time': '2019-05-08T12:26:16.067372',
                       'exit_code': None,
                       'id': '7dd125b61b35d782',
                       'model_class': 'Job',
                       'state': 'new',
                       'tool_id': 'cut1',
                       'update_time': '2019-05-08T12:26:16.067389'}],
             'output_collections': [],
             'outputs': [{'create_time': '2019-05-08T12:26:15.997739',
                          'data_type': 'galaxy.datatypes.tabular.Tabular',
                          'deleted': False,
                          'file_ext': 'tabular',
                          'file_size': 0,
                          'genome_build': '?',
                          'hda_ldda': 'hda',
                          'hid': 42,
                          'history_content_type': 'dataset',
                          'history_id': 'df8fe5ddadbf3ab1',
                          'id': 'aeb65580396167f3',
                          'metadata_column_names': None,
                          'metadata_column_types': None,
                          'metadata_columns': None,
                          'metadata_comment_lines': None,
                          'metadata_data_lines': None,
                          'metadata_dbkey': '?',
                          'metadata_delimiter': '\t',
                          'misc_blurb': 'queued',
                          'misc_info': None,
                          'model_class': 'HistoryDatasetAssociation',
                          'name': 'Cut on data 1',
                          'output_name': 'out_file1',
                          'peek': None,
                          'purged': False,
                          'state': 'new',
                          'tags': [],
                          'update_time': '2019-05-08T12:26:16.069798',
                          'uuid': 'd91d10af-7546-45be-baa9-902010661466',
                          'visible': True}]}

        The ``tool_inputs`` dict should contain input datasets and parameters
        in the (largely undocumented) format used by the Galaxy API.
        Some examples can be found in `Galaxy's API test suite
        <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy_test/api/test_tools.py>`_.
        """
        payload = {
            "history_id": history_id,
            "tool_id": tool_id,
            "input_format": input_format,
        }
        # tool_inputs may be a plain dict or an object exposing to_dict()
        # (e.g. a bioblend inputs builder); accept either.
        try:
            inputs = tool_inputs.to_dict()
        except AttributeError:
            inputs = tool_inputs
        payload["inputs"] = inputs
        return self._post(payload)

    def upload_file(self, path, history_id, **keywords):
        """
        Upload the file specified by ``path`` to the history specified by
        ``history_id``.

        :type path: str
        :param path: path of the file to upload

        :type history_id: str
        :param history_id: id of the history where to upload the file

        :type file_name: str
        :param file_name: (optional) name of the new history dataset

        :type file_type: str
        :param file_type: (optional) Galaxy datatype for the new dataset, default is auto

        :type dbkey: str
        :param dbkey: (optional) genome dbkey

        :type to_posix_lines: bool
        :param to_posix_lines: if ``True`` (the default), convert universal line
          endings to POSIX line endings. Set to ``False`` when uploading a gzip,
          bz2 or zip archive containing a binary file

        :type space_to_tab: bool
        :param space_to_tab: whether to convert spaces to tabs. Default is
          ``False``. Applicable only if to_posix_lines is ``True``

        :rtype: dict
        :return: Information about the created upload job
        """
        # Default the dataset name to the file's basename.
        keywords.setdefault("file_name", basename(path))
        payload = self._upload_payload(history_id, **keywords)
        attached = attach_file(path, name=keywords["file_name"])
        payload["files_0|file_data"] = attached
        try:
            return self._post(payload, files_attached=True)
        finally:
            # Always release the file handle, even if the POST fails.
            attached.close()

    def upload_from_ftp(self, path, history_id, **keywords):
        """
        Upload the file specified by ``path`` from the user's FTP directory to
        the history specified by ``history_id``.

        :type path: str
        :param path: path of the file in the user's FTP directory

        :type history_id: str
        :param history_id: id of the history where to upload the file

        See :meth:`upload_file` for the optional parameters.

        :rtype: dict
        :return: Information about the created upload job
        """
        payload = self._upload_payload(history_id, **keywords)
        payload['files_0|ftp_files'] = path
        return self._post(payload)

    def paste_content(self, content, history_id, **kwds):
        """
        Upload a string to a new dataset in the history specified by
        ``history_id``.

        :type content: str
        :param content: content of the new dataset to upload or a list of URLs
          (one per line) to upload

        :type history_id: str
        :param history_id: id of the history where to upload the content

        :rtype: dict
        :return: Information about the created upload job

        See :meth:`upload_file` for the optional parameters.
        """
        payload = self._upload_payload(history_id, **kwds)
        payload["files_0|url_paste"] = content
        return self._post(payload, files_attached=False)

    put_url = paste_content

    def _upload_payload(self, history_id, **keywords):
        # Build the common payload for the upload tool from the optional
        # keyword arguments shared by the upload_* / paste_content methods.
        tool_input = {
            "file_type": keywords.get('file_type', 'auto'),
            "dbkey": keywords.get("dbkey", "?"),
        }
        if not keywords.get('to_posix_lines', True):
            tool_input['files_0|to_posix_lines'] = False
        elif keywords.get('space_to_tab', False):
            # space_to_tab is only honoured when to_posix_lines is enabled.
            tool_input['files_0|space_to_tab'] = 'Yes'
        if 'file_name' in keywords:
            tool_input["files_0|NAME"] = keywords['file_name']
        tool_input["files_0|type"] = "upload_dataset"
        return {
            "history_id": history_id,
            "tool_id": keywords.get("tool_id", "upload1"),
            "inputs": tool_input,
        }