comparison env/lib/python3.9/site-packages/bioblend/galaxy/workflows/__init__.py @ 0:4f3585e2f14b draft default tip

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author shellac
date Mon, 22 Mar 2021 18:12:50 +0000
1 """
2 Contains possible interactions with the Galaxy Workflows
3 """
4 import json
5 import os
6
7 from bioblend.galaxy.client import Client
8
9
10 class WorkflowClient(Client):
11 def __init__(self, galaxy_instance):
12 self.module = 'workflows'
13 super().__init__(galaxy_instance)
14
15 # the 'deleted' option is not available for workflows
16 def get_workflows(self, workflow_id=None, name=None, published=False):
17 """
18 Get all workflows or filter the specific one(s) via the provided ``name``
19 or ``workflow_id``. Provide only one argument, ``name`` or ``workflow_id``,
20 but not both.
21
22 :type workflow_id: str
23 :param workflow_id: Encoded workflow ID (incompatible with ``name``)
24
25 :type name: str
25 :param name: Filter by name of workflow (incompatible with
26 ``workflow_id``). If multiple workflows have the given name, all of
27 them will be returned.
29
30 :type published: bool
31 :param published: if ``True``, also return published workflows
32
33 :rtype: list
34 :return: A list of workflow dicts.
35 For example::
36
37 [{'id': '92c56938c2f9b315',
38 'name': 'Simple',
39 'url': '/api/workflows/92c56938c2f9b315'}]
40
41 """
42 if workflow_id is not None and name is not None:
43 raise ValueError('Provide only one argument between name or workflow_id, but not both')
44 params = {}
45 if published:
46 params['show_published'] = True
47 workflows = self._get(params=params)
48 if workflow_id is not None:
49 workflow = next((_ for _ in workflows if _['id'] == workflow_id), None)
50 workflows = [workflow] if workflow is not None else []
51 elif name is not None:
52 workflows = [_ for _ in workflows if _['name'] == name]
53 return workflows
54
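A minimal usage sketch for the listing and filtering calls above (the Galaxy
URL and API key are placeholders for a reachable instance)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url='https://usegalaxy.example.org', key='<your API key>')
    # All workflows owned by (or shared with) the current user
    all_workflows = gi.workflows.get_workflows()
    # Only the workflows named 'Simple' (may be more than one)
    simple_workflows = gi.workflows.get_workflows(name='Simple')
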
55 def show_workflow(self, workflow_id, version=None):
56 """
57 Display information needed to run a workflow.
58
59 :type workflow_id: str
60 :param workflow_id: Encoded workflow ID
61
62 :type version: int
63 :param version: Workflow version to show
64
65 :rtype: dict
66 :return: A description of the workflow and its inputs.
67 For example::
68
69 {'id': '92c56938c2f9b315',
70 'inputs': {'23': {'label': 'Input Dataset', 'value': ''}},
71 'name': 'Simple',
72 'url': '/api/workflows/92c56938c2f9b315'}
73 """
74 params = {}
75 if version is not None:
76 params['version'] = version
77
78 return self._get(id=workflow_id, params=params)
79
80 def get_workflow_inputs(self, workflow_id, label):
81 """
82 Get a list of workflow input IDs that match the given label.
83 If no input matches the given label, an empty list is returned.
84
85 :type workflow_id: str
86 :param workflow_id: Encoded workflow ID
87
88 :type label: str
89 :param label: label to filter workflow inputs on
90
91 :rtype: list
92 :return: list of workflow inputs matching the label query
93 """
94 wf = self._get(id=workflow_id)
95 inputs = wf['inputs']
96 return [id for id in inputs if inputs[id]['label'] == label]
97
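For example, to find the input step ID for a labelled workflow input (a
sketch reusing the ``gi`` client from the first example; the workflow ID and
label are taken from the docstring examples above)::

    input_ids = gi.workflows.get_workflow_inputs('92c56938c2f9b315', label='Input Dataset')
    if input_ids:
        # e.g. '23', usable as a key in a dataset/inputs map
        first_input_id = input_ids[0]
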
98 def import_workflow_dict(self, workflow_dict, publish=False):
99 """
100 Imports a new workflow given a dictionary representing a previously
101 exported workflow.
102
103 :type workflow_dict: dict
104 :param workflow_dict: dictionary representing the workflow to be imported
105
106 :type publish: bool
107 :param publish: if ``True``, the uploaded workflow will be published;
108 otherwise it will be visible only to the user who uploads it (default)
109
110 :rtype: dict
111 :return: Information about the imported workflow.
112 For example::
113
114 {'name': 'Training: 16S rRNA sequencing with mothur: main tutorial',
115 'tags': [],
116 'deleted': False,
117 'latest_workflow_uuid': '368c6165-ccbe-4945-8a3c-d27982206d66',
118 'url': '/api/workflows/94bac0a90086bdcf',
119 'number_of_steps': 44,
120 'published': False,
121 'owner': 'jane-doe',
122 'model_class': 'StoredWorkflow',
123 'id': '94bac0a90086bdcf'}
124 """
125 payload = {'workflow': workflow_dict, 'publish': publish}
126
127 url = self._make_url() + "/upload"
128 return self._post(url=url, payload=payload)
129
130 def import_workflow_from_local_path(self, file_local_path, publish=False):
131 """
132 Imports a new workflow given the path to a file containing a previously
133 exported workflow.
134
135 :type file_local_path: str
136 :param file_local_path: File to upload to the server for new workflow
137
138 :type publish: bool
139 :param publish: if ``True``, the uploaded workflow will be published;
140 otherwise it will be visible only to the user who uploads it (default)
141
142 :rtype: dict
143 :return: Information about the imported workflow.
144 For example::
145
146 {'name': 'Training: 16S rRNA sequencing with mothur: main tutorial',
147 'tags': [],
148 'deleted': False,
149 'latest_workflow_uuid': '368c6165-ccbe-4945-8a3c-d27982206d66',
150 'url': '/api/workflows/94bac0a90086bdcf',
151 'number_of_steps': 44,
152 'published': False,
153 'owner': 'jane-doe',
154 'model_class': 'StoredWorkflow',
155 'id': '94bac0a90086bdcf'}
156
157 """
158 with open(file_local_path) as fp:
159 workflow_json = json.load(fp)
160
161 return self.import_workflow_dict(workflow_json, publish)
162
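A sketch of importing a previously exported ``.ga`` file and publishing it
(the path is a placeholder; reuses the ``gi`` client from the first example)::

    imported = gi.workflows.import_workflow_from_local_path(
        '/tmp/Galaxy-Workflow-Simple.ga', publish=True)
    print(imported['id'], imported['name'])
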
163 def import_shared_workflow(self, workflow_id):
164 """
165 Imports a new workflow from the shared published workflows.
166
167 :type workflow_id: str
168 :param workflow_id: Encoded workflow ID
169
170 :rtype: dict
171 :return: A description of the workflow.
172 For example::
173
174 {'id': 'ee0e2b4b696d9092',
175 'model_class': 'StoredWorkflow',
176 'name': 'Super workflow that solves everything!',
177 'published': False,
178 'tags': [],
179 'url': '/api/workflows/ee0e2b4b696d9092'}
180 """
181 payload = {'shared_workflow_id': workflow_id}
182 url = self._make_url()
183 return self._post(url=url, payload=payload)
184
185 def export_workflow_dict(self, workflow_id, version=None):
186 """
187 Exports a workflow.
188
189 :type workflow_id: str
190 :param workflow_id: Encoded workflow ID
191
192 :type version: int
193 :param version: Workflow version to export
194
195 :rtype: dict
196 :return: Dictionary representing the requested workflow
197 """
198 params = {}
199 if version is not None:
200 params['version'] = version
201
202 url = '/'.join((self._make_url(), 'download', workflow_id))
203 return self._get(url=url, params=params)
204
205 def export_workflow_to_local_path(self, workflow_id, file_local_path, use_default_filename=True):
206 """
207 Exports a workflow in JSON format to a given local path.
208
209 :type workflow_id: str
210 :param workflow_id: Encoded workflow ID
211
212 :type file_local_path: str
213 :param file_local_path: Local path to which the exported file will be saved.
214 (Should not contain a filename if use_default_filename=True)
215
216 :type use_default_filename: bool
217 :param use_default_filename: If use_default_filename is True, the exported
218 file will be saved as file_local_path/Galaxy-Workflow-%s.ga, where %s
219 is the workflow name. If use_default_filename is False, file_local_path
220 is assumed to contain the full file path including filename.
221
222 :rtype: None
223 :return: None
224 """
225 workflow_dict = self.export_workflow_dict(workflow_id)
226
227 if use_default_filename:
228 filename = 'Galaxy-Workflow-%s.ga' % workflow_dict['name']
229 file_local_path = os.path.join(file_local_path, filename)
230
231 with open(file_local_path, 'w') as fp:
232 json.dump(workflow_dict, fp)
233
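The two export helpers can be combined as needed; a sketch with a placeholder
workflow ID and target directory::

    # Fetch the workflow as a dict, e.g. to inspect its steps programmatically
    wf_dict = gi.workflows.export_workflow_dict('92c56938c2f9b315')
    # Or write it straight to /tmp/Galaxy-Workflow-<name>.ga
    gi.workflows.export_workflow_to_local_path('92c56938c2f9b315', '/tmp',
                                               use_default_filename=True)
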
234 def update_workflow(self, workflow_id, **kwds):
235 """
236 Update a given workflow.
237
238 :type workflow_id: str
239 :param workflow_id: Encoded workflow ID
240
241 :type workflow: dict
242 :param workflow: dictionary representing the workflow to be updated
243
244 :type name: str
245 :param name: New name of the workflow
246
247 :type annotation: str
248 :param annotation: New annotation for the workflow
249
250 :type menu_entry: bool
251 :param menu_entry: Whether the workflow should appear in the user's menu
252
253 :type tags: list of str
254 :param tags: Replace workflow tags with the given list
255
256 :rtype: dict
257 :return: Dictionary representing the updated workflow
258 """
259 return self._put(payload=kwds, id=workflow_id)
260
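For instance, renaming a workflow and replacing its tags (the values are
purely illustrative)::

    gi.workflows.update_workflow('92c56938c2f9b315', name='Simple v2',
                                 tags=['qc', 'training'])
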
261 def run_workflow(self, workflow_id, dataset_map=None, params=None,
262 history_id=None, history_name=None,
263 import_inputs_to_history=False, replacement_params=None):
264 """
265 Run the workflow identified by ``workflow_id``.
266
267 .. deprecated:: 0.7.0
268 Use :meth:`invoke_workflow` instead.
269
270 :type workflow_id: str
271 :param workflow_id: Encoded workflow ID
272
273 :type dataset_map: dict
274 :param dataset_map: A mapping of workflow inputs to datasets. The datasets
275 source can be a LibraryDatasetDatasetAssociation (``ldda``),
276 LibraryDataset (``ld``), or HistoryDatasetAssociation (``hda``).
277 The map must be in the following format:
278 ``{'<input>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda]'}}``
279 (e.g. ``{'23': {'id': '29beef4fadeed09f', 'src': 'ld'}}``)
280
281 :type params: dict
282 :param params: A mapping of non-datasets tool parameters (see below)
283
284 :type history_id: str
285 :param history_id: The encoded history ID in which to store the workflow
286 output. Alternatively, ``history_name`` may be specified to create a
287 new history.
288
289 :type history_name: str
290 :param history_name: Create a new history with the given name to store
291 the workflow output. If both ``history_id`` and ``history_name`` are
292 provided, ``history_name`` is ignored. If neither is specified, a new
293 'Unnamed history' is created.
294
295 :type import_inputs_to_history: bool
296 :param import_inputs_to_history: If ``True``, used workflow inputs will be imported
297 into the history. If ``False``, only workflow outputs
298 will be visible in the given history.
299
300 :type replacement_params: dict
301 :param replacement_params: pattern-based replacements for post-job actions (see below)
302
303 :rtype: dict
304 :return: A dict containing the history ID where the outputs are placed
305 as well as output dataset IDs. For example::
306
307 {'history': '64177123325c9cfd',
308 'outputs': ['aa4d3084af404259']}
309
310 The ``params`` dict should be specified as follows::
311
312 {STEP_ID: PARAM_DICT, ...}
313
314 where PARAM_DICT is::
315
316 {PARAM_NAME: VALUE, ...}
317
318 For backwards compatibility, the following (deprecated) format is
319 also supported for ``params``::
320
321 {TOOL_ID: PARAM_DICT, ...}
322
323 in which case PARAM_DICT affects all steps with the given tool id.
324 If both by-tool-id and by-step-id specifications are used, the
325 latter takes precedence.
326
327 Finally (again, for backwards compatibility), PARAM_DICT can also
328 be specified as::
329
330 {'param': PARAM_NAME, 'value': VALUE}
331
332 Note that this format allows only one parameter to be set per step.
333
334 The ``replacement_params`` dict should map parameter names in
335 post-job actions (PJAs) to their runtime values. For
336 instance, if the final step has a PJA like the following::
337
338 {'RenameDatasetActionout_file1': {'action_arguments': {'newname': '${output}'},
339 'action_type': 'RenameDatasetAction',
340 'output_name': 'out_file1'}}
341
342 then the following renames the output dataset to 'foo'::
343
344 replacement_params = {'output': 'foo'}
345
346 see also `this email thread
347 <http://lists.bx.psu.edu/pipermail/galaxy-dev/2011-September/006875.html>`_.
348
349 .. warning::
350 This method waits for the whole workflow to be scheduled before
351 returning and does not scale to large workflows as a result. This
352 method has therefore been deprecated in favor of
353 :meth:`invoke_workflow`, which also features improved default
354 behavior for dataset input handling.
355 """
356 payload = {'workflow_id': workflow_id}
357 if dataset_map:
358 payload['ds_map'] = dataset_map
359
360 if params:
361 payload['parameters'] = params
362
363 if replacement_params:
364 payload['replacement_params'] = replacement_params
365
366 if history_id:
367 payload['history'] = f'hist_id={history_id}'
368 elif history_name:
369 payload['history'] = history_name
370 if import_inputs_to_history is False:
371 payload['no_add_to_history'] = True
372 return self._post(payload)
373
374 def invoke_workflow(self, workflow_id, inputs=None, params=None,
375 history_id=None, history_name=None,
376 import_inputs_to_history=False, replacement_params=None,
377 allow_tool_state_corrections=None, inputs_by=None):
378 """
379 Invoke the workflow identified by ``workflow_id``. This will
380 cause a workflow to be scheduled and return an object describing
381 the workflow invocation.
382
383 :type workflow_id: str
384 :param workflow_id: Encoded workflow ID
385
386 :type inputs: dict
387 :param inputs: A mapping of workflow inputs to datasets and dataset collections.
388 The datasets source can be a LibraryDatasetDatasetAssociation (``ldda``),
389 LibraryDataset (``ld``), HistoryDatasetAssociation (``hda``), or
390 HistoryDatasetCollectionAssociation (``hdca``).
391
392 The map must be in the following format:
393 ``{'<input_index>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda, hdca]'}}``
394 (e.g. ``{'2': {'id': '29beef4fadeed09f', 'src': 'hda'}}``)
395
396 This map may also be indexed by the UUIDs of the workflow steps,
397 as indicated by the ``uuid`` property of steps returned from the
398 Galaxy API. Alternatively workflow steps may be addressed by
399 the label that can be set in the workflow editor. If using
400 uuid or label you need to also set the ``inputs_by`` parameter
401 to ``step_uuid`` or ``name``.
402
403 :type params: dict
404 :param params: A mapping of non-datasets tool parameters (see below)
405
406 :type history_id: str
407 :param history_id: The encoded history ID in which to store the workflow
408 output. Alternatively, ``history_name`` may be specified to create a
409 new history.
410
411 :type history_name: str
412 :param history_name: Create a new history with the given name to store
413 the workflow output. If both ``history_id`` and ``history_name`` are
414 provided, ``history_name`` is ignored. If neither is specified, a new
415 'Unnamed history' is created.
416
417 :type import_inputs_to_history: bool
418 :param import_inputs_to_history: If ``True``, used workflow inputs will
419 be imported into the history. If ``False``, only workflow outputs will
420 be visible in the given history.
421
422 :type allow_tool_state_corrections: bool
423 :param allow_tool_state_corrections: If True, allow Galaxy to fill in
424 missing tool state when running workflows. This may be useful for
425 workflows using tools that have changed over time or for workflows
426 built outside of Galaxy with only a subset of inputs defined.
427
428 :type replacement_params: dict
429 :param replacement_params: pattern-based replacements for post-job
430 actions (see below)
431
432 :type inputs_by: str
433 :param inputs_by: Determines how inputs are referenced. Can be
434 "step_index|step_uuid" (default), "step_index", "step_id", "step_uuid", or "name".
435
436 :rtype: dict
437 :return: A dict containing the workflow invocation describing the
438 scheduling of the workflow. For example::
439
440 {'history_id': '2f94e8ae9edff68a',
441 'id': 'df7a1f0c02a5b08e',
442 'inputs': {'0': {'id': 'a7db2fac67043c7e',
443 'src': 'hda',
444 'uuid': '7932ffe0-2340-4952-8857-dbaa50f1f46a'}},
445 'model_class': 'WorkflowInvocation',
446 'state': 'ready',
447 'steps': [{'action': None,
448 'id': 'd413a19dec13d11e',
449 'job_id': None,
450 'model_class': 'WorkflowInvocationStep',
451 'order_index': 0,
452 'state': None,
453 'update_time': '2015-10-31T22:00:26',
454 'workflow_step_id': 'cbbbf59e8f08c98c',
455 'workflow_step_label': None,
456 'workflow_step_uuid': 'b81250fd-3278-4e6a-b269-56a1f01ef485'},
457 {'action': None,
458 'id': '2f94e8ae9edff68a',
459 'job_id': 'e89067bb68bee7a0',
460 'model_class': 'WorkflowInvocationStep',
461 'order_index': 1,
462 'state': 'new',
463 'update_time': '2015-10-31T22:00:26',
464 'workflow_step_id': '964b37715ec9bd22',
465 'workflow_step_label': None,
466 'workflow_step_uuid': 'e62440b8-e911-408b-b124-e05435d3125e'}],
467 'update_time': '2015-10-31T22:00:26',
468 'uuid': 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c',
469 'workflow_id': '03501d7626bd192f'}
470
471 The ``params`` dict should be specified as follows::
472
473 {STEP_ID: PARAM_DICT, ...}
474
475 where PARAM_DICT is::
476
477 {PARAM_NAME: VALUE, ...}
478
479 For backwards compatibility, the following (deprecated) format is
480 also supported for ``params``::
481
482 {TOOL_ID: PARAM_DICT, ...}
483
484 in which case PARAM_DICT affects all steps with the given tool id.
485 If both by-tool-id and by-step-id specifications are used, the
486 latter takes precedence.
487
488 Finally (again, for backwards compatibility), PARAM_DICT can also
489 be specified as::
490
491 {'param': PARAM_NAME, 'value': VALUE}
492
493 Note that this format allows only one parameter to be set per step.
494
495 For a ``repeat`` parameter, the names of the contained parameters need
496 to be specified as ``<repeat name>_<repeat index>|<param name>``, with
497 the repeat index starting at 0. For example, if the tool XML contains::
498
499 <repeat name="cutoff" title="Parameters used to filter cells" min="1">
500 <param name="name" type="text" value="n_genes" label="Name of param...">
501 <option value="n_genes">n_genes</option>
502 <option value="n_counts">n_counts</option>
503 </param>
504 <param name="min" type="float" min="0" value="0" label="Min value"/>
505 </repeat>
506
507 then the PARAM_DICT should be something like::
508
509 {...
510 "cutoff_0|name": "n_genes",
511 "cutoff_0|min": "2",
512 "cutoff_1|name": "n_counts",
513 "cutoff_1|min": "4",
514 ...}
515
516 At the time of this writing, it is not possible to change the number of
517 times the contained parameters are repeated. Therefore, the parameter
518 indexes can go from 0 to n-1, where n is the number of times the
519 repeated element was added when the workflow was saved in the Galaxy UI.
520
521 The ``replacement_params`` dict should map parameter names in
522 post-job actions (PJAs) to their runtime values. For
523 instance, if the final step has a PJA like the following::
524
525 {'RenameDatasetActionout_file1': {'action_arguments': {'newname': '${output}'},
526 'action_type': 'RenameDatasetAction',
527 'output_name': 'out_file1'}}
528
529 then the following renames the output dataset to 'foo'::
530
531 replacement_params = {'output': 'foo'}
532
533 see also `this email thread
534 <http://lists.bx.psu.edu/pipermail/galaxy-dev/2011-September/006875.html>`_.
535
536 .. warning::
537 Historically, the ``run_workflow`` method consumed a ``dataset_map``
538 data structure that was indexed by unencoded workflow step IDs. These
539 IDs would not be stable across Galaxy instances. The new ``inputs``
540 property is instead indexed by either the ``order_index`` property
541 (which is stable across workflow imports) or the step UUID which is
542 also stable.
543 """
544 payload = {'workflow_id': workflow_id}
545 if inputs:
546 payload['inputs'] = inputs
547
548 if params:
549 payload['parameters'] = params
550
551 if replacement_params:
552 payload['replacement_params'] = replacement_params
553
554 if history_id:
555 payload['history'] = f'hist_id={history_id}'
556 elif history_name:
557 payload['history'] = history_name
558 if import_inputs_to_history is False:
559 payload['no_add_to_history'] = True
560 if allow_tool_state_corrections is not None:
561 payload['allow_tool_state_corrections'] = allow_tool_state_corrections
562 if inputs_by is not None:
563 payload['inputs_by'] = inputs_by
564 url = self._invocations_url(workflow_id)
565 return self._post(payload, url=url)
566
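A sketch of a typical invocation, indexing inputs by ``order_index`` and
overriding one tool parameter (the IDs are taken from the docstring examples
above and the parameter name is hypothetical)::

    inputs = {'0': {'id': 'a7db2fac67043c7e', 'src': 'hda'}}
    params = {'1': {'some_param': 'some_value'}}  # step 1, hypothetical parameter
    invocation = gi.workflows.invoke_workflow(
        '03501d7626bd192f',
        inputs=inputs,
        params=params,
        history_name='Workflow output',
    )
    print(invocation['id'], invocation['state'])
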
567 def show_invocation(self, workflow_id, invocation_id):
568 """
569 Get a workflow invocation object representing the scheduling of a
570 workflow. This object may be sparse at first (missing inputs and
571 invocation steps) and will become more populated as the workflow is
572 actually scheduled.
573
574 :type workflow_id: str
575 :param workflow_id: Encoded workflow ID
576
577 :type invocation_id: str
578 :param invocation_id: Encoded workflow invocation ID
579
580 :rtype: dict
581 :return: The workflow invocation.
582 For example::
583
584 {'history_id': '2f94e8ae9edff68a',
585 'id': 'df7a1f0c02a5b08e',
586 'inputs': {'0': {'id': 'a7db2fac67043c7e',
587 'src': 'hda',
588 'uuid': '7932ffe0-2340-4952-8857-dbaa50f1f46a'}},
589 'model_class': 'WorkflowInvocation',
590 'state': 'ready',
591 'steps': [{'action': None,
592 'id': 'd413a19dec13d11e',
593 'job_id': None,
594 'model_class': 'WorkflowInvocationStep',
595 'order_index': 0,
596 'state': None,
597 'update_time': '2015-10-31T22:00:26',
598 'workflow_step_id': 'cbbbf59e8f08c98c',
599 'workflow_step_label': None,
600 'workflow_step_uuid': 'b81250fd-3278-4e6a-b269-56a1f01ef485'},
601 {'action': None,
602 'id': '2f94e8ae9edff68a',
603 'job_id': 'e89067bb68bee7a0',
604 'model_class': 'WorkflowInvocationStep',
605 'order_index': 1,
606 'state': 'new',
607 'update_time': '2015-10-31T22:00:26',
608 'workflow_step_id': '964b37715ec9bd22',
609 'workflow_step_label': None,
610 'workflow_step_uuid': 'e62440b8-e911-408b-b124-e05435d3125e'}],
611 'update_time': '2015-10-31T22:00:26',
612 'uuid': 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c',
613 'workflow_id': '03501d7626bd192f'}
614 """
615 url = self._invocation_url(workflow_id, invocation_id)
616 return self._get(url=url)
617
618 def get_invocations(self, workflow_id):
619 """
620 Get a list containing all the workflow invocations corresponding to the
621 specified workflow.
622
623 :type workflow_id: str
624 :param workflow_id: Encoded workflow ID
625
626 :rtype: list
627 :return: A list of workflow invocations.
628 For example::
629
630 [{'history_id': '2f94e8ae9edff68a',
631 'id': 'df7a1f0c02a5b08e',
632 'model_class': 'WorkflowInvocation',
633 'state': 'new',
634 'update_time': '2015-10-31T22:00:22',
635 'uuid': 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c',
636 'workflow_id': '03501d7626bd192f'}]
637 """
638 url = self._invocations_url(workflow_id)
639 return self._get(url=url)
640
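For example, a simple polling loop that waits for every invocation of a
workflow to leave the 'new'/'ready' states (IDs reused from the examples
above; the terminal state names and sleep interval are assumptions)::

    import time

    workflow_id = '03501d7626bd192f'
    terminal_states = ('scheduled', 'cancelled', 'failed')  # assumed terminal states
    for inv in gi.workflows.get_invocations(workflow_id):
        while gi.workflows.show_invocation(workflow_id, inv['id'])['state'] not in terminal_states:
            time.sleep(5)
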
641 def cancel_invocation(self, workflow_id, invocation_id):
642 """
643 Cancel the scheduling of a workflow.
644
645 :type workflow_id: str
646 :param workflow_id: Encoded workflow ID
647
648 :type invocation_id: str
649 :param invocation_id: Encoded workflow invocation ID
650
651 :rtype: dict
652 :return: The workflow invocation being cancelled
653 """
654 url = self._invocation_url(workflow_id, invocation_id)
655 return self._delete(url=url)
656
657 def show_invocation_step(self, workflow_id, invocation_id, step_id):
658 """
659 See the details of a particular workflow invocation step.
660
661 :type workflow_id: str
662 :param workflow_id: Encoded workflow ID
663
664 :type invocation_id: str
665 :param invocation_id: Encoded workflow invocation ID
666
667 :type step_id: str
668 :param step_id: Encoded workflow invocation step ID
669
670 :rtype: dict
671 :return: The workflow invocation step.
672 For example::
673
674 {'action': None,
675 'id': '63cd3858d057a6d1',
676 'job_id': None,
677 'model_class': 'WorkflowInvocationStep',
678 'order_index': 2,
679 'state': None,
680 'update_time': '2015-10-31T22:11:14',
681 'workflow_step_id': '52e496b945151ee8',
682 'workflow_step_label': None,
683 'workflow_step_uuid': '4060554c-1dd5-4287-9040-8b4f281cf9dc'}
684 """
685 url = self._invocation_step_url(workflow_id, invocation_id, step_id)
686 return self._get(url=url)
687
688 def run_invocation_step_action(self, workflow_id, invocation_id, step_id, action):
689 """ Execute an action for an active workflow invocation step. The
690 nature of this action and what is expected will vary based on the
691 type of workflow step (the only currently valid action is True/False
692 for pause steps).
693
694 :type workflow_id: str
695 :param workflow_id: Encoded workflow ID
696
697 :type invocation_id: str
698 :param invocation_id: Encoded workflow invocation ID
699
700 :type step_id: str
701 :param step_id: Encoded workflow invocation step ID
702
703 :type action: object
704 :param action: Action to use when updating state, semantics depends on
705 step type.
706
707 :rtype: dict
708 :return: Representation of the workflow invocation step
709 """
710 url = self._invocation_step_url(workflow_id, invocation_id, step_id)
711 payload = {"action": action}
712 return self._put(payload=payload, url=url)
713
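A sketch of acting on a paused step, with placeholder IDs taken from the
docstring examples above (as noted, the semantics of ``action`` depend on the
step type; for pause steps a boolean is expected)::

    gi.workflows.run_invocation_step_action(
        '03501d7626bd192f',   # workflow ID
        'df7a1f0c02a5b08e',   # invocation ID
        '63cd3858d057a6d1',   # invocation step ID (a pause step)
        action=True,          # let the paused workflow proceed
    )
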
714 def delete_workflow(self, workflow_id):
715 """
716 Delete a workflow identified by ``workflow_id``.
717
718 :type workflow_id: str
719 :param workflow_id: Encoded workflow ID
720
721 :rtype: str
722 :return: A message about the deletion
723
724 .. warning::
725 Deleting a workflow is irreversible - all workflow data
726 will be permanently deleted.
727 """
728 return self._delete(id=workflow_id)
729
730 def _invocation_step_url(self, workflow_id, invocation_id, step_id):
731 return '/'.join((self._invocation_url(workflow_id, invocation_id), "steps", step_id))
732
733 def _invocation_url(self, workflow_id, invocation_id):
734 return '/'.join((self._invocations_url(workflow_id), invocation_id))
735
736 def _invocations_url(self, workflow_id):
737 return '/'.join((self._make_url(workflow_id), 'invocations'))
738
739
740 __all__ = ('WorkflowClient',)