springsuite (guerler): comparison of planemo/lib/python3.7/site-packages/gxformat2/export.py @ 1:56ad4e20f292 (draft)

"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"

author:     guerler
date:       Fri, 31 Jul 2020 00:32:28 -0400
comparison: 0:d30785e31577 -> 1:56ad4e20f292
"""Functionality for converting a standard Galaxy workflow into a format 2 workflow."""

import json
from collections import OrderedDict

from ._labels import Labels
from ._yaml import ordered_dump


def _copy_common_properties(from_native_step, to_format2_step):
    """Copy annotation (as ``doc``) and position from a native step to a format2 step."""
    annotation = from_native_step.get("annotation", "")
    if annotation:
        to_format2_step["doc"] = annotation
    position = from_native_step.get("position", None)
    if position:
        to_format2_step["position"] = position


def from_galaxy_native(native_workflow_dict, tool_interface=None, json_wrapper=False):
    """Convert native .ga workflow definition to a format2 workflow.

    This is highly experimental and currently broken.
    """
    data = OrderedDict()
    data['class'] = 'GalaxyWorkflow'
    _copy_common_properties(native_workflow_dict, data)
    if "name" in native_workflow_dict:
        data["label"] = native_workflow_dict.pop("name")
    for top_level_key in ['tags', 'uuid', 'report']:
        value = native_workflow_dict.get(top_level_key)
        if value:
            data[top_level_key] = value

    native_steps = native_workflow_dict.get("steps")

    label_map = {}
    all_labeled = True
    for key, step in native_steps.items():
        label = step.get("label")
        if not label:
            all_labeled = False
        label_map[str(key)] = label

    inputs = OrderedDict()
    outputs = OrderedDict()
    steps = []

    labels = Labels()

    # For each step, rebuild the form and encode the state
    for step in native_steps.values():
        for workflow_output in step.get("workflow_outputs", []):
            source = _to_source(workflow_output, label_map, output_id=step["id"])
            output_id = labels.ensure_new_output_label(workflow_output.get("label"))
            outputs[output_id] = {"outputSource": source}

        module_type = step.get("type")
        if module_type in ['data_input', 'data_collection_input', 'parameter_input']:
            step_id = step["label"]  # TODO: auto-label
            input_dict = {}
            tool_state = _tool_state(step)
            if module_type == 'data_collection_input':
                input_dict['type'] = 'collection'
            elif module_type == 'data_input':
                input_dict['type'] = 'data'
            elif module_type == "parameter_input":
                input_dict['type'] = tool_state.get("parameter_type")

            for tool_state_key in ['optional', 'format', 'default', 'restrictions', 'suggestions', 'restrictOnConnections']:
                if tool_state_key in tool_state:
                    input_dict[tool_state_key] = tool_state[tool_state_key]

            _copy_common_properties(step, input_dict)
            # If type is the only property, use the CWL-style shorthand.
            if len(input_dict) == 1:
                inputs[step_id] = input_dict["type"]
            else:
                inputs[step_id] = input_dict
            continue

        if module_type == "pause":
            step_dict = OrderedDict()
            optional_props = ['label']
            _copy_common_properties(step, step_dict)
            _copy_properties(step, step_dict, optional_props=optional_props)
            _convert_input_connections(step, step_dict, label_map)
            step_dict["type"] = "pause"
            steps.append(step_dict)
            continue

        if module_type == 'subworkflow':
            step_dict = OrderedDict()
            optional_props = ['label']
            _copy_common_properties(step, step_dict)
            _copy_properties(step, step_dict, optional_props=optional_props)
            _convert_input_connections(step, step_dict, label_map)
            _convert_post_job_actions(step, step_dict)
            subworkflow_native_dict = step["subworkflow"]
            subworkflow = from_galaxy_native(subworkflow_native_dict, tool_interface=tool_interface, json_wrapper=False)
            step_dict["run"] = subworkflow
            steps.append(step_dict)
            continue

        if module_type != 'tool':
            raise NotImplementedError("Unhandled module type %s" % module_type)

        step_dict = OrderedDict()
        optional_props = ['label', 'tool_shed_repository']
        required_props = ['tool_id', 'tool_version']
        _copy_properties(step, step_dict, optional_props, required_props)
        _copy_common_properties(step, step_dict)

        tool_state = _tool_state(step)
        tool_state.pop("__page__", None)
        tool_state.pop("__rerun_remap_job_id__", None)
        step_dict['tool_state'] = tool_state

        _convert_input_connections(step, step_dict, label_map)
        _convert_post_job_actions(step, step_dict)
        steps.append(step_dict)

    data['inputs'] = inputs
    data['outputs'] = outputs

    if all_labeled:
        steps_dict = OrderedDict()
        for step in steps:
            label = step.pop("label")
            steps_dict[label] = step
        data['steps'] = steps_dict
    else:
        data['steps'] = steps

    if json_wrapper:
        return {
            "yaml_content": ordered_dump(data)
        }

    return data


def _tool_state(step):
    """Return the step's tool state parsed from its JSON string."""
    tool_state = json.loads(step['tool_state'])
    return tool_state


def _copy_properties(from_native_step, to_format2_step, optional_props=[], required_props=[]):
    """Copy the listed step properties; optional properties are copied only when set."""
    for prop in optional_props:
        value = from_native_step.get(prop)
        if value:
            to_format2_step[prop] = value
    for prop in required_props:
        value = from_native_step.get(prop)
        to_format2_step[prop] = value


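# Illustrative sketch of what the conversion below produces (hypothetical values,
# not taken from a real workflow): with label_map == {"0": "fastq"}, a native
# connection
#     "input1": {"id": 0, "output_name": "output"}
# becomes
#     "in": {"input1": {"source": "fastq"}}
# while a non-default output name such as "out_file1" yields
#     "in": {"input1": {"source": "fastq/out_file1"}}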
def _convert_input_connections(from_native_step, to_format2_step, label_map):
    """Translate native input_connections into a format2 ``in`` mapping of sources."""
    in_dict = from_native_step.get("in", {}).copy()
    input_connections = from_native_step['input_connections']
    for input_name, input_defs in input_connections.items():
        if not isinstance(input_defs, list):
            input_defs = [input_defs]
        for input_def in input_defs:
            source = _to_source(input_def, label_map)
            if input_name == "__NO_INPUT_OUTPUT_NAME__":
                input_name = "$step"
                assert source.endswith("/__NO_INPUT_OUTPUT_NAME__")
                source = source[:-len("/__NO_INPUT_OUTPUT_NAME__")]
            in_dict[input_name] = {
                "source": source
            }
    to_format2_step["in"] = in_dict


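# Illustrative sketch of the post-job-action folding below (hypothetical values,
# not taken from a real workflow): a native entry such as
#     "RenameDatasetAction_out_file1": {
#         "action_type": "RenameDatasetAction",
#         "output_name": "out_file1",
#         "action_arguments": {"newname": "renamed.tsv"},
#     }
# is moved into the step's "out" mapping as
#     "out": {"out_file1": {"rename": "renamed.tsv"}}
# Action types without a known mapping are left under "post_job_actions" unchanged.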
def _convert_post_job_actions(from_native_step, to_format2_step):
    """Fold recognized post-job actions into the step's ``out`` definitions."""

    def _ensure_output_def(key):
        if "outputs" in to_format2_step:
            to_format2_step["out"] = to_format2_step.pop("outputs")
        elif "out" not in to_format2_step:
            to_format2_step["out"] = {}

        outputs_dict = to_format2_step["out"]
        if key not in outputs_dict:
            outputs_dict[key] = {}
        return outputs_dict[key]

    if "post_job_actions" in from_native_step:
        post_job_actions = from_native_step["post_job_actions"].copy()
        to_remove_keys = []

        for post_job_action_key, post_job_action_value in post_job_actions.items():
            action_type = post_job_action_value["action_type"]
            output_name = post_job_action_value.get("output_name")
            action_args = post_job_action_value.get("action_arguments", {})

            handled = True
            if action_type == "RenameDatasetAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["rename"] = action_args["newname"]
            elif action_type == "HideDatasetAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["hide"] = True
            elif action_type == "DeleteIntermediatesAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["delete_intermediate_datasets"] = True
            elif action_type == "ChangeDatatypeAction":
                output_dict = _ensure_output_def(output_name)
                output_dict['change_datatype'] = action_args
            elif action_type == "TagDatasetAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["add_tags"] = action_args["tags"].split(",")
            elif action_type == "RemoveTagDatasetAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["remove_tags"] = action_args["tags"].split(",")
            elif action_type == "ColumnSetAction":
                output_dict = _ensure_output_def(output_name)
                output_dict["set_columns"] = action_args
            else:
                handled = False

            if handled:
                to_remove_keys.append(post_job_action_key)

        for to_remove in to_remove_keys:
            del post_job_actions[to_remove]

        if post_job_actions:
            to_format2_step["post_job_actions"] = post_job_actions


def _to_source(has_output_name, label_map, output_id=None):
    """Build a format2 source string (``label`` or ``label/output_name``) for a connection."""
    output_id = output_id if output_id is not None else has_output_name['id']
    output_id = str(output_id)
    output_name = has_output_name['output_name']
    output_label = label_map.get(output_id) or output_id
    if output_name == "output":
        source = output_label
    else:
        source = "%s/%s" % (output_label, output_name)
    return source


__all__ = (
    'from_galaxy_native',
)
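

# A minimal command-line sketch for manual experimentation (an illustration added
# here, not part of gxformat2's public interface): read a native ``.ga`` file named
# on the command line and print its Format 2 representation as YAML.
if __name__ == "__main__":
    import sys

    with open(sys.argv[1]) as handle:
        native = json.load(handle)
    print(ordered_dump(from_galaxy_native(native)))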