guppy_basecaller repository: comparison of env/lib/python3.7/site-packages/gxformat2/export.py @ 5:9b1c78e6ba9c (draft, default, tip)
"planemo upload commit 6c0a8142489327ece472c84e558c47da711a9142"
| field | value |
|---|---|
| author | shellac |
| date | Mon, 01 Jun 2020 08:59:25 -0400 |
| parents | 79f47841a781 |
| children | |
The listing below shows the file as it stood at 4:79f47841a781; it has no corresponding content at 5:9b1c78e6ba9c.
| 1 """Functionality for converting a standard Galaxy workflow into a format 2 workflow.""" | |
| 2 | |
| 3 import json | |
| 4 from collections import OrderedDict | |
| 5 | |
| 6 from ._labels import Labels | |
| 7 from ._yaml import ordered_dump | |
| 8 | |
| 9 | |
| 10 def _copy_common_properties(from_native_step, to_format2_step): | |
| 11 annotation = from_native_step.get("annotation", "") | |
| 12 if annotation: | |
| 13 to_format2_step["doc"] = annotation | |
| 14 position = from_native_step.get("position", None) | |
| 15 if position: | |
| 16 to_format2_step["position"] = position | |
| 17 | |
| 18 | |
| 19 def from_galaxy_native(native_workflow_dict, tool_interface=None, json_wrapper=False): | |
| 20 """Convert native .ga workflow definition to a format2 workflow. | |
| 21 | |
| 22 This is highly experimental and currently broken. | |
| 23 """ | |
| 24 data = OrderedDict() | |
| 25 data['class'] = 'GalaxyWorkflow' | |
| 26 _copy_common_properties(native_workflow_dict, data) | |
| 27 if "name" in native_workflow_dict: | |
| 28 data["label"] = native_workflow_dict.pop("name") | |
| 29 for top_level_key in ['tags', 'uuid', 'report']: | |
| 30 value = native_workflow_dict.get(top_level_key) | |
| 31 if value: | |
| 32 data[top_level_key] = value | |
| 33 | |
| 34 native_steps = native_workflow_dict.get("steps") | |
| 35 | |
| 36 label_map = {} | |
| 37 all_labeled = True | |
| 38 for key, step in native_steps.items(): | |
| 39 label = step.get("label") | |
| 40 if not label: | |
| 41 all_labeled = False | |
| 42 label_map[str(key)] = label | |
| 43 | |
| 44 inputs = OrderedDict() | |
| 45 outputs = OrderedDict() | |
| 46 steps = [] | |
| 47 | |
| 48 labels = Labels() | |
| 49 | |
| 50 # For each step, rebuild the form and encode the state | |
| 51 for step in native_steps.values(): | |
| 52 for workflow_output in step.get("workflow_outputs", []): | |
| 53 source = _to_source(workflow_output, label_map, output_id=step["id"]) | |
| 54 output_id = labels.ensure_new_output_label(workflow_output.get("label")) | |
| 55 outputs[output_id] = {"outputSource": source} | |
| 56 | |
| 57 module_type = step.get("type") | |
| 58 if module_type in ['data_input', 'data_collection_input', 'parameter_input']: | |
| 59 step_id = step["label"] # TODO: auto-label | |
| 60 input_dict = {} | |
| 61 if module_type == 'data_collection_input': | |
| 62 input_dict['type'] = 'collection' | |
| 63 elif module_type == 'data_input': | |
| 64 input_dict['type'] = 'data' | |
| 65 elif module_type == "parameter_input": | |
| 66 tool_state = _tool_state(step) | |
| 67 input_dict['type'] = tool_state.get("parameter_type") | |
| 68 # TODO: handle parameter_input types | |
| 69 _copy_common_properties(step, input_dict) | |
| 70 # If we are only copying property - use the CWL-style short-hand | |
| 71 if len(input_dict) == 1: | |
| 72 inputs[step_id] = input_dict["type"] | |
| 73 else: | |
| 74 inputs[step_id] = input_dict | |
| 75 continue | |
| 76 | |
| 77 if module_type == "pause": | |
| 78 step_dict = OrderedDict() | |
| 79 optional_props = ['label'] | |
| 80 _copy_common_properties(step, step_dict) | |
| 81 _copy_properties(step, step_dict, optional_props=optional_props) | |
| 82 _convert_input_connections(step, step_dict, label_map) | |
| 83 step_dict["type"] = "pause" | |
| 84 steps.append(step_dict) | |
| 85 continue | |
| 86 | |
| 87 if module_type == 'subworkflow': | |
| 88 step_dict = OrderedDict() | |
| 89 optional_props = ['label'] | |
| 90 _copy_common_properties(step, step_dict) | |
| 91 _copy_properties(step, step_dict, optional_props=optional_props) | |
| 92 _convert_input_connections(step, step_dict, label_map) | |
| 93 _convert_post_job_actions(step, step_dict) | |
| 94 subworkflow_native_dict = step["subworkflow"] | |
| 95 subworkflow = from_galaxy_native(subworkflow_native_dict, tool_interface=tool_interface, json_wrapper=False) | |
| 96 step_dict["run"] = subworkflow | |
| 97 steps.append(step_dict) | |
| 98 continue | |
| 99 | |
| 100 if module_type != 'tool': | |
| 101 raise NotImplementedError("Unhandled module type %s" % module_type) | |
| 102 | |
| 103 step_dict = OrderedDict() | |
| 104 optional_props = ['label', 'tool_shed_repository'] | |
| 105 required_props = ['tool_id', 'tool_version'] | |
| 106 _copy_properties(step, step_dict, optional_props, required_props) | |
| 107 _copy_common_properties(step, step_dict) | |
| 108 | |
| 109 tool_state = _tool_state(step) | |
| 110 tool_state.pop("__page__", None) | |
| 111 tool_state.pop("__rerun_remap_job_id__", None) | |
| 112 step_dict['tool_state'] = tool_state | |
| 113 | |
| 114 _convert_input_connections(step, step_dict, label_map) | |
| 115 _convert_post_job_actions(step, step_dict) | |
| 116 steps.append(step_dict) | |
| 117 | |
| 118 data['inputs'] = inputs | |
| 119 data['outputs'] = outputs | |
| 120 | |
| 121 if all_labeled: | |
| 122 steps_dict = OrderedDict() | |
| 123 for step in steps: | |
| 124 label = step.pop("label") | |
| 125 steps_dict[label] = step | |
| 126 data['steps'] = steps_dict | |
| 127 else: | |
| 128 data['steps'] = steps | |
| 129 | |
| 130 if json_wrapper: | |
| 131 return { | |
| 132 "yaml_content": ordered_dump(data) | |
| 133 } | |
| 134 | |
| 135 return data | |
| 136 | |
| 137 | |
| 138 def _tool_state(step): | |
| 139 tool_state = json.loads(step['tool_state']) | |
| 140 return tool_state | |
| 141 | |
| 142 | |
| 143 def _copy_properties(from_native_step, to_format2_step, optional_props=[], required_props=[]): | |
| 144 for prop in optional_props: | |
| 145 value = from_native_step.get(prop) | |
| 146 if value: | |
| 147 to_format2_step[prop] = value | |
| 148 for prop in required_props: | |
| 149 value = from_native_step.get(prop) | |
| 150 to_format2_step[prop] = value | |
| 151 | |
| 152 | |
| 153 def _convert_input_connections(from_native_step, to_format2_step, label_map): | |
| 154 in_dict = from_native_step.get("in", {}).copy() | |
| 155 input_connections = from_native_step['input_connections'] | |
| 156 for input_name, input_defs in input_connections.items(): | |
| 157 if not isinstance(input_defs, list): | |
| 158 input_defs = [input_defs] | |
| 159 for input_def in input_defs: | |
| 160 source = _to_source(input_def, label_map) | |
| 161 if input_name == "__NO_INPUT_OUTPUT_NAME__": | |
| 162 input_name = "$step" | |
| 163 assert source.endswith("/__NO_INPUT_OUTPUT_NAME__") | |
| 164 source = source[:-len("/__NO_INPUT_OUTPUT_NAME__")] | |
| 165 in_dict[input_name] = { | |
| 166 "source": source | |
| 167 } | |
| 168 to_format2_step["in"] = in_dict | |
| 169 | |
| 170 | |
| 171 def _convert_post_job_actions(from_native_step, to_format2_step): | |
| 172 | |
| 173 def _ensure_output_def(key): | |
| 174 if "outputs" in to_format2_step: | |
| 175 to_format2_step["out"] = to_format2_step.pop("outputs") | |
| 176 elif "out" not in to_format2_step: | |
| 177 to_format2_step["out"] = {} | |
| 178 | |
| 179 outputs_dict = to_format2_step["out"] | |
| 180 if key not in outputs_dict: | |
| 181 outputs_dict[key] = {} | |
| 182 return outputs_dict[key] | |
| 183 | |
| 184 if "post_job_actions" in from_native_step: | |
| 185 post_job_actions = from_native_step["post_job_actions"].copy() | |
| 186 to_remove_keys = [] | |
| 187 | |
| 188 for post_job_action_key, post_job_action_value in post_job_actions.items(): | |
| 189 action_type = post_job_action_value["action_type"] | |
| 190 output_name = post_job_action_value.get("output_name") | |
| 191 action_args = post_job_action_value.get("action_arguments", {}) | |
| 192 | |
| 193 handled = True | |
| 194 if action_type == "RenameDatasetAction": | |
| 195 output_dict = _ensure_output_def(output_name) | |
| 196 output_dict["rename"] = action_args["newname"] | |
| 197 handled = True | |
| 198 elif action_type == "HideDatasetAction": | |
| 199 output_dict = _ensure_output_def(output_name) | |
| 200 output_dict["hide"] = True | |
| 201 handled = True | |
| 202 elif action_type == "DeleteIntermediatesAction": | |
| 203 output_dict = _ensure_output_def(output_name) | |
| 204 output_dict["delete_intermediate_datasets"] = True | |
| 205 elif action_type == "ChangeDatatypeAction": | |
| 206 output_dict = _ensure_output_def(output_name) | |
| 207 output_dict['change_datatype'] = action_args | |
| 208 handled = True | |
| 209 elif action_type == "TagDatasetAction": | |
| 210 output_dict = _ensure_output_def(output_name) | |
| 211 output_dict["add_tags"] = action_args["tags"].split(",") | |
| 212 elif action_type == "RemoveTagDatasetAction": | |
| 213 output_dict = _ensure_output_def(output_name) | |
| 214 output_dict["remove_tags"] = action_args["tags"].split(",") | |
| 215 elif action_type == "ColumnSetAction": | |
| 216 output_dict = _ensure_output_def(output_name) | |
| 217 output_dict["set_columns"] = action_args | |
| 218 else: | |
| 219 handled = False | |
| 220 | |
| 221 if handled: | |
| 222 to_remove_keys.append(post_job_action_key) | |
| 223 | |
| 224 for to_remove in to_remove_keys: | |
| 225 del post_job_actions[to_remove] | |
| 226 | |
| 227 if post_job_actions: | |
| 228 to_format2_step["post_job_actions"] = post_job_actions | |
| 229 | |
| 230 | |
| 231 def _to_source(has_output_name, label_map, output_id=None): | |
| 232 output_id = output_id if output_id is not None else has_output_name['id'] | |
| 233 output_id = str(output_id) | |
| 234 output_name = has_output_name['output_name'] | |
| 235 output_label = label_map.get(output_id) or output_id | |
| 236 if output_name == "output": | |
| 237 source = output_label | |
| 238 else: | |
| 239 source = "%s/%s" % (output_label, output_name) | |
| 240 return source | |
| 241 | |
| 242 | |
| 243 __all__ = ( | |
| 244 'from_galaxy_native', | |
| 245 ) |
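For orientation, a minimal usage sketch of the converter defined above. The import paths mirror the module's own relative imports (`from_galaxy_native` in `gxformat2.export`, `ordered_dump` in `gxformat2._yaml`), but the `workflow.ga` filename and the standalone-script framing are illustrative assumptions, and the docstring's caveat stands: the conversion is experimental.

```python
# Minimal usage sketch; "workflow.ga" is a hypothetical input file, not part of this changeset.
import json

from gxformat2.export import from_galaxy_native
from gxformat2._yaml import ordered_dump

# A native Galaxy workflow (.ga) is plain JSON.
with open("workflow.ga") as fh:
    native_workflow_dict = json.load(fh)

# Convert to a Format 2 (gxformat2) dictionary. With json_wrapper=True the
# function would instead return {"yaml_content": "<serialized YAML>"}.
format2_dict = from_galaxy_native(native_workflow_dict)

# Serialize with the same ordered YAML dumper the module uses internally.
print(ordered_dump(format2_dict))
```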
