13
|
1 from __future__ import print_function
|
|
2 import requests
|
|
3 import json
|
|
4 import time
|
|
5 from urllib.request import urlretrieve
|
|
6 import sys
|
|
7 import csv
|
|
8 import argparse
|
|
9
|
|
10 """
|
|
11 Tool's email:
|
|
12 usernmae: cravatgalaxy@gmail.com
|
|
13 password: chicken_quesadilla
|
|
14 """
|
|
15
|
|
16 email = 'cravatgalaxy@gmail.com'
|
|
17
|
|
class CravatSubmissionException(Exception):
    """Raised for errors specific to CRAVAT job submission and result handling."""

    def __init__(self, message):
        # Py3 zero-argument super(); identical behavior to the explicit
        # two-argument form it replaces.
        super().__init__(message)
|
|
21
|
|
class CravatSubmission(object):
    """Submit a variant file to the public CRAVAT web service, poll the job to
    completion, download the Variant Result / Additional Details reports, and
    merge them into one tab-separated output file."""

    def get_cmd_args(self, argv):
        """Parse command line arguments.

        *argv* is expected to be the full ``sys.argv`` list: argv[0] (the
        script path) fills the positional 'path' argument.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument('path',
                            help="Path to python module")
        parser.add_argument('--input',
                            '-i',
                            required=True,
                            help='Input path to a cravat file for querying',)
        parser.add_argument('--output',
                            '-o',
                            default=None,
                            help='Output path to write results from query')
        parser.add_argument('--analysis',
                            '-a',
                            required=True,
                            help="Cravat analysis. Should be 'VEST', 'CHASM', 'NONE', or 'VEST;CHASM'")
        return parser.parse_args(argv)

    def is_valid_analysis(self, analysis):
        """Return True if *analysis* is a recognized value.

        Fix: 'NONE' is now accepted, as the --analysis help text promises; it
        is normalized to the empty string inside submit().
        """
        analyses = ["VEST", "CHASM", "VEST;CHASM", "NONE", ""]
        return analysis in analyses

    def is_skippable(self, s):
        """Return True if a line is skippable: blank/whitespace-only, a '#'
        comment/header, or the CRAVAT boilerplate line.

        Raises CravatSubmissionException if *s* is not a string.
        """
        if not isinstance(s, str):
            raise CravatSubmissionException("is_skippable accepts a string")
        skippable = s == "" \
                    or s[0] == "#" \
                    or s.startswith('"#For more information on CRAVAT') \
                    or s.isspace()
        return skippable

    def parse(self, s, sep='\t'):
        """Convert a string line to an array of values."""
        return s.strip().split(sep)

    def unparse(self, array, sep='\t', newline='\n'):
        """Convert an array of values to a writable string line."""
        return sep.join([str(i) for i in array]) + newline

    def get_headers(self, path, pattern='Input line', sep='\t'):
        """Return the header row (as a list) from a Results/Details file of a
        finished CRAVAT submission, or None if no line starts with *pattern*.
        """
        with open(path, 'r') as f:
            for line in f:
                if line.startswith(pattern):
                    # Fix: honor the sep argument (it was previously ignored).
                    return self.parse(line, sep)
        return None

    def create_index(self, path, prop='Input line'):
        """
        Create an index mapping each line's *prop* value to its seek position
        in the file. Used to pair lines between two associated files without
        reading both files into memory.

        Raises CravatSubmissionException if the file has no header line or the
        header lacks *prop*.
        """
        headers = self.get_headers(path)
        if headers is None:
            # Fix: previously fell through to an opaque TypeError on `in None`.
            raise CravatSubmissionException("No header line found in " + path)
        if prop not in headers:
            raise CravatSubmissionException("Index retrieval property not found in headers")
        prop_loc = headers.index(prop)
        index = {}
        with open(path, 'r') as f:
            pos = 0
            line = f.readline()
            while line != "":
                if not self.is_skippable(line):
                    parsed = self.parse(line)
                    # Skip the header row itself; index only data lines.
                    if parsed != headers:
                        index[parsed[prop_loc]] = pos
                pos = f.tell()
                line = f.readline()
        return index

    def get_header_val_dict(self, headers, vals):
        """Associate an array of header keys to an array of values
        (zip semantics: truncates to the shorter of the two)."""
        return {header: val for (header, val) in zip(headers, vals)}

    def write_results(self, results_path, details_path, out_path, write_headers=True):
        """
        Merge the Results and Details files of a CRAVAT submission into a
        single tab-separated file at *out_path* and return the path written.

        Fix: if *out_path* is None (the --output default) a timestamped file
        name is generated instead of crashing in open().
        """
        results_headers = self.get_headers(results_path)
        details_headers = self.get_headers(details_path)
        if results_headers is None or details_headers is None:
            raise CravatSubmissionException("Unable to interpret headers in Results or Details submission files")
        if out_path is None:
            out_path = 'Z_Cravat_Output' + time.strftime("%Y-%m-%d_%H-%M-%S_") + '.tsv'
        # Combined header row: Results columns first, then any Details-only
        # columns. Copy so results_headers keeps its own width for the zip below.
        headers = list(results_headers)
        headers.extend(h for h in details_headers if h not in headers)
        results_index = self.create_index(results_path)
        details_index = self.create_index(details_path)
        with open(results_path, 'r') as results_file, \
             open(details_path, 'r') as details_file, \
             open(out_path, 'w') as out_file:
            if write_headers:
                out_file.write(self.unparse(headers))
            for line_id, file_pos in results_index.items():
                results_file.seek(file_pos)
                results_vals = self.parse(results_file.readline())
                results_dict = self.get_header_val_dict(results_headers, results_vals)
                if line_id in details_index:
                    details_file.seek(details_index[line_id])
                    details_vals = self.parse(details_file.readline())
                    details_dict = self.get_header_val_dict(details_headers, details_vals)
                    # On a repeated entry, the Details value overwrites the
                    # Results value.
                    results_dict.update(details_dict)
                # Columns absent for this line are written as the string 'None'.
                line = [results_dict.get(header, 'None') for header in headers]
                out_file.write(self.unparse(line))
        return out_path

    def submit(self, in_path, analysis):
        """Make a POST request to submit a job to the production CRAVAT server,
        poll its status, and download the two result files locally.

        Returns (results_path, details_path).
        Raises ValueError for an unrecognized analysis and
        CravatSubmissionException if the server reports the job failed.
        """
        if not self.is_valid_analysis(analysis):
            raise ValueError("Did not get valid analyses.")
        # The server expects an empty analyses string for "no analysis".
        if analysis == 'NONE':
            analysis = ''
        # Create post request to submit job to CRAVAT production server.
        # Fix: open the input file in a with-block so the handle is closed
        # (it previously leaked).
        with open(in_path) as in_file:
            submit = requests.post('http://cravat.us/CRAVAT/rest/service/submit',
                                   files={'inputfile': in_file},
                                   data={'email': email,
                                         'analyses': analysis})
        # Check job run status in a loop until status is 'Success'.
        jobid = json.loads(submit.text)['jobid']
        while True:
            check = requests.get('http://cravat.us/CRAVAT/rest/service/status', params={'jobid': jobid})
            status = json.loads(check.text)['status']
            print(status)
            if status == 'Success':
                break
            if status == 'Error':
                # Fix: a failed job previously polled forever.
                raise CravatSubmissionException("CRAVAT job " + str(jobid) + " failed with status 'Error'")
            time.sleep(2)
        # Download completed job results to local files.
        timestamp = time.strftime("%Y-%m-%d_%H-%M-%S_")
        results_path = 'Z_Variant_Result' + timestamp + '.tsv'
        details_path = 'Z_Additional_Details' + timestamp + '.tsv'
        urlretrieve("http://cravat.us/CRAVAT/results/" + jobid + "/" + "Variant.Result.tsv",
                    filename=results_path)
        urlretrieve("http://cravat.us/CRAVAT/results/" + jobid + "/" + "Variant_Additional_Details.Result.tsv",
                    filename=details_path)
        return results_path, details_path
|
|
160
|
|
if __name__ == "__main__":
    submission = CravatSubmission()
    # NOTE(review): the FULL sys.argv (not sys.argv[1:]) is passed on purpose:
    # argv[0] (this script's path) fills the positional 'path' argument that
    # the Galaxy tool wrapper expects — presumably a Galaxy convention; verify
    # against the tool XML before changing.
    cmd_args = submission.get_cmd_args(sys.argv)
    # Galaxy converts semi-colons to X's. Switch it back
    analysis = cmd_args.analysis
    if analysis == "VESTXCHASM":
        analysis = "VEST;CHASM"
    # Submit the job (blocks, polling the server) and download the two
    # report files, then merge them into the requested output file.
    results_path, details_path = submission.submit(cmd_args.input, analysis)
    #submission.write_results('Results_test.tsv', 'Details_test.tsv', 'Out_test.tsv')
    submission.write_results(results_path, details_path, cmd_args.output)