cravat_submit/cravat_submit.py @ 21:67a13940d0bf (draft, default, tip)

commit message: Uploaded
author:         in_silico
date:           Thu, 16 Aug 2018 15:10:43 -0400
parent:         275d45d14350
from __future__ import print_function
import requests
import json
import time
try:
    # Python 3.0+
    from urllib.request import urlretrieve
except ImportError:
    # Python 2.7
    from urllib import urlretrieve
import sys
import csv
import argparse

"""
Tool's email:
username: cravatgalaxy@gmail.com
password: chicken_quesadilla
"""

email = 'cravatgalaxy@gmail.com'

class CravatSubmissionException(Exception):
    def __init__(self, message):
        super(CravatSubmissionException, self).__init__(message)


class CravatSubmission(object):

    def get_cmd_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.add_argument('path',
                            help="Path to python module")
        parser.add_argument('--input',
                            '-i',
                            required=True,
                            help='Input path to a cravat file for querying')
        parser.add_argument('--output',
                            '-o',
                            default=None,
                            help='Output path to write results from query')
        parser.add_argument('--analysis',
                            '-a',
                            required=True,
                            help="Cravat analysis. Should be 'VEST', 'CHASM', 'NONE', or 'VEST;CHASM'")
        return parser.parse_args(argv)

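    # Illustrative note (example arguments are hypothetical, not from the repository): the
    # parser above is fed the full sys.argv in the __main__ block, so argv[0] (the script
    # path itself) appears to satisfy the 'path' positional, and a call such as
    #   cravat_submit.py --input input.txt --output output.tsv --analysis CHASM
    # would yield args.input == 'input.txt', args.output == 'output.tsv', args.analysis == 'CHASM'.
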
    def is_valid_analysis(self, analysis):
        """Test if analysis is a recognized value."""
        analyses = ["VEST", "CHASM", "VEST;CHASM", ""]
        return analysis in analyses

    def is_skippable(self, s):
        """Test if a string line is skippable, i.e. a header or blank line."""
        if not isinstance(s, str):
            raise CravatSubmissionException("is_skippable accepts a string")
        skippable = s == "" \
                    or s[0] == "#" \
                    or s.startswith('"#For more information on CRAVAT') \
                    or s.isspace()
        return skippable

    def parse(self, s, sep='\t'):
        """Convert a string line to an array of values."""
        return s.strip().split(sep)

    def unparse(self, array, sep='\t', newline='\n'):
        """Convert an array of values to a writable string line."""
        return sep.join([str(i) for i in array]) + newline

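    # Round-trip example (values are illustrative): parse('chr1\t12345\t+\n') returns
    # ['chr1', '12345', '+'], and unparse(['chr1', '12345', '+']) returns 'chr1\t12345\t+\n'.
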
    def get_headers(self, path, pattern='Input line', sep='\t'):
        """Get the headers from a Results/Details file produced by a finished CRAVAT submission."""
        with open(path, 'r') as f:
            for line in f:
                if line.startswith(pattern):
                    return self.parse(line)
        return None

    def create_index(self, path, prop='Input line'):
        """
        Create an index of seek/tell positions in the file keyed by a line value. Used to record
        the locations of lines that are associated between two files, without reading the entire
        files into memory.
        """
        headers = self.get_headers(path)
        if prop not in headers:
            raise CravatSubmissionException("Index retrieval property not found in headers")
        prop_loc = headers.index(prop)
        index = {}
        with open(path, 'r') as f:
            pos = 0
            line = f.readline()
            while line != "":
                if not self.is_skippable(line):
                    parsed = self.parse(line)
                    if not parsed == headers:
                        index[parsed[prop_loc]] = pos
                pos = f.tell()
                line = f.readline()
        return index

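    # Shape of the returned index (offsets below are made up): for a Results file whose
    # 'Input line' column holds 1, 2, 3, ... this returns something like
    #   {'1': 1024, '2': 1097, '3': 1170}
    # mapping each input-line identifier to the byte offset of its row, so the row can later
    # be re-read with seek()/readline() rather than keeping whole files in memory.
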
    def get_header_val_dict(self, headers, vals):
        """Associate an array of header keys with an array of values."""
        return {header: val for (header, val) in zip(headers, vals)}

    def write_results(self, results_path, details_path, out_path, write_headers=True):
        """
        Using the paths to the Results and Details files from a CRAVAT submission,
        write the output file.
        """
        results_headers = self.get_headers(results_path)
        details_headers = self.get_headers(details_path)
        if results_headers is None \
           or details_headers is None:
            raise CravatSubmissionException("Unable to interpret headers in Results or Details submission files")
        headers = results_headers
        headers.extend(filter(lambda x: x not in headers, details_headers))
        results_index = self.create_index(results_path)
        details_index = self.create_index(details_path)
        with open(results_path, 'r') as results_file, \
             open(details_path, 'r') as details_file, \
             open(out_path, 'w') as out_file:
            if write_headers:
                out_file.write(self.unparse(headers))
            for line_id, file_pos in results_index.items():
                results_file.seek(file_pos)
                results_vals = self.parse(results_file.readline())
                results_dict = self.get_header_val_dict(results_headers, results_vals)
                if line_id in details_index:
                    details_file.seek(details_index[line_id])
                    details_vals = self.parse(details_file.readline())
                    details_dict = self.get_header_val_dict(details_headers, details_vals)
                    # On a repeated header, the Details value overwrites the Results value
                    results_dict.update(details_dict)
                line = [results_dict.get(header, 'None') for header in headers]
                out_file.write(self.unparse(line))

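    # Example of the merge above (column names and values are hypothetical): a Results row
    # {'Input line': '5', 'Chromosome': 'chr7'} joined with the Details row
    # {'Input line': '5', 'VEST score': '0.91'} yields one output line covering the union of
    # headers, with Details values taking precedence on shared columns and any column absent
    # from both files written as 'None'.
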
    def submit(self, in_path, analysis):
        """Make a POST request to submit a job to the production CRAVAT server."""
        if not self.is_valid_analysis(analysis):
            raise ValueError("Did not get a valid analysis.")
        # Create a POST request to submit the job to the CRAVAT production server
        submit = requests.post('http://cravat.us/CRAVAT/rest/service/submit',
                               files={'inputfile': open(in_path)},
                               data={'email': email,
                                     'analyses': analysis})
        # Check the job run status in a loop until the status is 'Success'
        jobid = json.loads(submit.text)['jobid']
        while True:
            check = requests.get('http://cravat.us/CRAVAT/rest/service/status', params={'jobid': jobid})
            status = json.loads(check.text)['status']
            #print(status)
            if status == 'Success':
                break
            else:
                time.sleep(2)
        # Download the completed job results to local files
        timestamp = time.strftime("%Y-%m-%d_%H-%M-%S_")
        results_path = 'Z_Variant_Result' + timestamp + '.tsv'
        details_path = 'Z_Additional_Details' + timestamp + '.tsv'
        urlretrieve("http://cravat.us/CRAVAT/results/" + jobid + "/" + "Variant.Result.tsv",
                    filename=results_path)
        urlretrieve("http://cravat.us/CRAVAT/results/" + jobid + "/" + "Variant_Additional_Details.Result.tsv",
                    filename=details_path)
        return results_path, details_path

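# Sketch of the exchange submit() expects (response bodies shown are illustrative, not
# captured server output): the submit endpoint returns JSON containing a 'jobid', e.g.
#   {"jobid": "cravatgalaxy_20180816_151043"}
# and the status endpoint returns JSON containing a 'status' field that is polled every two
# seconds until it reads 'Success', after which the two result TSVs are fetched by URL.
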
if __name__ == "__main__":
    submission = CravatSubmission()
    cmd_args = submission.get_cmd_args(sys.argv)
    # Galaxy converts semi-colons to X's. Switch it back
    analysis = cmd_args.analysis
    if analysis == "VESTXCHASM":
        analysis = "VEST;CHASM"
    results_path, details_path = submission.submit(cmd_args.input, analysis)
    #submission.write_results('Results_test.tsv', 'Details_test.tsv', 'Out_test.tsv')
    submission.write_results(results_path, details_path, cmd_args.output)
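
# Hypothetical invocation outside of Galaxy (file names are placeholders):
#   python cravat_submit.py --input variants.txt --output annotated.tsv --analysis VESTXCHASM
# The Galaxy wrapper encodes 'VEST;CHASM' as 'VESTXCHASM', which the block above translates
# back before submission, and sys.argv[0] fills the 'path' positional argument.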