0
|
1 import argparse
|
|
2 import datetime
|
|
3 import json
|
|
4 import os
|
|
5 import sys
|
|
6 from urllib.request import Request, urlopen
|
|
7
|
|
8
|
|
def url_download(url, workdir):
    """Download *url* into *workdir* and return the absolute path of the file.

    The destination filename is the basename of the URL. Any failure
    (network error, unwritable path, ...) terminates the process via
    ``sys.exit`` with the error message — this is a CLI tool, not a library.

    :param url: URL to fetch (any scheme ``urlopen`` supports).
    :param workdir: existing directory the file is written into.
    :returns: absolute path to the downloaded file.
    """
    file_path = os.path.abspath(os.path.join(workdir, os.path.basename(url)))
    src = None
    try:
        src = urlopen(Request(url))
        with open(file_path, 'wb') as dst:
            while True:
                # 64 KiB chunks: stream large files without holding them in memory.
                chunk = src.read(2 ** 16)
                if not chunk:
                    break
                dst.write(chunk)
    except Exception as e:
        # Abort the whole tool run with the error text as exit message.
        sys.exit(str(e))
    finally:
        if src:
            src.close()
    return file_path
|
|
29
|
|
30
|
|
def download(url, out_file):
    """Fetch *url* and rewrite *out_file* as a Galaxy data-manager JSON blob.

    *out_file* is read first to obtain the Galaxy-provided parameters
    (``output_data[0].extra_files_path`` is the directory to populate),
    then overwritten with the ``data_tables`` entry for table ``pima_pv``.

    :param url: URL of the file to download (e.g. plasmids_and_vectors.fasta).
    :param out_file: data-manager JSON file; read for params, then rewritten.
    """
    # utcnow() is deprecated since Python 3.12; now(timezone.utc) yields the
    # same %Y-%m-%d string.
    today = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")

    with open(out_file) as fh:
        params = json.load(fh)

    # Galaxy tells us where the extra files for this dataset must live.
    workdir = params['output_data'][0]['extra_files_path']
    # exist_ok: Galaxy may have pre-created the directory; that is not an error.
    os.makedirs(workdir, exist_ok=True)
    file_path = url_download(url, workdir)
    name = '%s_%s' % (today, os.path.basename(file_path))

    # One entry keyed by download date; 'path' points at the fetched file.
    data_manager_entry = {
        'value': today,
        'name': name,
        'path': file_path,
    }
    data_manager_json = {"data_tables": {"pima_pv": data_manager_entry}}

    with open(out_file, 'w') as fh:
        json.dump(data_manager_json, fh, sort_keys=True)
|
|
51
|
|
52
|
|
# CLI entry point: guard so importing this module does not trigger a download.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # required: fail fast with a clear argparse error instead of crashing
    # later on a None url/out_file.
    parser.add_argument('--url', dest='url', required=True, help='URL to download plasmids_and_vectors.fasta file')
    parser.add_argument('--out_file', dest='out_file', required=True, help='JSON output file')

    args = parser.parse_args()

    download(args.url, args.out_file)
|