comparison data_manager/data_manager_plant_tribes_scaffolds_download.py @ 4:93253aebaf2e draft

"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_plant_tribes_scaffolds_downloader commit 6085b7d47fcb47ad1587ac2542abdef782f61fe4"
author iuc
date Fri, 17 Jul 2020 04:19:35 -0400
parents 5833ef61c1f8
children 1550b1741780
diff -r 5833ef61c1f8 -r 93253aebaf2e data_manager/data_manager_plant_tribes_scaffolds_download.py
--- a/data_manager/data_manager_plant_tribes_scaffolds_download.py
+++ b/data_manager/data_manager_plant_tribes_scaffolds_download.py
@@ -5,13 +5,12 @@
 import json
 import os
 import shutil
 import sys
 import tarfile
-import urllib2
 import zipfile
-
+from urllib.request import Request, urlopen
 
 DEFAULT_DATA_TABLE_NAMES = ["plant_tribes_scaffolds"]
 
 
 def add_data_table_entry(data_manager_dict, data_table_name, data_table_entry):
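
Note: urllib2 does not exist on Python 3; its Request and urlopen now live in urllib.request, which is what the new import provides. A minimal sketch of the mapping (the URL below is a placeholder, not anything this data manager actually downloads):

    from urllib.request import Request, urlopen

    # urllib2.Request(url)  ->  Request(url)
    # urllib2.urlopen(req)  ->  urlopen(req)
    req = Request('https://example.org/scaffolds.tar.gz')
    with urlopen(req) as response:
        data = response.read()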
@@ -50,26 +49,24 @@
 def url_download(url, work_directory):
     file_path = os.path.join(work_directory, os.path.basename(url))
     src = None
     dst = None
     try:
-        req = urllib2.Request(url)
-        src = urllib2.urlopen(req)
-        dst = open(file_path, 'wb')
-        while True:
-            chunk = src.read(2**10)
-            if chunk:
-                dst.write(chunk)
-            else:
-                break
+        req = Request(url)
+        src = urlopen(req)
+        with open(file_path, 'wb') as dst:
+            while True:
+                chunk = src.read(2**10)
+                if chunk:
+                    dst.write(chunk)
+                else:
+                    break
     except Exception as e:
-        print >>sys.stderr, str(e)
+        sys.exit(str(e))
     finally:
         if src:
             src.close()
-        if dst:
-            dst.close()
     return file_path
 
 
 def download(target_directory, web_url, config_web_url, description, data_table_names=DEFAULT_DATA_TABLE_NAMES):
     data_manager_dict = {}
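
Note: once open() is used as a context manager, the destination handle is closed automatically, which is why the old "if dst: dst.close()" block in finally disappears; only the urlopen handle still gets an explicit close. The same streaming copy could also be written with shutil.copyfileobj (shutil is already imported by this module); this is a sketch only, not the code in this changeset:

    import shutil
    from urllib.request import urlopen

    def url_download_sketch(url, file_path):
        # Stream the response to disk in 1 KiB chunks; both handles are
        # closed by their context managers.
        with urlopen(url) as src, open(file_path, 'wb') as dst:
            shutil.copyfileobj(src, dst, length=2**10)
        return file_path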
@@ -115,22 +112,20 @@
 parser.add_argument('--web_url', dest='web_url', help='URL for downloading scaffolds')
 parser.add_argument('--config_web_url', dest='config_web_url', help='URL for downloading default configs')
 
 args = parser.parse_args()
 
-# Some magic happens with tools of type "manage_data" in that the output
-# file contains some JSON data that allows us to define the target directory.
-params = json.loads(open(args.out_file).read())
+with open(args.out_file) as fh:
+    params = json.loads(fh.read())
 target_directory = params['output_data'][0]['extra_files_path']
 make_directory(target_directory)
 
 if args.description is None:
     description = ''
 else:
     description = args.description.strip()
 
 # Get the scaffolds data.
 data_manager_dict = download(target_directory, args.web_url, args.config_web_url, description)
 # Write the JSON output dataset.
-fh = open(args.out_file, 'wb')
-fh.write(json.dumps(data_manager_dict))
-fh.close()
+with open(args.out_file, 'w') as fh:
+    fh.write(json.dumps(data_manager_dict, sort_keys=True))
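
Note: the comment dropped by this hunk described the "manage_data" handshake: Galaxy pre-populates --out_file with JSON naming the dataset's extra_files_path, and the data manager overwrites that file with the data table entries it produced. A rough illustration of both payloads (paths, field names, and values are invented for the example; the real columns are defined by the plant_tribes_scaffolds data table):

    import json

    # What Galaxy writes into --out_file before the job runs (illustrative):
    params = {
        "output_data": [
            {"extra_files_path": "/galaxy/job_working_directory/000/42/dataset_extra_files"}
        ]
    }
    target_directory = params['output_data'][0]['extra_files_path']

    # What the script writes back after downloading (illustrative entry):
    data_manager_dict = {
        "data_tables": {
            "plant_tribes_scaffolds": [
                {"value": "22Gv1.1", "name": "22Gv1.1", "path": target_directory, "description": "example"}
            ]
        }
    }
    print(json.dumps(data_manager_dict, sort_keys=True))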