Mercurial > repos > iuc > data_manager_qiime_database_downloader
comparison data_manager/data_manager_qiime_download.py @ 3:cc18f0f3514c draft
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_qiime_database_downloader commit 02d2967f77e3fa5a18aea63dc84aa9ab418dc165"
author | iuc |
---|---|
date | Sun, 22 Nov 2020 12:53:09 +0000 |
parents | cbe065fbd038 |
children | c4dba88e85ef |
comparison view — legend: equal | deleted | inserted | replaced
2:cbe065fbd038 | 3:cc18f0f3514c |
---|---|
67 NB the directory pointed to by 'extra_files_path' | 67 NB the directory pointed to by 'extra_files_path' |
68 doesn't exist initially, it is the job of the script | 68 doesn't exist initially, it is the job of the script |
69 to create it if necessary. | 69 to create it if necessary. |
70 | 70 |
71 """ | 71 """ |
72 params = json.loads(open(jsonfile).read()) | 72 with open(jsonfile) as fh: |
73 params = json.load(fh) | |
73 return (params['param_dict'], | 74 return (params['param_dict'], |
74 params['output_data'][0]['extra_files_path']) | 75 params['output_data'][0]['extra_files_path']) |
75 | 76 |
76 | 77 |
77 # Utility functions for creating data table dictionaries | 78 # Utility functions for creating data table dictionaries |
79 # Example usage: | 80 # Example usage: |
80 # >>> d = create_data_tables_dict() | 81 # >>> d = create_data_tables_dict() |
81 # >>> add_data_table(d,'my_data') | 82 # >>> add_data_table(d,'my_data') |
82 # >>> add_data_table_entry(dict(dbkey='hg19',value='human')) | 83 # >>> add_data_table_entry(dict(dbkey='hg19',value='human')) |
83 # >>> add_data_table_entry(dict(dbkey='mm9',value='mouse')) | 84 # >>> add_data_table_entry(dict(dbkey='mm9',value='mouse')) |
84 # >>> print str(json.dumps(d)) | 85 # >>> print(json.dumps(d)) |
85 def create_data_tables_dict(): | 86 def create_data_tables_dict(): |
86 """Return a dictionary for storing data table information | 87 """Return a dictionary for storing data table information |
87 | 88 |
88 Returns a dictionary that can be used with 'add_data_table' | 89 Returns a dictionary that can be used with 'add_data_table' |
89 and 'add_data_table_entry' to store information about a | 90 and 'add_data_table_entry' to store information about a |
124 | 125 |
125 def get_ftp_file(ftp, filename): | 126 def get_ftp_file(ftp, filename): |
126 """ | 127 """ |
127 """ | 128 """ |
128 try: | 129 try: |
129 ftp.retrbinary("RETR " + filename, open(filename, 'wb').write) | 130 with open(filename, 'wb') as fh: |
131 ftp.retrbinary("RETR " + filename, fh.write) | |
130 except Exception: | 132 except Exception: |
131 print("Error") | 133 print("Error") |
132 | 134 |
133 | 135 |
134 def download_archive(db, version, ext): | 136 def download_archive(db, version, ext): |
178 def extract_archive(filepath, ext, db): | 180 def extract_archive(filepath, ext, db): |
179 """ | 181 """ |
180 """ | 182 """ |
181 archive_content_path = "tmp" | 183 archive_content_path = "tmp" |
182 if ext == "tar.gz" or ext == "tgz": | 184 if ext == "tar.gz" or ext == "tgz": |
183 tar = tarfile.open(filepath) | 185 with tarfile.open(filepath) as tar: |
184 tar.extractall(path=archive_content_path) | 186 tar.extractall(path=archive_content_path) |
185 tar.close() | |
186 archive_content_path = find_archive_content_path(archive_content_path) | 187 archive_content_path = find_archive_content_path(archive_content_path) |
187 elif ext == "zip": | 188 elif ext == "zip": |
188 zip_ref = zipfile.ZipFile(filepath, 'r') | 189 with zipfile.ZipFile(filepath, 'r') as zip_ref: |
189 zip_ref.extractall(archive_content_path) | 190 zip_ref.extractall(archive_content_path) |
190 zip_ref.close() | |
191 archive_content_path = find_archive_content_path(archive_content_path) | 191 archive_content_path = find_archive_content_path(archive_content_path) |
192 return archive_content_path | 192 return archive_content_path |
193 | 193 |
194 | 194 |
195 def move_unite_files(archive_content_path, filename_prefix, name_prefix, data_tables, target_dir): | 195 def move_unite_files(archive_content_path, filename_prefix, name_prefix, data_tables, target_dir): |
370 args.version, | 370 args.version, |
371 target_dir) | 371 target_dir) |
372 | 372 |
373 # Write output JSON | 373 # Write output JSON |
374 print("Outputting JSON") | 374 print("Outputting JSON") |
375 print(str(json.dumps(data_tables))) | |
376 with open(jsonfile, 'w') as out: | 375 with open(jsonfile, 'w') as out: |
377 json.dump(data_tables, out) | 376 json.dump(data_tables, out, sort_keys=True) |
378 print("Done.") | 377 print("Done.") |