Mercurial > repos > iuc > data_manager_pangolearn
comparison data_manager/pangolearn_dm.py @ 0:3eac657893fe draft
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_pangolearn commit 68adfad76cdb5ff13ec9fb49895a3cde2c502514"
| author | iuc |
|---|---|
| date | Sat, 24 Apr 2021 20:56:04 +0000 |
| parents | |
| children | 112bb7a9da3c |
comparison
equal
deleted
inserted
replaced
| -1:000000000000 | 0:3eac657893fe |
|---|---|
| 1 #!/usr/bin/env python | |
| 2 | |
| 3 import argparse | |
| 4 import datetime | |
| 5 import json | |
| 6 import operator | |
| 7 import os | |
| 8 import shutil | |
| 9 import sys | |
| 10 import tarfile | |
| 11 | |
| 12 import requests | |
| 13 | |
| 14 | |
def extract_date(tag_str):
    """Convert a pangoLEARN release tag into a sortable datetime.

    Tags look like "YYYY-MM-DD" or "YYYY-MM-DD_N", where N is a same-day
    re-release counter. N is folded into the datetime as minutes so that
    same-day releases still sort in release order.

    Raises ValueError if the tag has more than two "_"-separated parts,
    if the date part is not YYYY-MM-DD, or if the version counter would
    not fit into the minutes-of-the-day encoding (N >= 1440).
    """
    parts = tag_str.split("_")
    # Explicit raises instead of assert: asserts are stripped under `python -O`,
    # and a malformed upstream tag must always be reported.
    if len(parts) > 2:
        raise ValueError("expected maximum of two parts, got " + str(parts))
    tag_date = datetime.datetime.strptime(parts[0], "%Y-%m-%d")
    if len(parts) == 2:
        version = int(parts[1])
        # The version is stored as minutes of the day, so it cannot exceed 1439.
        if version >= 24 * 60:
            raise ValueError(
                "version suffix too large for minutes-of-day encoding: " + parts[1]
            )
        tag_date += datetime.timedelta(minutes=version)
    return tag_date
| 26 | |
| 27 | |
def get_model_list(
    existing_release_tags,
    url="https://api.github.com/repos/cov-lineages/pangoLEARN/releases",
):
    """Fetch pangoLEARN releases from the GitHub API.

    Returns a list of dicts (tag_name, name, date, tarball_url) for every
    release whose tag is not already in ``existing_release_tags``, in the
    order GitHub returns them.

    Raises requests.HTTPError on any error response.
    """
    response = requests.get(url)
    # Raise first, then parse: the original only parsed on status 200 and
    # fell through to `raise_for_status()` otherwise, which silently returns
    # None for non-200 codes that are not errors (e.g. 204).
    response.raise_for_status()
    release_list = response.json()
    return [
        dict(
            tag_name=e["tag_name"],
            name=e["name"],
            date=extract_date(e["tag_name"]),
            tarball_url=e["tarball_url"],
        )
        for e in release_list
        if e["tag_name"] not in existing_release_tags
    ]
| 48 | |
| 49 | |
def filter_by_date(existing_release_tags, start_date=None, end_date=None):
    """Return releases (excluding known tags) restricted to a date window.

    A release is kept when its date is neither before ``start_date`` nor
    after ``end_date``; either bound may be None to leave that side open.
    """
    selected = []
    for release in get_model_list(existing_release_tags):
        if start_date is not None and release["date"] < start_date:
            continue
        if end_date is not None and release["date"] > end_date:
            continue
        selected.append(release)
    return selected
| 60 | |
| 61 | |
def download_and_unpack(url, output_directory):
    """Download a release tarball and move its pangoLEARN directory into place.

    The GitHub tarball contains a single top-level directory; its nested
    "pangoLEARN" subdirectory is renamed to <output_directory>/<tag>, where
    <tag> is the final component of ``url``. Returns that tag.

    Raises requests.HTTPError on any error response.
    """
    response = requests.get(url)
    # Raise first instead of branching on status 200 so non-200 successes
    # cannot silently return None.
    response.raise_for_status()
    tmp_filename = url.split("/")[-1]
    # Context managers guarantee the download file and the tarfile handle
    # are closed even if extraction fails; the original leaked both and also
    # left a debug `shutil.copy(tmp_filename, "/tmp")` in place, removed here.
    with open(tmp_filename, "wb") as tmpfile:
        tmpfile.write(response.content)
    try:
        with tarfile.open(tmp_filename) as tf:
            pl_path = tf.next().name
            # NOTE(review): extractall() on an untrusted archive is vulnerable
            # to path traversal ("tar slip"); the source is the GitHub API, but
            # consider tarfile's `filter="data"` (Python 3.12+) when available.
            tf.extractall(output_directory)
    finally:
        # Always remove the downloaded tarball, even on extraction failure.
        os.unlink(tmp_filename)
    os.rename(
        os.path.join(output_directory, pl_path, "pangoLEARN"),
        os.path.join(output_directory, tmp_filename),
    )
    shutil.rmtree(os.path.join(output_directory, pl_path))
    return tmp_filename
| 82 | |
| 83 | |
def parse_date(d):
    """Parse a YYYY-MM-DD string into a (naive, midnight) datetime."""
    date_format = "%Y-%m-%d"
    return datetime.datetime.strptime(d, date_format)
| 86 | |
| 87 | |
if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument("--testmode", default=False, action="store_true")
    parser.add_argument("--latest", default=False, action="store_true")
    parser.add_argument("--start_date", type=parse_date)
    parser.add_argument("--end_date", type=parse_date)
    parser.add_argument("--overwrite", default=False, action="store_true")
    parser.add_argument("--pangolearn_format_version", default="1.0")
    parser.add_argument("datatable_name")
    parser.add_argument("galaxy_datamanager_filename")
    args = parser.parse_args()

    if args.testmode:
        # List the matching releases without downloading anything.
        # BUGFIX: the original omitted the required positional
        # `existing_release_tags` argument here, which raised TypeError;
        # in test mode no tags are known yet, so pass an empty set.
        releases = filter_by_date(
            set(), start_date=args.start_date, end_date=args.end_date
        )
        for release in releases:
            print(release["tag_name"], release["tarball_url"].split("/")[-1])
        sys.exit(0)

    with open(args.galaxy_datamanager_filename) as fh:
        config = json.load(fh)

    output_directory = config.get("output_data", [{}])[0].get("extra_files_path", None)
    data_manager_dict = {}
    data_manager_dict["data_tables"] = config.get("data_tables", {})
    data_manager_dict["data_tables"][args.datatable_name] = data_manager_dict[
        "data_tables"
    ].get(args.datatable_name, [])

    # NOTE: the data_manager_dict["data_tables"][args.datatable_name] is not actually populated with the
    # contents of the existing data table, so the "no-overwrite" logic and the
    # only-download-what-we-don't-have logic does not in fact work. It is left but unused for now.
    if not args.overwrite:
        existing_release_tags = set(
            el["value"]
            for el in data_manager_dict["data_tables"][args.datatable_name]
        )
    else:
        existing_release_tags = set()
    if args.latest:
        # Presumably the GitHub API returns the newest release first —
        # TODO(review): confirm; this also raises IndexError when no
        # releases are available.
        releases = [get_model_list(existing_release_tags)[0]]
    else:
        releases = filter_by_date(
            existing_release_tags, start_date=args.start_date, end_date=args.end_date
        )
    releases_to_download = [
        release
        for release in releases
        if release["tag_name"] not in existing_release_tags
    ]
    for release in releases_to_download:
        tag = download_and_unpack(release["tarball_url"], output_directory)
        data_manager_dict["data_tables"][args.datatable_name].append(
            dict(
                value=tag,
                description=release["name"],
                format_version=args.pangolearn_format_version,
                path=output_directory + "/" + tag,
            )
        )
    # Newest tag first: tags sort lexicographically because they are
    # YYYY-MM-DD(_N) strings.
    data_manager_dict["data_tables"][args.datatable_name].sort(
        key=operator.itemgetter("value"), reverse=True
    )
    with open(args.galaxy_datamanager_filename, "w") as fh:
        json.dump(data_manager_dict, fh, indent=2, sort_keys=True)
