comparison jbrowse2.py @ 80:dff27c9f6d72 draft
planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/jbrowse2 commit 7bb0fa234bdbc42768b30e426472a47b2523297f
author | fubar |
---|---|
date | Wed, 03 Apr 2024 01:00:21 +0000 |
parents | 14ecbe46ae9f |
children | e9bcbed05108 |
79:14ecbe46ae9f | 80:dff27c9f6d72 |
---|---|
18 | 18 |
19 logging.basicConfig(level=logging.DEBUG) | 19 logging.basicConfig(level=logging.DEBUG) |
20 log = logging.getLogger("jbrowse") | 20 log = logging.getLogger("jbrowse") |
21 | 21 |
22 JB2VER = "v2.10.3" | 22 JB2VER = "v2.10.3" |
23 # version pinned for cloning | 23 # version pinned if cloning - but not cloning now |
24 | 24 |
25 TODAY = datetime.datetime.now().strftime("%Y-%m-%d") | 25 TODAY = datetime.datetime.now().strftime("%Y-%m-%d") |
26 SELF_LOCATION = os.path.dirname(os.path.realpath(__file__)) | 26 SELF_LOCATION = os.path.dirname(os.path.realpath(__file__)) |
27 GALAXY_INFRASTRUCTURE_URL = None | 27 GALAXY_INFRASTRUCTURE_URL = None |
28 mapped_chars = { | 28 mapped_chars = { |
399 return metadata | 399 return metadata |
400 | 400 |
401 | 401 |
402 class JbrowseConnector(object): | 402 class JbrowseConnector(object): |
403 def __init__(self, outdir, jbrowse2path): | 403 def __init__(self, outdir, jbrowse2path): |
404 self.trackCounter = 0 # to avoid name clashes | |
404 self.assemblies = [] # these require more than a few line diff. | 405 self.assemblies = [] # these require more than a few line diff. |
405 self.assmeta = {} | 406 self.assmeta = {} |
406 self.ass_first_contigs = ( | 407 self.ass_first_contigs = ( |
407 [] | 408 [] |
408 ) # for default session - these are read as first line of the assembly .fai | 409 ) # for default session - these are read as first line of the assembly .fai |
479 style_data, | 480 style_data, |
480 ] | 481 ] |
481 } | 482 } |
482 return wstyle | 483 return wstyle |
483 | 484 |
484 def urllib_get_2018(): | 485 def getNrow(self, url): |
485 # Using a protected member like this is not any more fragile | 486 useuri = url.startswith("https://") or url.startswith("http://") |
486 # than extending the class and using it. I would use it. | 487 if not useuri: |
487 url = "https://localhost:6667/my-endpoint" | 488 fl = open(url, "r").readlines() |
488 ssl._create_default_https_context = ssl._create_unverified_context | 489 nrow = len(fl) |
489 with urllib.request.urlopen(url=url) as f: | 490 else: |
490 print(f.read().decode("utf-8")) | 491 try: |
491 | 492 scontext = ssl.SSLContext(ssl.PROTOCOL_TLS) |
492 def urllib_get_2022(): | 493 scontext.verify_mode = ssl.VerifyMode.CERT_NONE |
493 # Finally! Able to use the publice API. Happy happy! | 494 with urllib.request.urlopen(url, context=scontext) as f: |
494 url = "https://localhost:6667/my-endpoint" | 495 fl = f.readlines() |
495 scontext = ssl.SSLContext(ssl.PROTOCOL_TLS) | 496 nrow = len(fl) |
496 scontext.verify_mode = ssl.VerifyMode.CERT_NONE | 497 except Exception: |
497 with urllib.request.urlopen(url=url, context=scontext) as f: | 498 nrow = 0 |
498 print(f.read().decode("utf-8")) | 499 logging.debug("### getNrow returning %d" % nrow) |
500 return nrow | |
499 | 501 |
500 def process_genomes(self, genomes): | 502 def process_genomes(self, genomes): |
501 assembly = [] | 503 assembly = [] |
502 assmeta = [] | 504 assmeta = [] |
503 useuri = False | 505 useuri = False |
505 for i, genome_node in enumerate(genomes): | 507 for i, genome_node in enumerate(genomes): |
506 this_genome = {} | 508 this_genome = {} |
507 if genome_node["useuri"] == "yes": | 509 if genome_node["useuri"] == "yes": |
508 useuri = True | 510 useuri = True |
509 genome_name = genome_node["label"].strip() | 511 genome_name = genome_node["label"].strip() |
512 if len(genome_name) == 0: | |
513 genome_name = os.path.splitext(os.path.basename(genome_node["path"]))[0] | |
510 if len(genome_name.split()) > 1: | 514 if len(genome_name.split()) > 1: |
511 genome_name = genome_name.split()[0] | 515 genome_name = genome_name.split()[0] |
512 # spaces and cruft break scripts when substituted | 516 # spaces and cruft break scripts when substituted |
513 if genome_name not in genome_names: | 517 if genome_name not in genome_names: |
514 # pafs with shared references | 518 # pafs with shared references |
682 # url = "%s/api/datasets/%s/display?to_ext=hic " % (self.giURL, dsId) | 686 # url = "%s/api/datasets/%s/display?to_ext=hic " % (self.giURL, dsId) |
683 useuri = trackData["useuri"].lower() == "yes" | 687 useuri = trackData["useuri"].lower() == "yes" |
684 if useuri: | 688 if useuri: |
685 uri = data | 689 uri = data |
686 else: | 690 else: |
687 uri = "%s.hic" % trackData["label"] | 691 uri = tId |
688 # slashes in names cause path trouble | 692 # slashes in names cause path trouble |
689 dest = os.path.join(self.outdir, uri) | 693 dest = os.path.join(self.outdir, uri) |
690 cmd = ["cp", data, dest] | 694 cmd = ["cp", data, dest] |
691 self.subprocess_check_call(cmd) | 695 self.subprocess_check_call(cmd) |
692 categ = trackData["category"] | 696 categ = trackData["category"] |
718 "url": "https://unpkg.com/jbrowse-plugin-mafviewer/dist/jbrowse-plugin-mafviewer.umd.production.min.js", | 722 "url": "https://unpkg.com/jbrowse-plugin-mafviewer/dist/jbrowse-plugin-mafviewer.umd.production.min.js", |
719 } | 723 } |
720 ] | 724 ] |
721 } | 725 } |
722 categ = trackData["category"] | 726 categ = trackData["category"] |
723 fname = "%s" % tId | 727 fname = tId |
724 dest = "%s/%s" % (self.outdir, fname) | 728 dest = "%s/%s" % (self.outdir, fname) |
725 gname = trackData["assemblyNames"] | 729 gname = trackData["assemblyNames"] |
726 | 730 |
727 cmd = [ | 731 cmd = [ |
728 "bash", | 732 "bash", |
809 logging.debug("### gff3rebase cmd = %s" % " ".join(cmd)) | 813 logging.debug("### gff3rebase cmd = %s" % " ".join(cmd)) |
810 gff3_rebased.close() | 814 gff3_rebased.close() |
811 # Replace original gff3 file | 815 # Replace original gff3 file |
812 shutil.copy(gff3_rebased.name, gff3) | 816 shutil.copy(gff3_rebased.name, gff3) |
813 os.unlink(gff3_rebased.name) | 817 os.unlink(gff3_rebased.name) |
814 url = "%s.gff3.gz" % trackData["label"] | 818 self.add_gff(gff3, trackData, **kwargs) |
815 dest = "%s/%s" % (self.outdir, url) | 819 |
816 self._sort_gff(gff3, dest) | 820 def add_bigwig(self, data, trackData): |
817 tId = trackData["label"] | 821 tId = trackData["label"] |
818 categ = trackData["category"] | |
819 trackDict = { | |
820 "type": "FeatureTrack", | |
821 "trackId": tId, | |
822 "name": trackData["name"], | |
823 "assemblyNames": [trackData["assemblyNames"]], | |
824 "category": [ | |
825 categ, | |
826 ], | |
827 "adapter": { | |
828 "type": "Gff3TabixAdapter", | |
829 "gffGzLocation": { | |
830 "uri": url, | |
831 }, | |
832 "index": { | |
833 "location": { | |
834 "uri": url + ".tbi", | |
835 } | |
836 }, | |
837 }, | |
838 "displays": [ | |
839 { | |
840 "type": "LinearBasicDisplay", | |
841 "displayId": "%s-LinearBasicDisplay" % tId, | |
842 }, | |
843 { | |
844 "type": "LinearArcDisplay", | |
845 "displayId": "%s-LinearArcDisplay" % tId, | |
846 }, | |
847 ], | |
848 } | |
849 style_json = self._prepare_track_style(trackDict) | |
850 trackDict["style"] = style_json | |
851 self.tracksToAdd[trackData["assemblyNames"]].append(trackDict) | |
852 self.trackIdlist.append(tId) | |
853 os.unlink(gff3) | |
854 | |
855 def add_bigwig(self, data, trackData): | |
856 useuri = trackData["useuri"].lower() == "yes" | 822 useuri = trackData["useuri"].lower() == "yes" |
857 if useuri: | 823 if useuri: |
858 url = data | 824 url = data |
859 else: | 825 else: |
860 url = "%s.bigwig" % trackData["label"] | 826 url = tId |
861 # slashes in names cause path trouble | 827 # slashes in names cause path trouble |
862 dest = os.path.join(self.outdir, url) | 828 dest = os.path.join(self.outdir, url) |
863 cmd = ["cp", data, dest] | 829 cmd = ["cp", data, dest] |
864 self.subprocess_check_call(cmd) | 830 self.subprocess_check_call(cmd) |
865 bwloc = {"uri": url} | 831 bwloc = {"uri": url} |
866 tId = trackData["label"] | |
867 categ = trackData["category"] | 832 categ = trackData["category"] |
868 trackDict = { | 833 trackDict = { |
869 "type": "QuantitativeTrack", | 834 "type": "QuantitativeTrack", |
870 "trackId": tId, | 835 "trackId": tId, |
871 "name": trackData["name"], | 836 "name": trackData["name"], |
895 bindex = bam_index | 860 bindex = bam_index |
896 categ = trackData["category"] | 861 categ = trackData["category"] |
897 if useuri: | 862 if useuri: |
898 url = data | 863 url = data |
899 else: | 864 else: |
900 fname = "%s.bam" % trackData["label"] | 865 fname = tId |
901 dest = "%s/%s" % (self.outdir, fname) | 866 dest = "%s/%s" % (self.outdir, fname) |
902 url = fname | 867 url = fname |
903 bindex = fname + ".bai" | 868 bindex = fname + ".bai" |
904 self.subprocess_check_call(["cp", data, dest]) | 869 self.subprocess_check_call(["cp", data, dest]) |
905 if bam_index is not None and os.path.exists(bam_index): | 870 if bam_index is not None and os.path.exists(bam_index): |
955 genseqad = "Not found" | 920 genseqad = "Not found" |
956 logging.warn("No adapter found for cram %s in gsa=%s" % (tId, gsa)) | 921 logging.warn("No adapter found for cram %s in gsa=%s" % (tId, gsa)) |
957 if useuri: | 922 if useuri: |
958 url = data | 923 url = data |
959 else: | 924 else: |
960 fname = "%s.cram" % trackData["label"] | 925 fname = tId |
961 dest = "%s/%s" % (self.outdir, fname) | 926 dest = os.path.join(self.outdir, fname) |
962 url = fname | 927 url = fname |
963 self.subprocess_check_call(["cp", data, dest]) | 928 self.subprocess_check_call(["cp", data, dest]) |
964 if cram_index is not None and os.path.exists(cram_index): | 929 if cram_index is not None and os.path.exists(cram_index): |
965 if not os.path.exists(dest + ".crai"): | 930 if not os.path.exists(dest + ".crai"): |
966 # most probably made by galaxy and stored in galaxy dirs, need to copy it to dest | 931 # most probably made by galaxy and stored in galaxy dirs, need to copy it to dest |
1008 categ = trackData["category"] | 973 categ = trackData["category"] |
1009 useuri = trackData["useuri"].lower() == "yes" | 974 useuri = trackData["useuri"].lower() == "yes" |
1010 if useuri: | 975 if useuri: |
1011 url = data | 976 url = data |
1012 else: | 977 else: |
1013 url = "%s.vcf.gz" % tId | 978 url = tId |
1014 dest = "%s/%s" % (self.outdir, url) | 979 dest = "%s/%s" % (self.outdir, url) |
1015 cmd = "bgzip -c %s > %s" % (data, dest) | 980 cmd = "bgzip -c %s > %s" % (data, dest) |
1016 self.subprocess_popen(cmd) | 981 self.subprocess_popen(cmd) |
1017 cmd = ["tabix", "-f", "-p", "vcf", dest] | 982 cmd = ["tabix", "-f", "-p", "vcf", dest] |
1018 self.subprocess_check_call(cmd) | 983 self.subprocess_check_call(cmd) |
1057 # Only index if not already done | 1022 # Only index if not already done |
1058 if not os.path.exists(dest): | 1023 if not os.path.exists(dest): |
1059 cmd = "jbrowse sort-gff '%s' | bgzip -c > '%s'" % ( | 1024 cmd = "jbrowse sort-gff '%s' | bgzip -c > '%s'" % ( |
1060 data, | 1025 data, |
1061 dest, | 1026 dest, |
1062 ) # "gff3sort.pl --precise '%s' | grep -v \"^$\" > '%s'" | 1027 ) |
1063 self.subprocess_popen(cmd) | 1028 self.subprocess_popen(cmd) |
1064 self.subprocess_check_call(["tabix", "-f", "-p", "gff", dest]) | 1029 self.subprocess_check_call(["tabix", "-f", "-p", "gff", dest]) |
1065 | 1030 |
1066 def _sort_bed(self, data, dest): | 1031 def _sort_bed(self, data, dest): |
1067 # Only index if not already done | 1032 # Only index if not already done |
1069 cmd = "sort -k1,1 -k2,2n '%s' | bgzip -c > '%s'" % (data, dest) | 1034 cmd = "sort -k1,1 -k2,2n '%s' | bgzip -c > '%s'" % (data, dest) |
1070 self.subprocess_popen(cmd) | 1035 self.subprocess_popen(cmd) |
1071 cmd = ["tabix", "-f", "-p", "bed", dest] | 1036 cmd = ["tabix", "-f", "-p", "bed", dest] |
1072 self.subprocess_check_call(cmd) | 1037 self.subprocess_check_call(cmd) |
1073 | 1038 |
1074 def add_gff(self, data, ext, trackData): | 1039 def add_gff(self, data, trackData): |
1040 tId = trackData["label"] | |
1075 useuri = trackData["useuri"].lower() == "yes" | 1041 useuri = trackData["useuri"].lower() == "yes" |
1076 if useuri: | 1042 if useuri: |
1077 url = trackData["path"] | 1043 url = trackData["path"] |
1078 else: | 1044 else: |
1079 url = "%s.%s.gz" % (trackData["label"], ext) | 1045 url = tId + ".gz" |
1080 dest = "%s/%s" % (self.outdir, url) | 1046 dest = "%s/%s" % (self.outdir, url) |
1081 self._sort_gff(data, dest) | 1047 self._sort_gff(data, dest) |
1082 tId = trackData["label"] | |
1083 categ = trackData["category"] | 1048 categ = trackData["category"] |
1084 trackDict = { | 1049 trackDict = { |
1085 "type": "FeatureTrack", | 1050 "type": "FeatureTrack", |
1086 "trackId": tId, | 1051 "trackId": tId, |
1087 "name": trackData["name"], | 1052 "name": trackData["name"], |
1121 categ = trackData["category"] | 1086 categ = trackData["category"] |
1122 useuri = trackData["useuri"].lower() == "yes" | 1087 useuri = trackData["useuri"].lower() == "yes" |
1123 if useuri: | 1088 if useuri: |
1124 url = data | 1089 url = data |
1125 else: | 1090 else: |
1126 url = "%s.%s.gz" % (trackData["label"], ext) | 1091 url = tId |
1127 dest = "%s/%s" % (self.outdir, url) | 1092 dest = "%s/%s" % (self.outdir, url) |
1128 self._sort_bed(data, dest) | 1093 self._sort_bed(data, dest) |
1129 trackDict = { | 1094 trackDict = { |
1130 "type": "FeatureTrack", | 1095 "type": "FeatureTrack", |
1131 "trackId": tId, | 1096 "trackId": tId, |
1166 self.trackIdlist.append(tId) | 1131 self.trackIdlist.append(tId) |
1167 | 1132 |
1168 def add_paf(self, data, trackData, pafOpts, **kwargs): | 1133 def add_paf(self, data, trackData, pafOpts, **kwargs): |
1169 tname = trackData["name"] | 1134 tname = trackData["name"] |
1170 tId = trackData["label"] | 1135 tId = trackData["label"] |
1171 url = "%s.paf" % tId | 1136 url = tId |
1172 useuri = data.startswith("http://") or data.startswith("https://") | 1137 useuri = data.startswith("http://") or data.startswith("https://") |
1173 if not useuri: | 1138 if not useuri: |
1174 dest = "%s/%s" % (self.outdir, url) | 1139 dest = "%s/%s" % (self.outdir, url) |
1175 self.symlink_or_copy(os.path.realpath(data), dest) | 1140 self.symlink_or_copy(os.path.realpath(data), dest) |
1141 nrow = self.getNrow(dest) | |
1176 else: | 1142 else: |
1177 url = data | 1143 url = data |
1144 nrow = self.getNrow(url) | |
1178 categ = trackData["category"] | 1145 categ = trackData["category"] |
1179 pgnames = [ | 1146 pgnames = [x.strip() for x in pafOpts["genome_label"].split(",")] |
1180 x.strip() for x in pafOpts["genome_label"].split(",") if len(x.strip()) > 0 | |
1181 ] | |
1182 pgpaths = [ | 1147 pgpaths = [ |
1183 x.strip() for x in pafOpts["genome"].split(",") if len(x.strip()) > 0 | 1148 x.strip() for x in pafOpts["genome"].split(",") if len(x.strip()) > 0 |
1184 ] | 1149 ] |
1185 passnames = [trackData["assemblyNames"]] # always first | 1150 passnames = [trackData["assemblyNames"]] # always first |
1151 for i, gp in enumerate(pgpaths): | |
1152 if len(pgnames[i].strip()) == 0: | |
1153 # user may have left it blank - the field cannot be made required if optional tracks are wanted. | |
1154 gn = os.path.basename(gp) | |
1155 pgnames[i] = os.path.splitext(gn)[0] | |
1186 logging.debug( | 1156 logging.debug( |
1187 "### add_paf got pafOpts=%s, pgnames=%s, pgpaths=%s for %s" | 1157 "### add_paf got pafOpts=%s, pgnames=%s, pgpaths=%s for %s" |
1188 % (pafOpts, pgnames, pgpaths, tId) | 1158 % (pafOpts, pgnames, pgpaths, tId) |
1189 ) | 1159 ) |
1190 for i, gname in enumerate(pgnames): | 1160 for i, gp in enumerate(pgpaths): |
1161 gname = pgnames[i] | |
1191 if len(gname.split()) > 1: | 1162 if len(gname.split()) > 1: |
1192 gname = gname.split()[0] | 1163 gname = gname.split()[0] |
1193 passnames.append(gname) | 1164 passnames.append(gname) |
1194 # trouble from spacey names in command lines avoidance | 1165 # trouble from spacey names in command lines avoidance |
1195 useuri = pgpaths[i].startswith("http://") or pgpaths[i].startswith( | 1166 useuri = gp.startswith("http://") or gp.startswith("https://") |
1196 "https://" | |
1197 ) | |
1198 | 1167 |
1199 if gname not in self.genome_names: | 1168 if gname not in self.genome_names: |
1200 # ignore if already there - eg for duplicates among pafs. | 1169 # ignore if already there - eg for duplicates among pafs. |
1201 asstrack, first_contig = self.make_assembly(pgpaths[i], gname, useuri) | 1170 asstrack, first_contig = self.make_assembly(gp, gname, useuri) |
1202 self.genome_names.append(gname) | 1171 self.genome_names.append(gname) |
1203 self.tracksToAdd[gname] = [] | 1172 self.tracksToAdd[gname] = [] |
1204 self.assemblies.append(asstrack) | 1173 self.assemblies.append(asstrack) |
1205 trackDict = { | 1174 trackDict = { |
1206 "type": "SyntenyTrack", | 1175 "type": "SyntenyTrack", |
1227 { | 1196 { |
1228 "type": "LinearComparativeDisplay", | 1197 "type": "LinearComparativeDisplay", |
1229 "displayId": "%s-LinearComparativeDisplay" % tId, | 1198 "displayId": "%s-LinearComparativeDisplay" % tId, |
1230 }, | 1199 }, |
1231 { | 1200 { |
1232 "type": "LinearSyntenyDisplay", | 1201 "type": "LinearBasicDisplay", |
1233 "displayId": "%s-LinearSyntenyDisplay" % tId, | 1202 "displayId": "%s-LinearSyntenyDisplay" % tId, |
1234 }, | 1203 }, |
1235 ], | 1204 ], |
1236 } | 1205 } |
1237 style_json = { | 1206 if nrow > 50000: |
1238 "displays": [ | 1207 style_json = { |
1239 { | 1208 "displays": [ |
1240 "type": "LGVSyntenyDisplay", | 1209 { |
1241 "displayId": "%s-LGVSyntenyDisplay" % tId, | 1210 "type": "LGVSyntenyDisplay", |
1242 } | 1211 "displayId": "%s-LGVSyntenyDisplay" % tId, |
1243 ] | 1212 } |
1244 } | 1213 ] |
1214 } | |
1215 else: | |
1216 style_json = { | |
1217 "displays": [ | |
1218 { | |
1219 "type": "LinearBasicDisplay", | |
1220 "displayId": "%s-LinearBasicDisplay" % tId, | |
1221 } | |
1222 ] | |
1223 } | |
1224 | |
1245 trackDict["style"] = style_json | 1225 trackDict["style"] = style_json |
1246 self.tracksToAdd[trackData["assemblyNames"]].append(trackDict) | 1226 self.tracksToAdd[trackData["assemblyNames"]].append(trackDict) |
1247 self.trackIdlist.append(tId) | 1227 self.trackIdlist.append(tId) |
1248 | 1228 |
1249 def process_annotations(self, track): | 1229 def process_annotations(self, track): |
1250 category = track["category"].replace("__pd__date__pd__", TODAY) | 1230 category = track["category"].replace("__pd__date__pd__", TODAY) |
1251 for i, ( | 1231 for trackIndex, ( |
1252 dataset_path, | 1232 dataset_path, |
1253 dataset_ext, | 1233 dataset_ext, |
1254 useuri, | 1234 useuri, |
1255 track_human_label, | 1235 track_human_label, |
1256 extra_metadata, | 1236 extra_metadata, |
1262 track_human_label = track_human_label.replace(" ", "_") | 1242 track_human_label = track_human_label.replace(" ", "_") |
1263 outputTrackConfig = { | 1243 outputTrackConfig = { |
1264 "category": category, | 1244 "category": category, |
1265 "style": {}, | 1245 "style": {}, |
1266 } | 1246 } |
1267 | |
1268 outputTrackConfig["assemblyNames"] = track["assemblyNames"] | 1247 outputTrackConfig["assemblyNames"] = track["assemblyNames"] |
1269 outputTrackConfig["key"] = track_human_label | 1248 outputTrackConfig["key"] = track_human_label |
1270 outputTrackConfig["useuri"] = useuri | 1249 outputTrackConfig["useuri"] = useuri |
1271 outputTrackConfig["path"] = dataset_path | 1250 outputTrackConfig["path"] = dataset_path |
1272 outputTrackConfig["ext"] = dataset_ext | 1251 outputTrackConfig["ext"] = dataset_ext |
1273 | 1252 |
1274 outputTrackConfig["trackset"] = track.get("trackset", {}) | 1253 outputTrackConfig["trackset"] = track.get("trackset", {}) |
1275 outputTrackConfig["label"] = "%s_%i_%s" % ( | 1254 outputTrackConfig["label"] = "%s_%d.%s" % ( |
1255 track_human_label, | |
1256 self.trackCounter, | |
1276 dataset_ext, | 1257 dataset_ext, |
1277 i, | |
1278 track_human_label, | |
1279 ) | 1258 ) |
1259 self.trackCounter += 1 | |
1280 outputTrackConfig["metadata"] = extra_metadata | 1260 outputTrackConfig["metadata"] = extra_metadata |
1281 outputTrackConfig["name"] = track_human_label | 1261 outputTrackConfig["name"] = track_human_label |
1282 | 1262 |
1283 if dataset_ext in ("gff", "gff3"): | 1263 if dataset_ext in ("gff", "gff3"): |
1284 self.add_gff( | 1264 self.add_gff( |
1285 dataset_path, | 1265 dataset_path, |
1286 dataset_ext, | |
1287 outputTrackConfig, | 1266 outputTrackConfig, |
1288 ) | 1267 ) |
1289 elif dataset_ext in ("hic", "juicebox_hic"): | 1268 elif dataset_ext in ("hic", "juicebox_hic"): |
1290 self.add_hic( | 1269 self.add_hic( |
1291 dataset_path, | 1270 dataset_path, |
1292 outputTrackConfig, | 1271 outputTrackConfig, |
1293 ) | 1272 ) |
1294 elif dataset_ext in ("cool", "mcool", "scool"): | 1273 elif dataset_ext in ("cool", "mcool", "scool"): |
1295 hic_url = "%s_%d.hic" % (track_human_label, i) | 1274 hic_url = outputTrackConfig["label"] |
1296 hic_path = os.path.join(self.outdir, hic_url) | 1275 hic_path = os.path.join(self.outdir, hic_url) |
1297 self.subprocess_check_call( | 1276 self.subprocess_check_call( |
1298 [ | 1277 [ |
1299 "hictk", | 1278 "hictk", |
1300 "convert", | 1279 "convert", |
1386 logging.debug( | 1365 logging.debug( |
1387 "### No style data in default data %s for %s" | 1366 "### No style data in default data %s for %s" |
1388 % (default_data, tId) | 1367 % (default_data, tId) |
1389 ) | 1368 ) |
1390 style_data = {"type": "LinearBasicDisplay"} | 1369 style_data = {"type": "LinearBasicDisplay"} |
1391 if "displays" in track_conf: | 1370 if "displays" in track_conf: |
1392 disp = track_conf["displays"][0]["type"] | 1371 disp = track_conf["displays"][0]["type"] |
1393 style_data["type"] = disp | 1372 style_data["type"] = disp |
1394 if track_conf.get("style_labels", None): | 1373 if track_conf.get("style_labels", None): |
1395 # TODO fix this: it should probably go in a renderer block (SvgFeatureRenderer) but still does not work | 1374 # TODO fix this: it should probably go in a renderer block (SvgFeatureRenderer) but still does not work |
1396 # TODO move this to per track displays? | 1375 # TODO move this to per track displays? |
1397 style_data["labels"] = track_conf["style_labels"] | 1376 style_data["labels"] = track_conf["style_labels"] |
1398 tracks_data.append( | 1377 tracks_data.append( |
1416 first = [x for x in self.ass_first_contigs if x[0] == gnome] | 1395 first = [x for x in self.ass_first_contigs if x[0] == gnome] |
1417 if len(first) > 0: | 1396 if len(first) > 0: |
1418 [gnome, refName, end] = first[0] | 1397 [gnome, refName, end] = first[0] |
1419 start = 0 | 1398 start = 0 |
1420 end = int(end) | 1399 end = int(end) |
1421 refName = self.assmeta[gnome][0].get("genome_firstcontig", None) | |
1422 drdict = { | 1400 drdict = { |
1423 "refName": refName, | 1401 "refName": refName, |
1424 "start": start, | 1402 "start": start, |
1425 "end": end, | 1403 "end": end, |
1426 "reversed": False, | 1404 "reversed": False, |
1472 with open(self.config_json_file, "w") as config_file: | 1450 with open(self.config_json_file, "w") as config_file: |
1473 json.dump(self.config_json, config_file, indent=2) | 1451 json.dump(self.config_json, config_file, indent=2) |
1474 | 1452 |
1475 def add_defsess_to_index(self, data): | 1453 def add_defsess_to_index(self, data): |
1476 """ | 1454 """ |
1477 This was included on request of the new codeowner from Anthony's IUC PR. | 1455 Included on request of the new codeowner, from Anthony's IUC PR. |
1478 Now fixed to deal with each assembly and tracks separately. | 1456 It had to be fixed to keep each assembly with its associated tracks in the default view. |
1479 Originally used only the first assembly, putting all tracks there and | 1457 Originally it used only the first assembly, put all tracks there, and so broke the session |
1480 generally falling apart when tested with 2 or more. Seems ironic that | 1458 when tested with 2 or more. It seems ironic that this vital feature can never have been tested, |
1481 this vital feature was never tested given the rejection of my original IUC PR | 1459 given that my declining to add it was the basis for a reviewer's rejection of my original IUC PR. |
1482 because it was not there. And no, reviewer, I do not want this important piece of history | 1460 Apparently a simple 2 line diff. |
1483 removed. I prefer that it remain here since it has caused me considerable discomfort. | 1461 |
1462 The technical problem is that this index.html hack overrides the form fields | |
1463 for track controls, such as the default visibility, that were mostly working. They need to be removed from the form by whoever | |
1464 decided this method was a good workaround for the JB2 bug that breaks config.json style default | |
1465 view coordinates. | |
1466 | |
1467 And no, dear reviewer of this code, please leave this piece of history. | |
1468 It is true and I prefer that it remain here to document my considerable discomfort at this unfair treatment. | |
1484 | 1469 |
1485 ---------------------------------------------------------- | 1470 ---------------------------------------------------------- |
1486 Add some default session settings: set some assemblies/tracks on/off | 1471 Add some default session settings: set some assemblies/tracks on/off |
1487 | 1472 |
1488 This allows to select a default view: | 1473 This allows to select a default view: |
1624 GALAXY_INFRASTRUCTURE_URL = "http://" + GALAXY_INFRASTRUCTURE_URL | 1609 GALAXY_INFRASTRUCTURE_URL = "http://" + GALAXY_INFRASTRUCTURE_URL |
1625 | 1610 |
1626 jc = JbrowseConnector(outdir=args.outdir, jbrowse2path=args.jbrowse2path) | 1611 jc = JbrowseConnector(outdir=args.outdir, jbrowse2path=args.jbrowse2path) |
1627 | 1612 |
1628 default_session_data = {} | 1613 default_session_data = {} |
1629 | |
1630 for ass in root.findall("assembly"): | 1614 for ass in root.findall("assembly"): |
1631 genomes = [ | 1615 genomes = [ |
1632 { | 1616 { |
1633 "path": x.attrib["path"], | 1617 "path": x.attrib["path"], |
1634 "label": x.attrib["label"], | 1618 "label": x.attrib["label"], |
1664 | 1648 |
1665 trackfiles = track.findall("files/trackFile") | 1649 trackfiles = track.findall("files/trackFile") |
1666 if trackfiles: | 1650 if trackfiles: |
1667 for x in track.findall("files/trackFile"): | 1651 for x in track.findall("files/trackFile"): |
1668 track_conf["label"] = x.attrib["label"] | 1652 track_conf["label"] = x.attrib["label"] |
1669 trackkey = track_conf["label"] | |
1670 track_conf["useuri"] = x.attrib["useuri"] | 1653 track_conf["useuri"] = x.attrib["useuri"] |
1671 if is_multi_bigwig: | 1654 if is_multi_bigwig: |
1672 multi_bigwig_paths.append( | 1655 multi_bigwig_paths.append( |
1673 ( | 1656 ( |
1674 x.attrib["label"], | 1657 x.attrib["label"], |
1764 assconf = jc.config_json.get("assemblies", []) | 1747 assconf = jc.config_json.get("assemblies", []) |
1765 assconf += jc.assemblies | 1748 assconf += jc.assemblies |
1766 jc.config_json["assemblies"] = assconf | 1749 jc.config_json["assemblies"] = assconf |
1767 logging.debug("assemblies=%s, gnames=%s" % (assconf, jc.genome_names)) | 1750 logging.debug("assemblies=%s, gnames=%s" % (assconf, jc.genome_names)) |
1768 jc.write_config() | 1751 jc.write_config() |
1769 # jc.add_default_session(default_session_data) | 1752 jc.add_default_session(default_session_data) |
1770 # note that this can be left in the config.json but has NO EFFECT if add_defsess_to_index is called. | 1753 # note that this can be left in the config.json but has NO EFFECT if add_defsess_to_index is called. |
1771 jc.add_defsess_to_index(default_session_data) | 1754 # jc.add_defsess_to_index(default_session_data) |
1772 # jc.text_index() not sure what broke here. | 1755 # jc.text_index() not sure what broke here. |
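
The new getNrow() helper (new lines 485-500) replaces the removed urllib_get_2018/urllib_get_2022 examples and reads remote files over TLS without certificate verification. A minimal standalone sketch of the same pattern follows; the helper name count_remote_lines and its fallback behaviour are illustrative assumptions, not part of the tool:

```python
import ssl
import urllib.request


def count_remote_lines(url):
    """Count lines at a URL, accepting unverified certificates as getNrow() does."""
    scontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    # CERT_NONE skips certificate verification, matching the diff's getNrow().
    scontext.verify_mode = ssl.VerifyMode.CERT_NONE
    try:
        with urllib.request.urlopen(url, context=scontext) as f:
            return len(f.readlines())
    except Exception:
        # getNrow() likewise falls back to 0 on any failure.
        return 0


if __name__ == "__main__":
    print(count_remote_lines("https://example.org/"))
```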
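
add_paf() now counts PAF rows with getNrow() and picks the default display accordingly (LGVSyntenyDisplay above 50000 rows, LinearBasicDisplay otherwise) while building a SyntenyTrack entry. Below is a generic, hedged sketch of the JBrowse 2 SyntenyTrack/PAFAdapter shape this targets; the assembly names and file name are placeholders, the adapter lines elided from the diff are not reproduced, and the query/target order of assemblyNames should be checked against the JBrowse 2 documentation:

```python
# Illustrative only: a minimal SyntenyTrack entry using JBrowse 2's PAFAdapter.
# "target_assembly", "query_assembly" and "alignment.paf" are invented placeholders;
# the real code derives these from trackData and pafOpts.
track_id = "example_paf_track"
paf_track = {
    "type": "SyntenyTrack",
    "trackId": track_id,
    "name": "example PAF",
    "assemblyNames": ["target_assembly", "query_assembly"],
    "adapter": {
        "type": "PAFAdapter",
        "pafLocation": {"uri": "alignment.paf"},
        "assemblyNames": ["target_assembly", "query_assembly"],
    },
    "displays": [
        {
            "type": "LinearComparativeDisplay",
            "displayId": "%s-LinearComparativeDisplay" % track_id,
        },
        {
            "type": "LinearSyntenyDisplay",
            "displayId": "%s-LinearSyntenyDisplay" % track_id,
        },
    ],
}
```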
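
The last hunk re-enables jc.add_default_session() and comments out add_defsess_to_index(), so the default view is written into config.json rather than patched into index.html. For orientation, this is roughly the shape of a config.json-level defaultSession in JBrowse 2; the session name and view id below are invented for illustration and are not what this tool emits:

```python
import json

# Hedged sketch of a JBrowse 2 config.json "defaultSession" block.
# "Default view" and "example-LinearGenomeView" are made-up labels; the real
# values come from the tool's assembly and track configuration.
default_session = {
    "defaultSession": {
        "name": "Default view",
        "views": [
            {
                "id": "example-LinearGenomeView",
                "type": "LinearGenomeView",
            }
        ],
    }
}
print(json.dumps(default_session, indent=2))
```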