x/static/js/4133.451f5e47.chunk.js.map @ 125:49f3d3878413 draft
planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/jbrowse2 commit 5ea1f9c1eef1de76232e69aa6d34cda77d90d566
author   | fubar
date     | Sat, 05 Oct 2024 23:58:05 +0000
parents  |
children |
{"version":3,"file":"static/js/4133.451f5e47.chunk.js","mappings":"+IAAA,MAAMA,GAMS,MAAMC,EAArB,cACE,KAAAC,QAAU,IAAIC,IACd,KAAAC,gBAAkB,IAAIC,eAyCxB,CAjCE,SAAAC,CAAUC,EAAsB,IAAIP,GAClC,GAAIQ,KAAKD,OAAOE,QACd,MAAM,IAAIC,MAAM,yCAKlBF,KAAKN,QAAQS,IAAIJ,GACbA,EAAOE,QAGTD,KAAKI,cAAcL,GACyB,mBAA5BA,EAAOM,kBACvBN,EAAOM,iBAAiB,SAAS,KAC/BL,KAAKI,cAAcL,EAAO,GAGhC,CAEA,aAAAK,CAAcL,GACZC,KAAKN,QAAQY,OAAOP,GACM,IAAtBC,KAAKN,QAAQa,MACfP,KAAKJ,gBAAgBY,OAEzB,CAEA,UAAIT,GACF,OAAOC,KAAKJ,gBAAgBG,MAC9B,CAEA,KAAAS,GACER,KAAKJ,gBAAgBY,OACvB,EChDa,MAAMC,EAArB,cACE,KAAAC,UAAY,IAAIf,GAclB,CAXE,WAAAgB,CAAYC,EAAqB,QAC/BZ,KAAKU,UAAUP,IAAIS,GACnBA,EAASZ,KAAKa,eAChB,CAEA,QAAAD,CAASE,GACPd,KAAKa,eAAiBC,EACtB,IAAK,MAAMC,KAAOf,KAAKU,UACrBK,EAAID,EAER,ECSa,MAAME,EAWnB,WAAAC,EAAY,KACVC,EAAI,MACJC,IAKA,GAAoB,mBAATD,EACT,MAAM,IAAIE,UAAU,6BAEtB,GAAqB,iBAAVD,EACT,MAAM,IAAIC,UAAU,4BAEtB,GACuB,mBAAdD,EAAME,KACQ,mBAAdF,EAAMG,KACW,mBAAjBH,EAAMb,OAEb,MAAM,IAAIc,UACR,qEAIJpB,KAAKmB,MAAQA,EACbnB,KAAKuB,aAAeL,CACtB,CAEA,uBAAOM,CAAiBC,GACtB,MAEqB,eAAnBA,EAAUC,MAGS,gBAAnBD,EAAUE,MAEY,wBAAtBF,EAAUX,SAEY,mBAAtBW,EAAUX,OAEd,CAEA,KAAAc,CAAMC,EAAaC,GACb9B,KAAKmB,MAAME,IAAIQ,KAASC,GAC1B9B,KAAKmB,MAAMb,OAAOuB,EAEtB,CAEA,IAAAX,CAAKW,EAAaE,EAAShC,EAAsBiC,GAC/C,MAAMC,EAAU,IAAIxC,EACdyC,EAAiB,IAAIzB,EAC3ByB,EAAevB,YAAYqB,GAC3B,MAAMG,EAAqB,CACzBF,QAASA,EACTG,QAASpC,KAAKuB,aAAaQ,EAAME,EAAQlC,QAASe,IAChDoB,EAAetB,SAASE,EAAQ,IAElCuB,SAAS,EACTH,iBACA,WAAIjC,GACF,OAAOD,KAAKiC,QAAQlC,OAAOE,OAC7B,GAEFkC,EAASF,QAAQnC,UAAUC,GAG3BoC,EAASF,QAAQlC,OAAOM,iBAAiB,SAAS,KAC3C8B,EAASE,SACZrC,KAAK4B,MAAMC,EAAKM,EAClB,IAIFA,EAASC,QACNE,MACC,KACEH,EAASE,SAAU,CAAI,IAEzB,KACEF,EAASE,SAAU,EAGnBrC,KAAK4B,MAAMC,EAAKM,EAAS,IAG5BI,OAAMC,IAIL,MADAC,QAAQD,MAAMA,GACRA,CAAK,IAGfxC,KAAKmB,MAAMG,IAAIO,EAAKM,EACtB,CAEA,yBAAOO,CAAsBN,EAAqBrC,GAIhD,SAAS4C,IACP,GAAI5C,aAAM,EAANA,EAAQE,QACV,MAAM2C,OAAOC,OAAO,IAAI3C,MAAM,WAAY,CAAEyB,KAAM,eAEtD,CAEA,OAAOS,EAAQE,MACbQ,IACEH,IACOG,KAETN,IAEE,MADAG,IACMH,CAAK,GAGjB,CAEA,GAAAO,CAAIlB,GACF,OAAO7B,KAAKmB,MAAM4B,IAAIlB,EACxB,CAeA,GAAAR,CACEQ,EACAE,EACAhC,EACAiC,GAEA,IAAKjC,GAAUgC,aAAgBiB,YAC7B,MAAM,IAAI5B,UACR,yGAGJ,MAAM6B,EAAajD,KAAKmB,MAAME,IAAIQ,GAElC,OAAIoB,EACEA,EAAWhD,UAAYgD,EAAWZ,SAEpCrC,KAAK4B,MAAMC,EAAKoB,GACTjD,KAAKqB,IAAIQ,EAAKE,EAAMhC,EAAQiC,IAGjCiB,EAAWZ,QAENY,EAAWb,SAKpBa,EAAWhB,QAAQnC,UAAUC,GAC7BkD,EAAWf,eAAevB,YAAYqB,GAE/BhB,EAAsB0B,mBAC3BO,EAAWb,QACXrC,KAKJC,KAAKkB,KAAKW,EAAKE,EAAMhC,EAAQiC,GACtBhB,EAAsB0B,mBAG3B1C,KAAKmB,MAAME,IAAIQ,GAAMO,QACrBrC,GAEJ,CAQA,OAAO8B,GACL,MAAMqB,EAAclD,KAAKmB,MAAME,IAAIQ,GAC/BqB,IACGA,EAAYb,SACfa,EAAYjB,QAAQzB,QAEtBR,KAAKmB,MAAMb,OAAOuB,GAEtB,CAMA,KAAAsB,GAEE,MAAMC,EAAUpD,KAAKmB,MAAMkC,OAC3B,IAAIC,EAAc,EAClB,IAAK,IAAIR,EAASM,EAAQG,QAAST,EAAOU,KAAMV,EAASM,EAAQG,OAC/DvD,KAAKM,OAAOwC,EAAOW,OACnBH,GAAe,EAEjB,OAAOA,CACT,E,6CClPa,MAAMI,EAGnB,WAAAzC,CAAY0C,EAAuBC,GACjC5D,KAAK2D,cAAgBA,EACrB3D,KAAK4D,aAAeA,CACtB,CAEA,QAAAC,GACE,MAAO,GAAG7D,KAAK2D,iBAAiB3D,KAAK4D,cACvC,CAEA,SAAAE,CAAUC,GACR,OACE/D,KAAK2D,cAAgBI,EAAEJ,eAAiB3D,KAAK4D,aAAeG,EAAEH,YAElE,CAEA,UAAOI,IAAOC,GACZ,IAAID,EACAE,EAAI,EACR,MAAQF,EAAKE,GAAK,EAChBF,EAAMC,EAAKC,GAEb,KAAOA,EAAID,EAAKE,OAAQD,GAAK,EACvBF,EAAIF,UAAUG,EAAKC,IAAM,IAC3BF,EAAMC,EAAKC,IAGf,OAAOF,CACT,EAEK,SAASI,EAAUC,EAAeC,EAAS,EAAGC,GAAY,GAC/D,GAAIA,EACF,MAAM,IAAIrE,MAAM,mDAGlB,OAAO,IAAIwD,EACW,cAApBW,EAAMC,EAAS,GACO,WAApBD,EAAMC,EAAS,GACK,SAApBD,EAAMC,EAAS,GACK,MAApBD,EAAMC,EAAS,GACK,IAApBD,EAAMC,EAAS,GACfD,EAAMC,EAAS,GAChBD,EAAMC,EAAS,IAAM,EAAKD,EAAMC,GAErC,CC3Ce,MAAME,EAGnB,WAAAvD,CACSwD,EACAC,EACAC,EACAC,GAHA,KAAAH,KAAAA,EACA,KAAAC,KAAAA,EACA,KAAAC,IAAAA,EACA,KAAAC,aAAAA,CACN,CA
EH,cAAAC,GACE,MAAO,GAAG7E,KAAKyE,KAAKZ,eAAe7D,KAAK0E,KAAKb,mBAC3C7D,KAAK2E,oBACU3E,KAAK8E,gBACxB,CAEA,QAAAjB,GACE,OAAO7D,KAAK6E,gBACd,CAEA,SAAAf,CAAUC,GACR,OACE/D,KAAKyE,KAAKX,UAAUC,EAAEU,OACtBzE,KAAK0E,KAAKZ,UAAUC,EAAEW,OACtB1E,KAAK2E,IAAMZ,EAAEY,GAEjB,CAEA,WAAAG,GACE,YAA0BC,IAAtB/E,KAAK4E,aACA5E,KAAK4E,aAEP5E,KAAK0E,KAAKf,cAAgB,MAAY3D,KAAKyE,KAAKd,aACzD,E,wBChCK,SAASqB,EAAQC,GACtB,OAAO,IAAIC,SAAQC,GAAWC,WAAWD,EAASF,IACpD,CA0EO,SAASI,EAAeC,EAAiBC,GAC9C,MAAMC,EAAwB,GAC9B,IAAIC,EAEJ,GAAsB,IAAlBH,EAAOnB,OACT,OAAOmB,EAGTA,EAAOI,MAAK,CAACC,EAAIC,KACf,MAAMC,EAAMF,EAAGlB,KAAKd,cAAgBiC,EAAGnB,KAAKd,cAC5C,OAAe,IAARkC,EAAYF,EAAGlB,KAAKb,aAAegC,EAAGnB,KAAKb,aAAeiC,CAAG,IAGtE,IAAK,MAAMC,KAASR,IACbC,GAAUO,EAAMpB,KAAKZ,UAAUyB,GAAU,UAC1BR,IAAdU,GACFD,EAAaO,KAAKD,GAClBL,EAAYK,IAvCWE,EAyCJP,GAzCmBQ,EAyCRH,GAvC3BrB,KAAKd,cAAgBqC,EAAOtB,KAAKf,cAAgB,MACxDsC,EAAOvB,KAAKf,cAAgBqC,EAAOvB,KAAKd,cAAgB,IAuC9CmC,EAAMpB,KAAKZ,UAAU2B,EAAUf,MAAQ,IACzCe,EAAUf,KAAOoB,EAAMpB,OAGzBc,EAAaO,KAAKD,GAClBL,EAAYK,KA/Cf,IAAwBE,EAAeC,EAqD5C,OAAOT,CACT,CAEO,SAASU,EAAe7B,EAAeC,GAO5C,MAAO,CAAE6B,UAjHJ,SAAsBC,GAC3B,GACEA,EAAKC,YAAYC,OAAOC,mBACxBH,EAAKI,SAASF,OAAOG,kBAErB,MAAM,IAAIvG,MAAM,oBAElB,OAAOkG,EAAKM,UACd,CAmGoBC,CAChB,gBACEC,MAAMC,UAAUC,MAAMC,KAAK1C,EAAOC,EAAQA,EAAS,IACnD,IAIN,CAEO,SAAS0C,EACdC,EACAC,GAEA,OAAOD,EACHA,EAAcnD,UAAUoD,GAAiB,EACvCA,EACAD,EACFC,CACN,CAEO,SAASC,EACdC,EACAC,EAAwCC,GAAKA,GAE7C,IAAIC,EAAY,EACZC,EAAgB,EACpB,MAAMC,EAAc,GACdC,EAAsC,CAAC,EAC7C,IAAK,IAAIxD,EAAI,EAAGA,EAAIkD,EAAWjD,OAAQD,GAAK,EAC1C,IAAKkD,EAAWlD,GAAI,CAClB,GAAIsD,EAAgBtD,EAAG,CACrB,IAAIyD,EAAUP,EAAWvD,SAAS,OAAQ2D,EAAetD,GACzDyD,EAAUN,EAAaM,GACvBF,EAAYF,GAAaI,EACzBD,EAAYC,GAAWJ,CACzB,CACAC,EAAgBtD,EAAI,EACpBqD,GAAa,CACf,CAEF,MAAO,CAAEG,cAAaD,cACxB,CCxJe,MAAeG,EAQ5B,WAAA3G,EAAY,WACV4G,EAAU,aACVR,EAAgBS,GAAcA,IAK9B9H,KAAK6H,WAAaA,EAClB7H,KAAKqH,aAAeA,CACtB,ECMa,MAAMU,UAAYH,EAG/B,eAAMzB,CAAU6B,EAAeC,G,QAE7B,OAAsC,QAA/B,EAAwB,QAAxB,SADiBjI,KAAKkI,MAAMD,IAClBE,QAAQH,UAAM,eAAEI,aAAK,eAAEjC,YAAa,CACvD,CAGA,YAAMkC,CAAOJ,GACX,MAAM5D,QAAerE,KAAK6H,WAAWS,SAASL,GAG9C,GAlCc,WAkCV5D,EAAMkE,aAAa,GACrB,MAAM,IAAIrI,MAAM,kBAGlB,MAAMsI,EAAWnE,EAAMoE,YAAY,GAKnC,IACIxB,EADAyB,EAAO,EAKX,MAAMP,EAAU,IAAIvB,MAIjB4B,GACH,IAAK,IAAItE,EAAI,EAAGA,EAAIsE,EAAUtE,IAAK,CAEjC,MAAMyE,EAAWtE,EAAMoE,YAAYC,GACnC,IAAIN,EAEJM,GAAQ,EACR,MAAME,EAAoC,CAAC,EAE3C,IAAK,IAAIC,EAAI,EAAGA,EAAIF,EAAUE,GAAK,EAAG,CACpC,MAAMlE,EAAMN,EAAMkE,aAAaG,GAE/B,GADAA,GAAQ,EACII,QAARnE,EACF+D,GAAQ,EACRN,EAAQlC,EAAe7B,EAAOqE,EAAO,IACrCA,GAAQ,OACH,IAAI/D,EAAMmE,MACf,MAAM,IAAI5I,MAAM,oDACX,CACL,MAAM6I,EAAa1E,EAAMoE,YAAYC,GACrCA,GAAQ,EACR,MAAMpD,EAAS,IAAIsB,MAAamC,GAChC,IAAK,IAAIC,EAAI,EAAGA,EAAID,EAAYC,IAAK,CACnC,MAAMC,EAAI7E,EAAUC,EAAOqE,GAC3BA,GAAQ,EACR,MAAMQ,EAAI9E,EAAUC,EAAOqE,GAC3BA,GAAQ,EACRzB,EAAgBD,EAAcC,EAAegC,GAC7C3D,EAAO0D,GAAK,IAAIxE,EAAMyE,EAAGC,EAAGvE,EAC9B,CACAiE,EAASjE,GAAOW,CAClB,EACF,CAEA,MAAM6D,EAAc9E,EAAMoE,YAAYC,GACtCA,GAAQ,EAIR,MAAMU,EAAc,IAAIxC,MAAqBuC,GAC7C,IAAK,IAAIN,EAAI,EAAGA,EAAIM,EAAaN,IAAK,CACpC,MAAMvE,EAASF,EAAUC,EAAOqE,GAChCA,GAAQ,EACRzB,EAAgBD,EAAcC,EAAe3C,GAC7C8E,EAAYP,GAAKvE,CACnB,CAEA6D,EAAQjE,GAAK,CAAE0E,WAAUQ,cAAahB,QACxC,CAEA,MAAO,CACLiB,KAAK,EACLpC,gBACAqC,aAAc,MACdnB,UACAK,WAEJ,CAEA,cAAMe,CACJC,EACAC,EACAC,EACAzB,EAAiB,CAAC,GAElB,MAAMiB,EAAI,MACJS,OAAkB5E,IAAV0E,EAERG,SADkB5J,KAAKkI,MAAMD,IACVE,QAAQqB,GACjC,IAAKI,EACH,MAAO,GAET,MAAM,YAAER,EAAc,GAAE,MAAEhB,GAAUwB,EACpC,GAA2B,IAAvBR,EAAYjF,OACd,MAAO,GAET,MAAM0F,OAAY9E,IAAR2E,GAAqBN,EAAYjF,OAAS,GAAK+E,GA3H5CpB,EA2HwD4B,GA1H3D5B,EA0HgEoB,QA3H9E,IAAiBpB,EA4Hb,MAAMR,OAAcvC,IAAV0E,EAAsB,EA/HpC,SAAmB3B,GACjB,OAAOA,EAAKA,EA8H2C
oB,KA7HzD,CA6HwCY,CAAUL,GACxCM,EACF,IAAInD,MADO+C,GACAE,EAAIvC,GAAK4B,EACVE,EAAYjF,OAAS,GAC7B6F,EAAYZ,EAAYA,EAAYjF,OAAS,GAAGR,cACtD,GAAIkG,GAAKT,EAAYjF,OAAS,GAAK+E,EACjC,MAAM,IAAIhJ,MAAM,0CAElB,IAAI+J,EAAab,EAAY9B,EAAI4B,GAAGvF,cACpC,IAAK,IAAIO,EAAIoD,EAAI4B,EAAGL,EAAI,EAAG3E,EAAI2F,EAAIX,EAAGhF,IAAK2E,IACzCkB,EAAOlB,GAAK,CACVqB,MAAOd,EAAYlF,EAAI,GAAGP,cAAgBsG,EAC1CR,MAAOvF,EAAIgF,EACXQ,IAAKxF,EAAIgF,EAAIA,GAEfe,EAAab,EAAYlF,EAAI,GAAGP,cAElC,OAAOoG,EAAOI,KAAIC,IAAK,IAClBA,EACHF,MAAQE,EAAEF,QAAS9B,aAAK,EAALA,EAAOjC,YAAa,GAAM6D,KAEjD,CAEA,oBAAMK,CACJrC,EACAhE,EACAsG,EACArC,EAAiB,CAAC,GAEdjE,EAAM,IACRA,EAAM,GAGR,MAAMuG,QAAkBvK,KAAKkI,MAAMD,GACnC,IAAKsC,EACH,MAAO,GAET,MAAMC,EAAKD,EAAUpC,QAAQH,GAC7B,IAAKwC,EACH,MAAO,GAIT,MAAMC,GAnKqBf,EAmKWY,EAjKjC,CACL,CAAC,EAAG,GACJ,CAAC,IAJaI,EAmKmB1G,IA/JpB,IAAK,IAHpB0F,GAAO,IAGyB,KAC9B,CAAC,GAAKgB,GAAO,IAAK,GAAKhB,GAAO,KAC9B,CAAC,IAAMgB,GAAO,IAAK,IAAMhB,GAAO,KAChC,CAAC,KAAOgB,GAAO,IAAK,KAAOhB,GAAO,KAClC,CAAC,MAAQgB,GAAO,IAAK,MAAQhB,GAAO,OARxC,IAAkBgB,EAAahB,EAoK3B,MAAMpE,EAAkB,GAGxB,IAAK,MAAOmE,EAAOC,KAAQe,EACzB,IAAK,IAAI9F,EAAM8E,EAAO9E,GAAO+E,EAAK/E,IAChC,GAAI6F,EAAG5B,SAASjE,GAAM,CACpB,MAAMgG,EAAYH,EAAG5B,SAASjE,GAC9B,IAAK,MAAMiG,KAAYD,EACrBrF,EAAOS,KAAK6E,EAEhB,CAMJ,MAAMC,EAAQL,EAAGpB,YAAYjF,OAC7B,IAAIoB,EACJ,MAAMuF,EAASC,KAAK/G,IAAIA,GAAO,GAAI6G,EAAQ,GACrCG,EAASD,KAAK/G,IAAIsG,GAAO,GAAIO,EAAQ,GAC3C,IAAK,IAAI3G,EAAI4G,EAAQ5G,GAAK8G,IAAU9G,EAAG,CACrC,MAAM+G,EAAKT,EAAGpB,YAAYlF,GACtB+G,KAAQ1F,GAAU0F,EAAGnH,UAAUyB,GAAU,KAC3CA,EAAS0F,EAEb,CAEA,OAAO5F,EAAeC,EAAQC,EAChC,CAEA,WAAM2C,CAAMD,EAAiB,CAAC,GAO5B,OANKjI,KAAKkL,SACRlL,KAAKkL,OAASlL,KAAKqI,OAAOJ,GAAM1F,OAAMsH,IAEpC,MADA7J,KAAKkL,YAASnG,EACR8E,CAAC,KAGJ7J,KAAKkL,MACd,CAEA,eAAMC,CAAU3B,EAAevB,EAAiB,CAAC,G,MAE/C,SAA8B,QAArB,SADYjI,KAAKkI,MAAMD,IAChBE,QAAQqB,UAAM,eAAEZ,SAClC,E,8EC3MF,SAASwC,EAAOC,EAAaC,GAC3B,OAAOP,KAAKQ,MAAMF,EAAM,GAAKC,EAC/B,CAEe,MAAME,UAAY5D,EAAjC,c,oBACU,KAAA6D,aAAe,EACf,KAAAC,MAAQ,EACR,KAAAC,SAAW,CA6MrB,CAzME,eAAMxF,CAAU6B,EAAeC,G,QAE7B,OAAsC,QAA/B,EAAwB,QAAxB,SADiBjI,KAAKkI,MAAMD,IAClBE,QAAQH,UAAM,eAAEI,aAAK,eAAEjC,YAAa,CACvD,CAEA,cAAMoD,GACJ,MAAO,EACT,CAEA,YAAAqC,CAAavH,EAAeC,GAC1B,MAAMuH,EAAcxH,EAAMoE,YAAYnE,GAChCwH,EACU,MAAdD,EAAwB,uBAAyB,iBAC7CE,EACJ,CAAE,EAAG,UAAW,EAAG,MAAO,EAAG,OACf,GAAdF,GACF,IAAKE,EACH,MAAM,IAAI7L,MAAM,qCAAqC2L,KAEvD,MAAMG,EAAgB,CACpBC,IAAK5H,EAAMoE,YAAYnE,EAAS,GAChCmF,MAAOpF,EAAMoE,YAAYnE,EAAS,GAClCoF,IAAKrF,EAAMoE,YAAYnE,EAAS,KAE5B4H,EAAY7H,EAAMoE,YAAYnE,EAAS,IACvC6H,EAAWD,EAAYE,OAAOC,aAAaH,GAAa,GACxDI,EAAYjI,EAAMoE,YAAYnE,EAAS,IACvCiI,EAAoBlI,EAAMoE,YAAYnE,EAAS,IAErD,MAAO,CACL0H,gBACAF,iBACAI,YACAC,WACAG,YACAP,SACAF,iBACG1E,EACD9C,EAAMmI,SAASlI,EAAS,GAAIA,EAAS,GAAKiI,GAC1CvM,KAAKqH,cAGX,CAGA,YAAMgB,CAAOJ,GACX,MAAMwE,QAAezM,KAAK6H,WAAWS,SAASL,GACxC5D,QAAc,IAAAqI,OAAMD,GAE1B,IAAIE,EAEJ,GApEe,WAoEXtI,EAAMkE,aAAa,GACrBoE,EAAa,MACR,IArEQ,WAqEJtI,EAAMkE,aAAa,GAG5B,MAAM,IAAIrI,MAAM,kBAFhByM,EAAa,CAIf,CAEA3M,KAAK2L,SAAWtH,EAAMoE,YAAY,GAClCzI,KAAK0L,MAAQrH,EAAMoE,YAAY,GAC/BzI,KAAKyL,eAAiB,GAAyB,GAAlBzL,KAAK0L,MAAQ,IAAW,GAAK,EAC1D,MAAMkB,EAAYvI,EAAMoE,YAAY,IAC9BoE,EAAMD,GAAa,GAAK5M,KAAK4L,aAAavH,EAAO,SAAMU,EACvDyD,EAAWnE,EAAMoE,YAAY,GAAKmE,GAKxC,IACI3F,EADAyB,EAAO,GAAKkE,EAAY,EAE5B,MAAMzE,EAAU,IAAIvB,MAGjB4B,GACH,IAAK,IAAItE,EAAI,EAAGA,EAAIsE,EAAUtE,IAAK,CAEjC,MAAMyE,EAAWtE,EAAMoE,YAAYC,GACnCA,GAAQ,EACR,MAAME,EAAoC,CAAC,EAC3C,IAAIR,EACJ,IAAK,IAAIS,EAAI,EAAGA,EAAIF,EAAUE,IAAK,CACjC,MAAMlE,EAAMN,EAAMkE,aAAaG,GAE/B,GADAA,GAAQ,EACJ/D,EAAM3E,KAAKyL,aACbrD,EAAQlC,EAAe7B,EAAOqE,EAAO,IACrCA,GAAQ,OACH,CACLzB,EAAgBD,EAAcC,EAAe7C,EAAUC,EAAOqE,IAC9DA,GAAQ,EACR,MAAMK,EAAa1E,EAAMoE
,YAAYC,GACrCA,GAAQ,EACR,MAAMpD,EAAS,IAAIsB,MAAamC,GAChC,IAAK,IAAIC,EAAI,EAAGA,EAAID,EAAYC,GAAK,EAAG,CACtC,MAAMC,EAAI7E,EAAUC,EAAOqE,GAC3BA,GAAQ,EACR,MAAMQ,EAAI9E,EAAUC,EAAOqE,GAC3BA,GAAQ,EACRzB,EAAgBD,EAAcC,EAAegC,GAC7C3D,EAAO0D,GAAK,IAAIxE,EAAMyE,EAAGC,EAAGvE,EAC9B,CACAiE,EAASjE,GAAOW,CAClB,CACF,CAEA6C,EAAQjE,GAAK,CAAE0E,WAAUR,QAC3B,CAEA,MAAO,CACLuE,aACA1F,gBACAkB,UACAK,WACAsE,KAAK,EACLxD,aAAc,SACXuD,EAEP,CAEA,oBAAMxC,CACJrC,EACAhE,EACAsG,EACArC,EAAiB,CAAC,GAEdjE,EAAM,IACRA,EAAM,GAGR,MAAMuG,QAAkBvK,KAAKkI,MAAMD,GAC7BuC,EAAKD,aAAS,EAATA,EAAWpC,QAAQH,GAC9B,IAAKwC,EACH,MAAO,GAET,MAAMC,EAAkBzK,KAAK+M,SAAS/I,EAAKsG,GAE3C,GAA+B,IAA3BG,EAAgBtG,OAClB,MAAO,GAGT,MAAMmB,EAAS,GAEf,IAAK,MAAOmE,EAAOC,KAAQe,EACzB,IAAK,IAAI9F,EAAM8E,EAAO9E,GAAO+E,EAAK/E,IAChC,GAAI6F,EAAG5B,SAASjE,GAAM,CACpB,MAAMgG,EAAYH,EAAG5B,SAASjE,GAC9B,IAAK,MAAMqI,KAAKrC,EACdrF,EAAOS,KAAKiH,EAEhB,CAIJ,OAAO3H,EAAeC,EAAQ,IAAI5B,EAAc,EAAG,GACrD,CAMA,QAAAqJ,CAASrC,EAAahB,IACpBgB,GAAO,GACG,IACRA,EAAM,GAEJhB,EAAM,GAAK,KACbA,EAAM,GAAK,IAEbA,GAAO,EACP,IAAIuD,EAAI,EACJC,EAAI,EACJ5F,EAAItH,KAAK2L,SAAwB,EAAb3L,KAAK0L,MAC7B,MAAMyB,EAAO,GACb,KAAOF,GAAKjN,KAAK0L,MAAOpE,GAAK,EAAG4F,GAAY,EA3LjC,IA2LwC,EAAJD,GAAQA,GAAK,EAAG,CAC7D,MAAMlJ,EAAImJ,EAAI9B,EAAOV,EAAKpD,GACpBuC,EAAIqD,EAAI9B,EAAO1B,EAAKpC,GAC1B,GAAIuC,EAAI9F,EAAIoJ,EAAKhJ,OAASnE,KAAKyL,aAC7B,MAAM,IAAIvL,MACR,SAASwK,KAAOhB,oDAAsD1J,KAAK2L,mBAAmB3L,KAAK0L,iEAGvGyB,EAAKpH,KAAK,CAAChC,EAAG8F,GAChB,CACA,OAAOsD,CACT,CAEA,WAAMjF,CAAMD,EAAiB,CAAC,GAO5B,OANKjI,KAAKkL,SACRlL,KAAKkL,OAASlL,KAAKqI,OAAOJ,GAAM1F,OAAMsH,IAEpC,MADA7J,KAAKkL,YAASnG,EACR8E,CAAC,KAGJ7J,KAAKkL,MACd,CAEA,eAAMC,CAAU3B,EAAevB,EAAiB,CAAC,G,MAE/C,SAA8B,QAArB,SADYjI,KAAKkI,MAAMD,IAChBE,QAAQqB,UAAM,eAAEZ,SAClC,ECtOF,MCEMwE,EAAiB,mBAAmBC,MAAM,IAC1CC,EAAgB,mBAAmBD,MAAM,IAKhC,MAAME,EAUnB,WAAAtM,CAAYgD,GATJ,KAAAlC,KAAO,CAAC,EAIR,KAAAyL,SAAqB,GACrB,KAAAC,gBAAiB,EAKvB,MAAM,MAAEpJ,EAAK,WAAEqJ,GAAezJ,GACxB,UAAE0J,EAAS,MAAElE,GAAUpF,EAC7BrE,KAAK+B,KAAO,CAAE0H,MAAOkE,EAAUlF,YAAYgB,EAAQ,IACnDzJ,KAAKqE,MAAQA,EACbrE,KAAK4N,IAAMF,EACX1N,KAAK6N,OAASF,EAAUlF,YAAYgB,EAAQ,GAC5CzJ,KAAK8N,OAA6C,WAApCH,EAAUlF,YAAYgB,EAAQ,MAAqB,EACnE,CAEA,GAAApI,CAAI0M,GAEF,OAAI/N,KAAK+N,IAEH/N,KAAK+B,KAAKgM,KAId/N,KAAK+B,KAAKgM,GAAS/N,KAAK+N,MAHf/N,KAAK+B,KAAKgM,IAMd/N,KAAKgO,KAAKD,EAAME,cACzB,CAEA,GAAAvE,GACE,OAAO1J,KAAKqB,IAAI,SAAWrB,KAAKqB,IAAI,gBACtC,CAEA,MAAA6M,GACE,OAAOlO,KAAK6N,MACd,CAIA,IAAAG,CAAKD,GACH,OAAIA,KAAS/N,KAAK+B,OAGlB/B,KAAK+B,KAAKgM,GAAS/N,KAAKmO,UAAUJ,IAFzB/N,KAAK+B,KAAKgM,EAIrB,CAEA,KAAAK,GACEpO,KAAKqO,gBAEL,IAAIC,EAAO,CAAC,OAEPtO,KAAKuO,qBACRD,EAAKvI,KACH,QACA,MACA,SACA,QACA,OACA,KACA,QACA,gBACA,mBAGA/F,KAAKwO,YACPF,EAAKvI,KAAK,wBAAyB,oBAErCuI,EAAOA,EAAKG,OAAOzO,KAAKwN,UAAY,IAEpC,IAAK,MAAMxE,KAAKpG,OAAOS,KAAKrD,KAAK+B,MAC1BiH,EAAE0F,WAAW,MAAc,gBAAN1F,GACxBsF,EAAKvI,KAAKiD,GAId,MAAM2F,EAAgC,CAAC,EACvC,OAAOL,EAAKM,QAAO1B,IACjB,GACGA,KAAKlN,KAAK+B,WAAyBgD,IAAjB/E,KAAK+B,KAAKmL,IACvB,OAANA,GACM,OAANA,EAEA,OAAO,EAGT,MAAM2B,EAAK3B,EAAEe,cACP3G,EAAIqH,EAAKE,GAEf,OADAF,EAAKE,IAAM,GACHvH,CAAC,GAEb,CAEA,MAAAwH,GAEA,CAEA,QAAAC,GACE,OAAO/O,KAAKqB,IAAI,cAClB,CAEA,EAAA2N,GACE,OAAOhP,KAAK4N,GACd,CAMA,EAAAqB,GACE,MAAMA,GAA+B,MAAzBjP,KAAKqB,IAAI,gBAA2B,EAChD,OAAc,MAAP4N,OAAalK,EAAYkK,CAClC,CAEA,KAAA/E,GACE,OAAOlK,KAAKqB,IAAI,KAClB,CAEA,IAAA6N,G,MACE,OAAqB,QAAd,EAAAlP,KAAKmP,iBAAS,eAAEC,KAAK,IAC9B,CAEA,OAAAD,GACE,GAAInP,KAAKuO,oBACP,OAGF,MAAM,MAAE9E,EAAK,UAAEkE,GAAc3N,KAAKqE,MAC5BgL,EACJ5F,EACA,GACAzJ,KAAKqB,IAAI,gBACiB,EAA1BrB,KAAKqB,IAAI,eACTrB,KAAKqB,IAAI,cACLiO,EAAOtP,KAAKqB,IAAI,cACtB,OAAOsM,EAAUnB,SAAS6C,EAAGA,EAAIC,EACnC,CAEA,MAAAC,GACE,OAAOvP,KAAKwP,yBAA2B,EA
AI,CAC7C,CAEA,iCAAAC,GACE,IAAIzP,KAAK0P,iBAGT,OAAO1P,KAAK2P,6BAA+B,EAAI,CACjD,CAEA,IAAAjO,GACE,OAAO1B,KAAKqB,IAAI,aAClB,CAEA,UAAAuO,GACE,MAAMC,EAAK7P,KAAKqB,IAAI,iBACd,UAAEsM,EAAS,MAAElE,GAAUzJ,KAAKqE,MAClC,OAAOsJ,EAAU9J,SAAS,QAAS4F,EAAQ,GAAIA,EAAQ,GAAKoG,EAAK,EACnE,CAMA,SAAA1B,CAAU2B,GAIR,GAAI9P,KAAKyN,eACP,OAGF,MAAM,UAAEE,EAAS,MAAElE,GAAUzJ,KAAKqE,MAClC,IAAIgL,EACFrP,KAAK+P,YACLtG,EACE,GACAzJ,KAAKqB,IAAI,gBACiB,EAA1BrB,KAAKqB,IAAI,eACTrB,KAAKqB,IAAI,cACTrB,KAAKqB,IAAI,cAEb,MAAM2O,EAAWhQ,KAAKqE,MAAMqF,IAC5B,IAAIuG,EACJ,KAAOZ,EAAIW,GAAYC,IAAUH,GAAS,CACxC,MAAMI,EAAM9D,OAAOC,aAAasB,EAAU0B,GAAI1B,EAAU0B,EAAI,IAC5DY,EAAQC,EAAIjC,cACZ,MAAMkC,EAAO/D,OAAOC,aAAasB,EAAU0B,EAAI,IAG/C,IAAI5L,EACJ,OAHA4L,GAAK,EAGGc,GACN,IAAK,IACH1M,EAAQ2I,OAAOC,aAAasB,EAAU0B,IACtCA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAUlF,YAAY4G,GAC9BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAUpF,aAAa8G,GAC/BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAUyC,SAASf,GAC3BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAU0C,UAAUhB,GAC5BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAU2C,YAAYjB,GAC9BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAU4C,aAAalB,GAC/BA,GAAK,EACL,MAEF,IAAK,IACH5L,EAAQkK,EAAU6C,YAAYnB,GAC9BA,GAAK,EACL,MAEF,IAAK,IACL,IAAK,IAEH,IADA5L,EAAQ,GACD4L,GAAKW,GAAU,CACpB,MAAMS,EAAK9C,EAAU0B,KACrB,GAAW,IAAPoB,EACF,MAEAhN,GAAS2I,OAAOC,aAAaoE,EAEjC,CACA,MAEF,IAAK,IAAK,CACRhN,EAAQ,GACR,MAAMgN,EAAK9C,EAAU0B,KACfqB,EAAQtE,OAAOC,aAAaoE,GAC5BE,EAAQhD,EAAUlF,YAAY4G,GAEpC,GADAA,GAAK,EACS,MAAVqB,EACF,GAAY,OAARR,EACF,IAAK,IAAIlH,EAAI,EAAGA,EAAI2H,EAAO3H,IAAK,CAC9B,MAAM4H,EAAQjD,EAAUlF,YAAY4G,GAGpC5L,IAFYmN,GAAS,GACVtD,EAAsB,GAARsD,GAEzBvB,GAAK,CACP,MAEA,IAAK,IAAIrG,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAUlF,YAAY4G,GAC3BrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAIX,GAAc,MAAVqB,EACF,GAAY,OAARR,EACF,IAAK,IAAIlH,EAAI,EAAGA,EAAI2H,EAAO3H,IAAK,CAC9B,MAAM4H,EAAQjD,EAAUpF,aAAa8G,GAGrC5L,IAFYmN,GAAS,GACVtD,EAAsB,GAARsD,GAEzBvB,GAAK,CACP,MAEA,IAAK,IAAIrG,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAUpF,aAAa8G,GAC5BrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAIX,GAAc,MAAVqB,EACF,IAAK,IAAI1H,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAU2C,YAAYjB,GAC3BrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAGT,GAAc,MAAVqB,EACF,IAAK,IAAI1H,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAU4C,aAAalB,GAC5BrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAGT,GAAc,MAAVqB,EACF,IAAK,IAAI1H,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAUyC,SAASf,GACxBrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAGT,GAAc,MAAVqB,EACF,IAAK,IAAI1H,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAU0C,UAAUhB,GACzBrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAGT,GAAc,MAAVqB,EACF,IAAK,IAAI1H,EAAI,EAAGA,EAAI2H,EAAO3H,IACzBvF,GAASkK,EAAU6C,YAAYnB,GAC3BrG,EAAI,EAAI2H,IACVlN,GAAS,KAEX4L,GAAK,EAGT,KACF,CACA,QACE5M,QAAQoO,KAAK,yBAAyBV,8BACtC1M,OAAQsB,EACRsK,EAAIW,EAOR,GAHAhQ,KAAK+P,WAAaV,EAElBrP,KAAKwN,SAASzH,KAAKmK,GACfD,IAAUH,EACZ,OAAOrM,EAGTzD,KAAK+B,KAAKkO,GAASxM,CACrB,CACAzD,KAAKyN,gBAAiB,CAExB,CAEA,aAAAY,GACErO,KAAKmO,UAAU,GACjB,CAEA,WAAA2C,CAAYC,GACV,OAEEA,EACGC,MAAM,UAEN7G,KAAI8G,GAAM,CAAC,KAAKC,KAAKD,GAAI,GAAGE,cAAe7K,OAAO8K,SAASH,EAAI,MAEtE,CAMA,QAAAzC,GACE,SDjYW,ECiYDxO,KAAK8N,MACjB,CAGA,gBAAAuD,GACE,SDpYgB,ECoYNrR,KAAK8N,MACjB,CAGA,iBAAAS,GACE,SDvYU,ECuYAvO,KAAK8N,MACjB,CAGA,cAAA4B,GACE,SD1YW,EC0YD1P,KAAK8N,MACjB,CAGA,qBAAA0B,GACE,SD7YY,GC6YFxP,KAAK8N,MACjB,CAGA,yBAAA6B,GACE,SDhZa,GCgZH3P,KAAK8N,MACjB,CAGA,OAAAwD,GACE,SDnZU,GCmZAtR,KAAK8N,MACjB,CAGA,OAAAyD,GACE,SDtZU,ICsZAvR,KAAK8N,MACjB,CAGA,WAAA0D,GACE,SDzZc,ICyZJxR,KAAK8N,MACjB,CAGA,UAAA2D,GACE,SD5ZW,IC4ZDzR,KAAK8N,MACjB,CAGA,WAAA4D,GACE,SD/ZQ,KC+ZE1R,KAAK8N,MACjB,CAGA,eAAA6D,GACE,SDlakB,KCkaR3R,KAAK8N,MACjB,CAEA,
KAAAiD,GACE,GAAI/Q,KAAKuO,oBACP,OAGF,MAAM,UAAEZ,EAAS,MAAElE,GAAUzJ,KAAKqE,MAC5BuN,EAAc5R,KAAKqB,IAAI,eAC7B,IAAIgO,EAAI5F,EAAQ,GAAKzJ,KAAKqB,IAAI,gBAC9B,MAAMwQ,EAAS7R,KAAKqB,IAAI,cACxB,IAAI0P,EAAQ,GACRe,EAAO,EAIPlB,EAAQjD,EAAUlF,YAAY4G,GAC9B0C,EAAMnB,GAAS,EACfK,EAAK3D,EAAsB,GAARsD,GACvB,GAAW,MAAPK,GAAcc,IAAQF,EAWxB,OARAxC,GAAK,EACLuB,EAAQjD,EAAUlF,YAAY4G,GAC9B0C,EAAMnB,GAAS,EACfK,EAAK3D,EAAsB,GAARsD,GACR,MAAPK,GACFxO,QAAQoO,KAAK,wBAEf7Q,KAAK+B,KAAKiQ,cAAgBD,EACnB/R,KAAKqB,IAAI,MAEhB,IAAK,IAAI2L,EAAI,EAAGA,EAAI4E,IAAe5E,EACjC4D,EAAQjD,EAAUlF,YAAY4G,GAC9B0C,EAAMnB,GAAS,EACfK,EAAK3D,EAAsB,GAARsD,GACnBG,GAASgB,EAAMd,EAIJ,MAAPA,GAAqB,MAAPA,GAAqB,MAAPA,IAC9Ba,GAAQC,GAGV1C,GAAK,EAIP,OADArP,KAAK+B,KAAKiQ,cAAgBF,EACnBf,CAEX,CAEA,aAAAiB,GACE,OAAIhS,KAAK+B,KAAKiQ,eAGZhS,KAAKqB,IAAI,SAFFrB,KAAK+B,KAAKiQ,aAKrB,CAEA,WAAAC,GACE,OAA8B,MAAvBjS,KAAKqB,IAAI,WAClB,CAEA,YAAA6Q,GACE,OAAgC,IAAzBlS,KAAKqB,IAAI,aAClB,CAKA,UAAA8Q,GACE,OAAQnS,KAAKqB,IAAI,cAAgB,GAAM,CACzC,CAEA,YAAA+Q,GACE,OAAOpS,KAAKqS,KACd,CAEA,GAAAA,GACE,MAAM,UAAE1E,EAAS,MAAElE,GAAUzJ,KAAKqE,MAC5BgL,EACJ5F,EAAQ,GAAKzJ,KAAKqB,IAAI,gBAA4C,EAA1BrB,KAAKqB,IAAI,eAC7CiR,EAAWtS,KAAKqB,IAAI,cACpBkR,EAAMvS,KAAKqB,IAAI,cACrB,IAAImR,EAAM,GACNtO,EAAI,EACR,IAAK,IAAI2E,EAAI,EAAGA,EAAIyJ,IAAYzJ,EAAG,CACjC,MAAM4J,EAAK9E,EAAU0B,EAAIxG,GACzB2J,GAAOpF,GAAqB,IAALqF,IAAc,GACrCvO,IACIA,EAAIqO,IACNC,GAAOpF,EAAoB,GAALqF,GACtBvO,IAEJ,CACA,OAAOsO,CACT,CAGA,kBAAAE,GACE,IACG1S,KAAKuO,sBACLvO,KAAK0P,kBACN1P,KAAK6N,SAAW7N,KAAK2S,cACrB,CACA,MAAMC,EAAK5S,KAAKwP,wBAA0B,IAAM,IAC1CqD,EAAK7S,KAAK2P,4BAA8B,IAAM,IACpD,IAAImD,EAAK,IACLC,EAAK,IACL/S,KAAKsR,WACPwB,EAAK,IACLC,EAAK,KACI/S,KAAKuR,YACduB,EAAK,IACLC,EAAK,KAGP,MAAMC,EAAM,GAaZ,OAZchT,KAAKiT,kBACP,GACVD,EAAI,GAAKJ,EACTI,EAAI,GAAKF,EACTE,EAAI,GAAKH,EACTG,EAAI,GAAKD,IAETC,EAAI,GAAKJ,EACTI,EAAI,GAAKF,EACTE,EAAI,GAAKH,EACTG,EAAI,GAAKD,GAEJC,EAAI5D,KAAK,GAClB,CACA,MAAO,EACT,CAEA,UAAA8D,GACE,OAAOlT,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,QAAA0J,GACE,OAAOnT,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,UAAA2J,GACE,OAAOpT,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,WAAAkJ,GACE,OAAO3S,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,SAAA4J,GACE,OAAOrT,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,eAAAwJ,GACE,OAAOjT,KAAKqE,MAAMsJ,UAAUlF,YAAYzI,KAAKqE,MAAMoF,MAAQ,GAC7D,CAEA,MAAA6J,GACE,MAAMvR,EAA4B,CAAC,EACnC,IAAK,MAAMiH,KAAKpG,OAAOS,KAAKrD,MACtBgJ,EAAE0F,WAAW,MAAc,UAAN1F,IAIzBjH,EAAKiH,GAAKhJ,KAAKgJ,IAGjB,OAAOjH,CACT,ECvmBK,SAASwR,EAAgBC,GAC9B,MAAMC,EAAQD,EAAKnG,MAAM,SACnBtL,EAAkE,GACxE,IAAK,MAAM2R,KAAQD,EAAO,CACxB,MAAOvD,KAAQyD,GAAUD,EAAKrG,MAAM,MAChC6C,GACFnO,EAAKgE,KAAK,CACRmK,IAAKA,EAAIpJ,MAAM,GACf/E,KAAM4R,EAAOxJ,KAAIyJ,IACf,MAAMC,EAAID,EAAEE,QAAQ,KAGpB,MAAO,CAAE5D,IAFQ0D,EAAE9M,MAAM,EAAG+M,GAEJpQ,MADVmQ,EAAE9M,MAAM+M,EAAI,GACK,KAIvC,CACA,OAAO9R,CACT,CCHO,MAAMgS,EAAY,SAiBzB,MAAMC,EACG,IAAAC,GACL,MAAM,IAAI/T,MAAM,eAClB,CACO,IAAAgU,GACL,MAAM,IAAIhU,MAAM,eAClB,CAEO,QAAAoI,GACL,MAAM,IAAIpI,MAAM,eAClB,CAEO,KAAAiU,GACL,MAAM,IAAIjU,MAAM,eAClB,EAEa,MAAMkU,EAyBnB,WAAAnT,EAAY,cACVoT,EAAa,QACbC,EAAO,OACPC,EAAM,QACNC,EAAO,cACPC,EAAa,OACbC,EAAM,QACNC,EAAO,cACPC,EAAa,OACbC,EAAM,OACNC,EAAM,gBACNC,EAAkB,IAAG,cACrBC,EAAgBlN,GAAKA,IAiBrB,GA9CK,KAAAgN,QAAS,EAGR,KAAAG,aAAe,IAAIjU,EAAAkU,EAA0C,CACnE/T,MAAO,IAAI,IAAJ,CAAa,CAClBgU,QAAS,KAEXjU,KAAMkU,MAAOnR,EAAYlE,KACvB,MAAM,MAAE+F,EAAK,KAAEmC,GAAShE,GAClB,KAAElC,EAAI,WAAEsT,EAAU,WAAEC,SAAqBtV,KAAKuV,WAAW,CAC7DzP,QACAmC,KAAM,IAAKA,EAAMlI,YAEnB,OAAOC,KAAKwV,gBAAgBzT,EAAMsT,EAAYC,EAAYxP,EAAM,IA+BlE9F,KAAKqH,aAAe2N,EAEhBX,EACFrU,KAAKyV,IAAMpB,OACN,G
AAIC,EACTtU,KAAKyV,IAAM,IAAI,KAAUnB,QACpB,GAAIC,EACTvU,KAAKyV,IAAM,IAAI,KAAWlB,OACrB,KAAIO,EAIT,MAAM,IAAI5U,MAAM,4BAHhBF,KAAK8U,QAAS,EACd9U,KAAKyV,IAAM,IAAIzB,CAGjB,CACA,GAAIY,EACF5U,KAAK0V,MAAQ,IAAIlK,EAAI,CAAE3D,WAAY+M,SAC9B,GAAID,EACT3U,KAAK0V,MAAQ,IAAIlK,EAAI,CAAE3D,WAAY,IAAI,KAAU8M,UAC5C,GAAIE,EACT7U,KAAK0V,MAAQ,IAAIlK,EAAI,CAAE3D,WAAY,IAAI,KAAWgN,UAC7C,GAAIJ,EACTzU,KAAK0V,MAAQ,IAAI3N,EAAI,CAAEF,WAAY4M,SAC9B,GAAID,EACTxU,KAAK0V,MAAQ,IAAI3N,EAAI,CAAEF,WAAY,IAAI,KAAU2M,UAC5C,GAAIE,EACT1U,KAAK0V,MAAQ,IAAI3N,EAAI,CAAEF,WAAY,IAAI,KAAW6M,UAC7C,GAAIJ,EACTtU,KAAK0V,MAAQ,IAAI3N,EAAI,CAAEF,WAAY,IAAI,KAAU,GAAGyM,gBAC/C,GAAIC,EACTvU,KAAK0V,MAAQ,IAAI3N,EAAI,CAAEF,WAAY,IAAI,KAAW,GAAG0M,eAChD,KAAIO,EAGT,MAAM,IAAI5U,MAAM,gCAFhBF,KAAK8U,QAAS,CAGhB,CACA9U,KAAK+U,gBAAkBA,CACzB,CAEA,kBAAMY,CAAaC,GACjB,MAAM3N,EP/DH,SAAkB4N,EAA8B,CAAC,GACtD,MAAO,YAAaA,EAAO,CAAE9V,OAAQ8V,GAAqBA,CAC5D,CO6DiBC,CAASF,GACtB,IAAK5V,KAAK0V,MACR,OAEF,MAAMnL,QAAkBvK,KAAK0V,MAAMxN,MAAMD,GACnC8N,EAAMxL,EAAUtD,cAClBsD,EAAUtD,cAActD,cAAgB,WACxCoB,EACJ,IAAI0H,EACJ,GAAIsJ,EAAK,CACP,MAAMzO,EAAIyO,EApIC,MAqILC,QAAYhW,KAAKyV,IAAIxB,KAAK,KAAOgC,MAAM3O,GAAI,EAAGA,EAAG,EAAGW,GAC1D,IAAK+N,EAAIE,UACP,MAAM,IAAIhW,MAAM,wBAElBuM,EAASuJ,EAAIvJ,OAAOD,SAAS,EAAGzB,KAAK/G,IAAIgS,EAAIE,UAAWH,GAC1D,MACEtJ,QAAezM,KAAKyV,IAAInN,SAASL,GAGnC,MAAMkO,QAAc,IAAAzJ,OAAMD,GAE1B,GAAI0J,EAAM1N,YAAY,KAAOsL,EAC3B,MAAM,IAAI7T,MAAM,kBAElB,MAAMkW,EAAUD,EAAM1N,YAAY,GAElCzI,KAAKqW,OAASF,EAAMtS,SAAS,OAAQ,EAAG,EAAIuS,GAC5C,MAAM,WAAEE,EAAU,WAAEC,SAAqBvW,KAAKwW,aAC5CJ,EAAU,EACV,MACAnO,GAKF,OAHAjI,KAAKsW,WAAaA,EAClBtW,KAAKuW,WAAaA,EAEXhD,EAAgBvT,KAAKqW,OAC9B,CAEA,SAAAI,CAAUxO,GAOR,OANKjI,KAAK0W,UACR1W,KAAK0W,QAAU1W,KAAK2V,aAAa1N,GAAM1F,OAAMsH,IAE3C,MADA7J,KAAK0W,aAAU3R,EACT8E,CAAC,KAGJ7J,KAAK0W,OACd,CAEA,mBAAMC,CAAc1O,EAAiB,CAAC,GAEpC,aADMjI,KAAKyW,UAAUxO,GACdjI,KAAKqW,MACd,CAIA,kBAAMG,CACJ/M,EACAmN,EACA3O,GAKA,GAAIwB,EAAQmN,EACV,OAAO5W,KAAKwW,aAAa/M,EAAqB,EAAdmN,EAAiB3O,GAEnD,MAAM1H,EAAOqW,EA7LA,OA8LP,UAAEV,EAAS,OAAEzJ,SAAiBzM,KAAKyV,IAAIxB,KAC3C,KAAOgC,MAAM1V,GACb,EACAqW,EACA,EACA3O,GAEF,IAAKiO,EACH,MAAM,IAAIhW,MAAM,qCAElB,MAAMiW,QAAc,IAAAzJ,OAClBD,EAAOD,SAAS,EAAGzB,KAAK/G,IAAIkS,EAAWU,KAEnCC,EAAOV,EAAM1N,YAAYgB,GAC/B,IAAI4F,EAAI5F,EAAQ,EAChB,MAAM6M,EAAqC,CAAC,EACtCC,EAAoD,GAC1D,IAAK,IAAIrS,EAAI,EAAGA,EAAI2S,EAAM3S,GAAK,EAAG,CAChC,MAAM4S,EAAQX,EAAM1N,YAAY4G,GAC1B1H,EAAU3H,KAAKqH,aACnB8O,EAAMtS,SAAS,OAAQwL,EAAI,EAAGA,EAAI,EAAIyH,EAAQ,IAE1CC,EAAOZ,EAAM1N,YAAY4G,EAAIyH,EAAQ,GAM3C,GAJAR,EAAW3O,GAAWzD,EACtBqS,EAAWxQ,KAAK,CAAE4B,UAASxD,OAAQ4S,IAEnC1H,EAAIA,EAAI,EAAIyH,EACRzH,EAAI8G,EAAMhS,OAIZ,OAHA1B,QAAQoO,KACN,wCAAwC+F,YAEnC5W,KAAKwW,aAAa/M,EAAqB,EAAdmN,EAAiB3O,EAErD,CACA,MAAO,CAAEqO,aAAYC,aACvB,CAEA,wBAAMS,CACJC,EACAjT,EACAsG,EACArC,GAEA,OAxOJmN,eAA4B8B,GAC1B,IAAIC,EAAW,GACf,UAAW,MAAMC,KAAKF,EACpBC,EAAMA,EAAI1I,OAAO2I,GAEnB,OAAOD,CACT,CAkOWE,CAAUrX,KAAKsX,sBAAsBL,EAAKjT,EAAKsG,EAAKrC,GAC7D,CAEA,2BAAOqP,CACLL,EACAjT,EACAsG,EACArC,G,YAEMjI,KAAKyW,UAAUxO,GACrB,MAAMsP,EAAuB,QAAf,EAAAvX,KAAKsW,kBAAU,eAAGW,GAChC,QAAclS,IAAVwS,GAAwBvX,KAAK0V,MAE1B,CACL,MAAMpQ,QAAetF,KAAK0V,MAAMrL,eAAekN,EAAOvT,EAAM,EAAGsG,EAAKrC,SAC7DjI,KAAKwX,oBAAoBlS,EAAQiS,EAAOvT,EAAKsG,EAAKrC,EAC3D,WAJQ,EAKV,CAEA,yBAAOuP,CACLlS,EACAiS,EACAvT,EACAsG,EACArC,EAAgB,CAAC,GAEjB,MAAM,YAAEwP,GAAgBxP,EAClByP,EAAQ,GACd,IAAIlU,GAAO,EAEX,IAAK,MAAMsC,KAASR,EAAQ,CAC1B,MAAMqS,QAAgB3X,KAAKiV,aAAa5T,IACtCyE,EAAMjC,WACN,CAAEiC,QAAOmC,QACTA,EAAKlI,QAGD6X,EAAO,GACb,IAAK,MAAMC,KAAWF,EACpB,GAAIE,EAAQ3J,WAAaqJ,EAAO,CAC9B,GAAIM,EAAQxW,IAAI,UAAYiJ,EAAK,CAE/B9G,GAAO,EACP,KACF,CAAWqU,EAAQxW,IAAI,QAAU2C,GAE/B4T,EAAK7R,KAAK8R,EAEd,CAIF,GAFAH,EAA
M3R,KAAK6R,SACLA,EACFpU,EACF,KAEJ,EPrRG,SAA0BzD,GAC/B,GAAKA,GAIDA,EAAOE,QAAS,CAElB,GAA4B,oBAAjB6X,aAA8B,CACvC,MAAMjO,EAAI,IAAI3J,MAAM,WAGpB,MADA2J,EAAElI,KAAO,cACHkI,CACR,CACE,MAAM,IAAIiO,aAAa,UAAW,aAEtC,CACF,EOuQIC,CAAiB9P,EAAKlI,QAClB0X,UACIzX,KAAKgY,WAAWT,EAAOG,EAAOzP,GAExC,CAEA,gBAAM+P,CAAWT,EAAeG,EAAuBzP,GACrD,MAAM,cAAEgQ,EAAa,cAAEC,EAAgB,KAAWjQ,EAC5CkQ,EAAwC,CAAC,EACzCC,EAAkC,CAAC,EACzCV,EAAMvN,KAAI4L,IACR,MAAMsC,EAAoC,CAAC,EAC3C,IAAK,MAAMC,KAAWvC,EAAK,CACzB,MAAMrU,EAAO4W,EAAQ5W,OACfsN,EAAKsJ,EAAQtJ,KACdqJ,EAAU3W,KACb2W,EAAU3W,GAAQ,GAEpB2W,EAAU3W,KACV0W,EAAQpJ,GAAM,CAChB,CACA,IAAK,MAAOhG,EAAGE,KAAMtG,OAAO2V,QAAQF,GACxB,IAANnP,IACFiP,EAAanP,IAAK,EAEtB,IAGF,MAAMwP,EAAmC,GACzCd,EAAMvN,KAAI4L,IACR,IAAK,MAAMnC,KAAKmC,EAAK,CACnB,MAAMrU,EAAOkS,EAAElS,OACT+H,EAAQmK,EAAEvS,IAAI,SACdoX,EAAQ7E,EAAEP,YACVqF,EAAQ9E,EAAEjB,cAEd3S,KAAK0V,OACLyC,EAAazW,KACZuW,GACES,IAAUnB,GAASxM,KAAK4N,IAAIlP,EAAQgP,GAASP,IAEhDM,EAAazS,KACX/F,KAAK0V,MAAMrL,eAAeqO,EAAOD,EAAOA,EAAQ,EAAGxQ,GAGzD,KAKF,MAAMkC,EAAM,IAAIyO,IACV5C,QAAY9Q,QAAQ2T,IAAIL,GAC9B,IAAK,MAAMM,KAAK9C,EAAI+C,OACb5O,EAAIpH,IAAI+V,EAAEjV,aACbsG,EAAI7I,IAAIwX,EAAEjV,WAAYiV,GAwB1B,aApB+B5T,QAAQ2T,IACrC,IAAI1O,EAAI6O,UAAU7O,KAAIiL,MAAMpI,IAC1B,MAAM,KAAEjL,EAAI,WAAEsT,EAAU,WAAEC,EAAU,MAAExP,SAAgB9F,KAAKuV,WAAW,CACpEzP,MAAOkH,EACP/E,SAEIgR,EAAW,GACjB,IAAK,MAAMpB,WAAiB7X,KAAKwV,gBAC/BzT,EACAsT,EACAC,EACAxP,GAEIqS,EAAaN,EAAQxW,IAAI,WAAa+W,EAAQP,EAAQ7I,OACxDiK,EAASlT,KAAK8R,GAGlB,OAAOoB,CAAQ,MAGKF,MAC1B,CAEA,iBAAMG,CAAYC,EAAkB5Y,EAAc0H,EAAiB,CAAC,GAClE,MAAM,UAAEiO,EAAS,OAAEzJ,SAAiBzM,KAAKyV,IAAIxB,KAC3C,KAAOgC,MAAM1V,GACb,EACAA,EACA4Y,EACAlR,GAGF,OAAOwE,EAAOD,SAAS,EAAGzB,KAAK/G,IAAIkS,EAAW3V,GAChD,CAEA,gBAAMgV,EAAW,MAAEzP,EAAK,KAAEmC,IACxB,MAAMwE,QAAezM,KAAKkZ,YACxBpT,EAAMrB,KAAKd,cACXmC,EAAMhB,cACNmD,IAIAwE,OAAQ1K,EAAI,WACZsT,EAAU,WACVC,SACQ,QAAgB7I,EAAQ3G,GAClC,MAAO,CAAE/D,OAAMsT,aAAYC,aAAYxP,QACzC,CAEA,qBAAM0P,CACJhL,EACA6K,EACAC,EACAxP,GAEA,IAAIsT,EAAa,EACjB,MAAMC,EAAO,GACb,IAAIC,EAAM,EACNC,GAAQC,KAAKC,MAEjB,KAAOL,EAAa,EAAI5O,EAAGrG,QAAQ,CACjC,MACM6L,EAAWoJ,EAAa,EADZ5O,EAAG/B,YAAY2Q,GACa,EAG9C,GAAI9D,EAAY,CACd,KAAO8D,EAAatT,EAAMrB,KAAKb,cAAgB0R,EAAWgE,OAC1DA,GACF,CAGA,GAAItJ,EAAWxF,EAAGrG,OAAQ,CACxB,MAAM0T,EAAU,IAAI,EAAW,CAC7BxT,MAAO,CACLsJ,UAAWnD,EACXf,MAAO2P,EACP1P,IAAKsG,GAsBPtC,WACE2H,EAAWlR,OAAS,EACE,IAAlBkR,EAAWiE,IACVF,EAAa9D,EAAWgE,IACzBxT,EAAMrB,KAAKb,aACX,EAEA8V,EAAA,EAAMC,OAAOnP,EAAG1D,MAAMsS,EAAYpJ,MAG1CqJ,EAAKtT,KAAK8R,GACN7X,KAAK+U,kBAAoByE,KAAKC,MAAQF,EAAOvZ,KAAK+U,wBAC9C/P,EAAQ,GACduU,GAAQC,KAAKC,MAEjB,CAEAL,EAAapJ,EAAW,CAC1B,CACA,OAAOqJ,CACT,CAEA,eAAMlO,CAAUyO,G,QACd,MAAMpQ,EAAuB,QAAf,EAAAxJ,KAAKsW,kBAAU,eAAGsD,GAChC,YAAiB7U,IAAVyE,IAAwC,QAAV,EAAAxJ,KAAK0V,aAAK,eAAEvK,UAAU3B,GAC7D,CAEA,eAAMrD,CAAUyT,G,MACd,MAAMpQ,EAAuB,QAAf,EAAAxJ,KAAKsW,kBAAU,eAAGsD,GAChC,YAAiB7U,IAAVyE,GAAwBxJ,KAAK0V,MAAY1V,KAAK0V,MAAMvP,UAAUqD,GAAzB,CAC9C,CAEA,cAAMD,CAASqQ,EAAiBnQ,EAAgBC,G,MAC9C,IAAK1J,KAAK0V,MACR,MAAO,SAEH1V,KAAK0V,MAAMxN,QACjB,MAAMsB,EAAuB,QAAf,EAAAxJ,KAAKsW,kBAAU,eAAGsD,GAChC,YAAiB7U,IAAVyE,EAAsB,GAAKxJ,KAAK0V,MAAMnM,SAASC,EAAOC,EAAOC,EACtE,CAEA,oBAAMW,CACJuP,EACAnQ,EACAC,EACAzB,G,MAEA,IAAKjI,KAAK0V,MACR,MAAO,SAEH1V,KAAK0V,MAAMxN,QACjB,MAAMsB,EAAuB,QAAf,EAAAxJ,KAAKsW,kBAAU,eAAGsD,GAChC,YAAiB7U,IAAVyE,EACH,GACAxJ,KAAK0V,MAAMrL,eAAeb,EAAOC,EAAOC,EAAKzB,EACnD,EC7fFmN,eAAe3G,EAAOoL,EAAoB5R,GACxC,MAAM+N,QAAY9Q,QAAQ2T,IACxBgB,EAAI1P,KAAIiL,MAAMtP,IACZ,MAAM,IAAEgU,EAAG,QAAEC,GAAYjU,EACzB,GAAIgU,EAAIpL,WAAW,SACjB,OAAO,KAAOsL,KAAKF,EAAIzM,MAAM,KAAK,GAAI,UACjC,CAIL,MAAM,QAAE4M,KAAYC,GAASH,EACvB/D,QAAYmE,MAAML,EAAK,IACxB7R,EACH8R,QAAS,IAAK9R,aAAI,EAAJA,EAAM8R,WAA
YG,KAElC,IAAKlE,EAAIoE,GACP,MAAM,IAAIla,MACR,QAAQ8V,EAAIqE,mBAAmBP,YAAc9D,EAAIxC,UAGrD,OAAO,KAAOwG,WAAWhE,EAAIsE,cAC/B,MAIJ,OAAO,KAAO7L,aAAavJ,QAAQ2T,IAAI7C,EAAI7L,KAAIpJ,IAAO,IAAA2L,OAAM3L,MAC9D,CAEe,MAAMwZ,UAAmBnG,EAKtC,WAAAnT,CAAYgD,GACVuW,MAAM,CAAE1F,QAAQ,IAChB9U,KAAKya,QAAUxW,EAAKwW,QACpBza,KAAK0a,QAAUzW,EAAKyW,OACtB,CAEA,2BAAOpD,CACLL,EACAjT,EACAsG,EACArC,G,MAEA,MACM6R,EAAM,GADI9Z,KAAKya,WAAWza,KAAK0a,yBACAzD,WAAajT,SAAWsG,eACvDiN,EAAuB,QAAf,EAAAvX,KAAKsW,kBAAU,eAAGW,GAChC,QAAclS,IAAVwS,OACI,OACD,CACL,MAAMzU,QAAeqX,MAAML,EAAK,IAAK7R,IACrC,IAAKnF,EAAOsX,GACV,MAAM,IAAIla,MACR,QAAQ4C,EAAOuX,mBAAmBP,YAAchX,EAAO0Q,UAG3D,MAAMzR,QAAae,EAAO6X,OACpBxE,QAAc1H,EAAO1M,EAAK+S,OAAO8F,KAAK9T,MAAM,GAAImB,SAE/CjI,KAAKwX,oBACV,CAEE,CACE/K,OAAQ0J,EACRvR,kBAAcG,EACdJ,IAAK,EACLb,UAAS,IACA,EAETe,eAAc,IACL,GAAGoS,KAAOjT,KAAOsG,IAE1BxF,YAAW,IACF,EAETL,KAAM,CACJb,aAAc,EACdD,cAAe,EACfG,UAAW,IAAM,GAEnBY,KAAM,CACJd,aAAc0C,OAAOC,iBACrB5C,cAAe,EACfG,UAAW,IAAM,GAEnBD,SAAQ,IACC,GAAGoT,KAAOjT,KAAOsG,MAI9BiN,EACAvT,EACAsG,EACArC,EAEJ,CACF,CAEA,gBAAMsN,EAAW,MAAEzP,IACjB,IAAKA,EAAM2G,OACT,MAAM,IAAIvM,MAAM,mCAElB,MAAO,CAAE6B,KAAM+D,EAAM2G,OAAQ4I,WAAY,GAAIC,WAAY,GAAIxP,QAC/D,CAEA,eAAM2Q,CAAUxO,EAAiB,CAAC,GAChC,MAAM6R,EAAM,GAAG9Z,KAAKya,WAAWza,KAAK0a,wCAC9B5X,QAAeqX,MAAML,EAAK7R,GAChC,IAAKnF,EAAOsX,GACV,MAAM,IAAIla,MACR,QAAQ4C,EAAOuX,mBAAmBP,YAAchX,EAAO0Q,UAG3D,MAAMzR,QAAae,EAAO6X,OACpBxE,QAAc1H,EAAO1M,EAAK+S,OAAO8F,KAAM3S,GAE7C,GAAIkO,EAAM1N,YAAY,KAAOsL,EAC3B,MAAM,IAAI7T,MAAM,kBAElB,MAAMkW,EAAUD,EAAM1N,YAAY,GAE5BoS,EAAYtH,EADC4C,EAAMtS,SAAS,OAAQ,EAAG,EAAIuS,IAK3C0E,EAAkD,GAClDC,EAAmC,CAAC,EACpCC,EAAUH,EAAUjM,QAAO3B,GAAe,OAAVA,EAAEiD,MACxC,IAAK,MAAOlI,EAAOiT,KAAWD,EAAQzC,UAAW,CAC/C,IAAI5Q,EAAU,GACVxD,EAAS,EACb,IAAK,MAAM+W,KAAQD,EAAOlZ,KACP,OAAbmZ,EAAKhL,IACPvI,EAAUuT,EAAKzX,MACO,OAAbyX,EAAKhL,MACd/L,GAAU+W,EAAKzX,OAGnBsX,EAASpT,GAAWK,EACpB8S,EAAS9S,GAAS,CAAEL,UAASxD,SAC/B,CAGA,OAFAnE,KAAKsW,WAAayE,EAClB/a,KAAKuW,WAAauE,EACXD,CACT,E,6FC3IFzF,eAAe1I,EAAMyO,GACnB,IACE,IAAIC,EACA9B,EAAM,EACNpV,EAAI,EACR,MAAMoB,EAAS,GACf,IACI+V,EADArR,EAAY,EAEhB,EAAG,CACD,MAAMsR,EAAiBH,EAAU3O,SAAS8M,GAK1C,GAJA+B,EAAW,IAAI,EAAAE,UAEXH,QAASC,GACbA,EAAStV,KAAKuV,EAAgB,EAAAE,cAC1BH,EAASI,IACX,MAAM,IAAIvb,MAAMmb,EAASK,KAG3BpC,GAAO8B,EAAKO,QACZrW,EAAOpB,GAAKmX,EAASvY,OACrBkH,GAAa1E,EAAOpB,GAAGC,OACvBD,GAAK,C,OACEkX,EAAKQ,UAEd,MAAM9Y,EAAS,IAAI+Y,WAAW7R,GAC9B,IAAK,IAAI9F,EAAI,EAAGI,EAAS,EAAGJ,EAAIoB,EAAOnB,OAAQD,IAC7CpB,EAAOxB,IAAIgE,EAAOpB,GAAII,GACtBA,GAAUgB,EAAOpB,GAAGC,OAEtB,OAAO,KAAO6V,KAAKlX,E,CACnB,MAAO+G,GAEP,GAAI,GAAGA,IAAImH,MAAM,0BACf,MAAM,IAAI9Q,MACR,4DAGJ,MAAM2J,C,CAEV,CAgDAuL,eAAe0G,EAAgBX,EAAmBrV,GAChD,IACE,IAAIsV,EACJ,MAAM,KAAE3W,EAAI,KAAEC,GAASoB,EACvB,IAAIiW,EAAOtX,EAAKd,cACZqY,EAAOvX,EAAKb,aAChB,MAAM0B,EAAS,GACT+P,EAAa,GACbC,EAAa,GAEnB,IAAItL,EAAY,EACZ9F,EAAI,EACR,EAAG,CACD,MAAMoX,EAAiBH,EAAU3O,SAASuP,EAAOtX,EAAKd,eAChD0X,EAAW,IAAI,EAAAE,QAIrB,KAFIH,QAASC,GACbA,EAAStV,KAAKuV,EAAgB,EAAAE,cAC1BH,EAASI,IACX,MAAM,IAAIvb,MAAMmb,EAASK,KAG3B,MAAMjP,EAAS4O,EAASvY,OACxBwC,EAAOS,KAAK0G,GACZ,IAAI8F,EAAM9F,EAAOtI,OAEjBkR,EAAWtP,KAAKgW,GAChBzG,EAAWvP,KAAKiW,GACM,IAAlB1W,EAAOnB,QAAgBM,EAAKb,eAE9B0B,EAAO,GAAKA,EAAO,GAAGkH,SAAS/H,EAAKb,cACpC2O,EAAMjN,EAAO,GAAGnB,QAElB,MAAM8X,EAAWF,EAIjB,GAHAA,GAAQX,EAAKO,QACbK,GAAQzJ,EAEJ0J,GAAYvX,EAAKf,cAAe,CAKlC2B,EAAOpB,GAAKoB,EAAOpB,GAAGsI,SACpB,EACA9H,EAAKf,gBAAkBc,EAAKd,cACxBe,EAAKd,aAAea,EAAKb,aAAe,EACxCc,EAAKd,aAAe,GAG1ByR,EAAWtP,KAAKgW,GAChBzG,EAAWvP,KAAKiW,GAChBhS,GAAa1E,EAAOpB,GAAGC,OACvB,K,CAEF6F,GAAa1E,EAAOpB,GAAGC,OACvBD,G,OACOkX,EAAKQ,UAEd,MAAM9Y,EAAS,IAAI+Y,WAAW7R,GAC9B,IAAK,IAAI9F,EAAI,EAAGI,EAAS,EA
AGJ,EAAIoB,EAAOnB,OAAQD,IAC7CpB,EAAOxB,IAAIgE,EAAOpB,GAAII,GACtBA,GAAUgB,EAAOpB,GAAGC,OAItB,MAAO,CAAEsI,OAFM,KAAOuN,KAAKlX,GAEVuS,aAAYC,a,CAC7B,MAAOzL,GAEP,GAAI,GAAGA,IAAImH,MAAM,0BACf,MAAM,IAAI9Q,MACR,4DAGJ,MAAM2J,C,CAEV,C,wBC5Ke,MAAMqS,EAKnB,WAAAjb,EAAY,WACV4G,EAAU,KACVsU,IAKA,GAAItU,EACF7H,KAAK6H,WAAaA,MACb,KAAIsU,EAGT,MAAM,IAAI/a,UAAU,6CAFpBpB,KAAK6H,WAAa,IAAI,KAAUsU,E,CAIpC,CAEA,qBAAAC,CAAsB5J,EAAalO,EAAS,EAAG+X,GAAW,GAExD,MAAMjW,EAAO,gBAAiBoM,EAAI1L,MAAMxC,EAAQA,EAAS,GAAI+X,GAC7D,GACEjW,EAAKC,YAAYC,OAAOC,mBACxBH,EAAKI,SAASF,OAAOG,kBAErB,MAAM,IAAIrF,UAAU,oBAGtB,OAAOgF,EAAKM,UACd,CAEA,SAAA4V,GAIE,OAHKtc,KAAK0V,QACR1V,KAAK0V,MAAQ1V,KAAKuc,cAEbvc,KAAK0V,KACd,CAEA,gBAAM6G,GACJ,IAAI/J,EAAM,KAAOgK,YAAY,SACvBxc,KAAK6H,WAAWoM,KAAKzB,EAAK,EAAG,EAAG,GACtC,MAAMiK,EAAazc,KAAKoc,sBAAsB5J,EAAK,GAAG,GACtD,IAAKiK,EACH,MAAO,CAAC,CAAC,EAAG,IAGd,MAAMlE,EAAU,IAAI3R,MAAM6V,EAAa,GACvClE,EAAQ,GAAK,CAAC,EAAG,GAGjB,MAAMmE,EAAU,GAAQD,EACxB,GAAIC,EAAUpW,OAAOC,iBACnB,MAAM,IAAInF,UAAU,oBAEtBoR,EAAM,KAAOgK,YAAYE,SACnB1c,KAAK6H,WAAWoM,KAAKzB,EAAK,EAAGkK,EAAS,GAC5C,IAAK,IAAIC,EAAc,EAAGA,EAAcF,EAAYE,GAAe,EAAG,CACpE,MAAMC,EAAqB5c,KAAKoc,sBAC9B5J,EACc,GAAdmK,GAEIE,EAAuB7c,KAAKoc,sBAChC5J,EACc,GAAdmK,EAAmB,GAErBpE,EAAQoE,EAAc,GAAK,CAACC,EAAoBC,E,CAGlD,OAAOtE,CACT,CAEA,kBAAMuE,GACJ,MAAMvE,QAAgBvY,KAAKsc,YAC3B,GAAK/D,EAAQpU,OAGb,OAAOoU,EAAQA,EAAQpU,OAAS,EAClC,CAEA,8BAAM4Y,CAAyB5Y,EAAgBgV,GAC7C,MAAM6D,EAAc7D,EAAWhV,EAC/B,GAAe,IAAXA,EACF,MAAO,GAET,MAAMoU,QAAgBvY,KAAKsc,YACrBW,EAAW,GAIXC,EAAU,CAACpb,EAAYqb,KAC3B,MAAMN,EAAuB/a,EA/FL,GAgGlBsb,EAA2BD,EAC7BA,EAjGoB,GAkGpBE,IAEJ,OACER,GAAwB1D,GACxBiE,EAA2BjE,EAEpB,EAGL0D,EAAuB1D,GACjB,EAGH,CAAC,EAGV,IAAImE,EAAa,EACbC,EAAahF,EAAQpU,OAAS,EAC9BqZ,EAAiBzS,KAAKQ,MAAMgN,EAAQpU,OAAS,GAE7CsZ,EAAaP,EACf3E,EAAQiF,GACRjF,EAAQiF,EAAiB,IAE3B,KAAsB,IAAfC,GACDA,EAAa,EACfF,EAAaC,EAAiB,EACrBC,EAAa,IACtBH,EAAaE,EAAiB,GAEhCA,EAAiBzS,KAAK2S,MAAMH,EAAaD,GAAc,GAAKA,EAC5DG,EAAaP,EAAQ3E,EAAQiF,GAAiBjF,EAAQiF,EAAiB,IAIzEP,EAASlX,KAAKwS,EAAQiF,IACtB,IAAItZ,EAAIsZ,EAAiB,EACzB,KAAOtZ,EAAIqU,EAAQpU,SACjB8Y,EAASlX,KAAKwS,EAAQrU,MAClBqU,EAAQrU,GAzIY,IAyIiB8Y,IAFhB9Y,GAAK,GAShC,OAHI+Y,EAASA,EAAS9Y,OAAS,GA7IL,GA6IiC6Y,GACzDC,EAASlX,KAAK,IAETkX,CACT,EC/Ia,MAAMU,EAInB,WAAA1c,EAAY,WACV4G,EAAU,KACVsU,EAAI,cACJyB,EAAa,QACbC,IAOA,GAAIhW,EACF7H,KAAK6H,WAAaA,MACb,KAAIsU,EAGT,MAAM,IAAI/a,UAAU,6CAFpBpB,KAAK6H,WAAa,IAAI,KAAUsU,E,CAKlC,IAAKyB,IAAkBC,IAAY1B,EACjC,MAAM,IAAI/a,UAAU,mDAGtBpB,KAAK8d,IAAM,IAAI5B,EAAS,CACtBrU,WAAY+V,EACZzB,KAAOyB,GAAkBC,IAAW1B,EAAiB,GAAGA,QAAb0B,GAE/C,CAEA,UAAM3J,GACJ,MAAM6J,QAAuB/d,KAAK6H,WAAWqM,OAC7C,OAAOtR,OAAOC,OAAOkb,EAAgB,CACnCxd,WAAYP,KAAKge,0BACjBC,YAAQlZ,EACRmZ,aAASnZ,GAEb,CAEA,6BAAMiZ,GAGJ,MAAO,CAAEnB,SAA8B7c,KAAK8d,IAAIhB,gBAE1C,KAAEvc,SAAeP,KAAK6H,WAAWqM,OAEjC1B,EAAM,KAAOgK,YAAY,IAGzB,UAAEtG,SAAoBlW,KAAK6H,WAAWoM,KAAKzB,EAAK,EAAG,EAAGjS,EAAO,GAAK,GACxE,GAAkB,IAAd2V,EACF,MAAM,IAAIhW,MAAM,cAGlB,OAAO2c,EAD2BrK,EAAIjK,aAAa,EAErD,CAEA,6BAAM4V,CACJC,GACCxB,IACAyB,IAED,IAAI9a,EAAO8a,EACN9a,IACHA,SAAcvD,KAAK6H,WAAWqM,QAAQ3T,MAIxC,MAAM+d,EAAwB/a,EAAOqZ,EAcrC,aAZM5c,KAAK6H,WAAWoM,KACpBmK,EACA,EACAE,EACA1B,SAI2BlQ,EAC3B0R,EAAYtX,MAAM,EAAGwX,GAIzB,CAEA,UAAMrK,CAAKzB,EAAalO,EAAgBH,EAAgBgV,GAEtD,MAAMoF,QAAuBve,KAAK8d,IAAIf,yBACpC5Y,EACAgV,GAEIiF,EAAc,KAAO5B,YAAY,OAEvC,IAAIgC,EAAoBla,EACpB4R,EAAY,EAChB,IACE,IAAIuI,EAAW,EACfA,EAAWF,EAAepa,OAAS,EACnCsa,GAAY,EACZ,CAEA,MAAMC,QAA2B1e,KAAKme,wBACpCC,EACAG,EAAeE,GACfF,EAAeE,EAAW,KAErB,CAAE5B,GAAwB0B,EAAeE,GAC1CE,EACJ9B,GAAwB1D,EAAW,EAAIA,EAAW0D,EAC9C+B,EACJ7T,KAAK/G,IACHmV,EAAWhV,EACX0Y,EAAuB6B,EAAmBva,QACxC0Y,EACF8B,GAAgB,GAAKA,EAAeD,EAAmBva,SACzDua,EAAmBG,
KAAKrM,EAAKgM,EAAmBG,EAAcC,GAC9DJ,GAAqBI,EAAYD,EACjCzI,GAAa0I,EAAYD,E,CAI7B,MAAO,CAAEzI,YAAWzJ,OAAQ+F,EAC9B,E,mCCnIF,IAAIsM,EAAQ,CACR,EAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,WAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WACpF,WAAY,SAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,WAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WAAY,WACpF,WAAY,WAAY,SAAY,WAAY,WAAY,WAAY,SAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,WAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UAAY,WACpF,WAAY,WAAY,WAAY,SAAY,WAAY,WAAY,WAAY,SACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,WACpF,WAAY,WAAY,WAAY,SAAY,WAAY,WAAY,WAAY,UACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,UACpF,WAAY,WAAY,WAAY,UAAY,WAAY,WAAY,WAAY,WAE9D,oBAAfC,aACPD,EAAQ,IAAIC,WAAWD,I,eCjC3B,MACA,EADqB,CAACrb,EAAOub,IAAa,KAAOhF,KAAKvW,EAAOub,GCD7D,ECDe,SAAmBC,EAAOC,GACrC,MAAMpc,EAAS,CAACW,EAAO0b,IAAaD,EAAW,EAAazb,GAAQ0b,KAAc,EAIlF,OAHArc,EAAO6W,OAAS,CAAClW,EAAO0b,IAAaD,EAAW,EAAazb,GAAQ0b,GACrErc,EAAOuZ,SAAWvZ,EAClBA,EAAOmc,MDHc,SCIdnc,CACX,CDLA,CAAyB,GFoCX,CAACsc,EAASD,KAEpB,IAAIE,EAAmB,IAAbF,EAAiB,GAAMA,EACjC,IAAK,IAAIzJ,EAAQ,EAAGA,EAAQ0J,EAAQjb,OAAQuR,IACxC2J,EAAMP,EAA+B,KAAxBO,EAAMD,EAAQ1J,KAAmB2J,IAAQ,EAE1D,OAAOA,CAAQ,G","sources":["../../../node_modules/@gmod/abortable-promise-cache/src/AggregateAbortController.ts","../../../node_modules/@gmod/abortable-promise-cache/src/AggregateStatusReporter.ts","../../../node_modules/@gmod/abortable-promise-cache/src/AbortablePromiseCache.ts","../../../node_modules/@gmod/bam/src/virtualOffset.ts","../../../node_modules/@gmod/bam/src/chunk.ts","../../../node_modules/@gmod/bam/src/util.ts","../../../node_modules/@gmod/bam/src/indexFile.ts","../../../node_modules/@gmod/bam/src/bai.ts","../../../node_modules/@gmod/bam/src/csi.ts","../../../node_modules/@gmod/bam/src/constants.ts","../../../node_modules/@gmod/bam/src/record.ts","../../../node_modules/@gmod/bam/src/sam.ts","../../../node_modules/@gmod/bam/src/bamFile.ts","../../../node_modules/@gmod/bam/src/htsget.ts","../../../node_modules/@gmod/bgzf-filehandle/src/unzip-pako.ts","../../../node_modules/@gmod/bgzf-filehandle/src/gziIndex.ts","../../../node_modules/@gmod/bgzf-filehandle/src/bgzFilehandle.ts","../../../node_modules/crc/mjs/calculators/crc32.js","../../../node_modules/crc/mjs/create_buffer.js","../../../node_modules/crc/mjs/crc32.js","../../../node_modules/crc/mjs/define_crc.js"],"sourcesContent":["class NullSignal {}\n\n/**\n * aggregates a number of abort signals, will only fire the aggregated\n * abort if all of the input signals have been aborted\n */\nexport default class AggregateAbortController {\n signals = new Set()\n abortController = new AbortController()\n\n /**\n * @param {AbortSignal} [signal] optional AbortSignal to add. 
if falsy,\n * will be treated as a null-signal, and this abortcontroller will no\n * longer be abortable.\n */\n //@ts-ignore\n addSignal(signal: AbortSignal = new NullSignal()): void {\n if (this.signal.aborted) {\n throw new Error('cannot add a signal, already aborted!')\n }\n\n // note that a NullSignal will never fire, so if we\n // have one this thing will never actually abort\n this.signals.add(signal)\n if (signal.aborted) {\n // handle the abort immediately if it is already aborted\n // for some reason\n this.handleAborted(signal)\n } else if (typeof signal.addEventListener === 'function') {\n signal.addEventListener('abort', () => {\n this.handleAborted(signal)\n })\n }\n }\n\n handleAborted(signal: AbortSignal): void {\n this.signals.delete(signal)\n if (this.signals.size === 0) {\n this.abortController.abort()\n }\n }\n\n get signal(): AbortSignal {\n return this.abortController.signal\n }\n\n abort(): void {\n this.abortController.abort()\n }\n}\n","export default class AggregateStatusReporter {\n callbacks = new Set<Function>()\n currentMessage: unknown\n\n addCallback(callback: Function = () => {}): void {\n this.callbacks.add(callback)\n callback(this.currentMessage)\n }\n\n callback(message: unknown) {\n this.currentMessage = message\n for (const elt of this.callbacks) {\n elt(message)\n }\n }\n}\n","import AggregateAbortController from './AggregateAbortController'\nimport AggregateStatusReporter from './AggregateStatusReporter'\n\ninterface Cache<U> {\n delete: (key: string) => void\n keys: () => Iterator<string>\n get: (key: string) => U | undefined\n set: (key: string, value: U) => void\n has: (key: string) => boolean\n}\ntype FillCallback<T, U> = (\n data: T,\n signal?: AbortSignal,\n statusCallback?: Function,\n) => Promise<U>\n\ninterface Entry<U> {\n aborter: AggregateAbortController\n settled: boolean\n readonly aborted: boolean\n statusReporter: AggregateStatusReporter\n promise: Promise<U>\n}\nexport default class AbortablePromiseCache<T, U> {\n /**\n * @param {object} args constructor args\n * @param {Function} args.fill fill callback, will be called with sig `fill(data, signal)`\n * @param {object} args.cache backing store to use, must implement `get(key)`, `set(key, val)`,\n * `delete(key)`, and `keys() -> iterator`\n */\n\n private cache: Cache<Entry<U>>\n private fillCallback: FillCallback<T, U>\n\n constructor({\n fill,\n cache,\n }: {\n fill: FillCallback<T, U>\n cache: Cache<Entry<U>>\n }) {\n if (typeof fill !== 'function') {\n throw new TypeError('must pass a fill function')\n }\n if (typeof cache !== 'object') {\n throw new TypeError('must pass a cache object')\n }\n if (\n typeof cache.get !== 'function' ||\n typeof cache.set !== 'function' ||\n typeof cache.delete !== 'function'\n ) {\n throw new TypeError(\n 'cache must implement get(key), set(key, val), and and delete(key)',\n )\n }\n\n this.cache = cache\n this.fillCallback = fill\n }\n\n static isAbortException(exception: Error) {\n return (\n // DOMException\n exception.name === 'AbortError' ||\n // standard-ish non-DOM abort exception\n //@ts-ignore\n exception.code === 'ERR_ABORTED' ||\n // stringified DOMException\n exception.message === 'AbortError: aborted' ||\n // stringified standard-ish exception\n exception.message === 'Error: aborted'\n )\n }\n\n evict(key: string, entry: Entry<U>) {\n if (this.cache.get(key) === entry) {\n this.cache.delete(key)\n }\n }\n\n fill(key: string, data: T, signal?: AbortSignal, statusCallback?: Function) {\n const aborter = new AggregateAbortController()\n 
const statusReporter = new AggregateStatusReporter()\n statusReporter.addCallback(statusCallback)\n const newEntry: Entry<U> = {\n aborter: aborter,\n promise: this.fillCallback(data, aborter.signal, (message: unknown) => {\n statusReporter.callback(message)\n }),\n settled: false,\n statusReporter,\n get aborted() {\n return this.aborter.signal.aborted\n },\n }\n newEntry.aborter.addSignal(signal)\n\n // remove the fill from the cache when its abortcontroller fires, if still in there\n newEntry.aborter.signal.addEventListener('abort', () => {\n if (!newEntry.settled) {\n this.evict(key, newEntry)\n }\n })\n\n // chain off the cached promise to record when it settles\n newEntry.promise\n .then(\n () => {\n newEntry.settled = true\n },\n () => {\n newEntry.settled = true\n\n // if the fill throws an error (including abort) and is still in the cache, remove it\n this.evict(key, newEntry)\n },\n )\n .catch(error => {\n // this will only be reached if there is some kind of\n // bad bug in this library\n console.error(error)\n throw error\n })\n\n this.cache.set(key, newEntry)\n }\n\n static checkSinglePromise<U>(promise: Promise<U>, signal?: AbortSignal) {\n // check just this signal for having been aborted, and abort the\n // promise if it was, regardless of what happened with the cached\n // response\n function checkForSingleAbort() {\n if (signal?.aborted) {\n throw Object.assign(new Error('aborted'), { code: 'ERR_ABORTED' })\n }\n }\n\n return promise.then(\n result => {\n checkForSingleAbort()\n return result\n },\n error => {\n checkForSingleAbort()\n throw error\n },\n )\n }\n\n has(key: string): boolean {\n return this.cache.has(key)\n }\n\n /**\n * Callback for getting status of the pending async\n *\n * @callback statusCallback\n * @param {any} status, current status string or message object\n */\n\n /**\n * @param {any} key cache key to use for this request\n * @param {any} data data passed as the first argument to the fill callback\n * @param {AbortSignal} [signal] optional AbortSignal object that aborts the request\n * @param {statusCallback} a callback to get the current status of a pending async operation\n */\n get(\n key: string,\n data: T,\n signal?: AbortSignal,\n statusCallback?: Function,\n ): Promise<U> {\n if (!signal && data instanceof AbortSignal) {\n throw new TypeError(\n 'second get argument appears to be an AbortSignal, perhaps you meant to pass `null` for the fill data?',\n )\n }\n const cacheEntry = this.cache.get(key)\n\n if (cacheEntry) {\n if (cacheEntry.aborted && !cacheEntry.settled) {\n // if it's aborted but has not realized it yet, evict it and redispatch\n this.evict(key, cacheEntry)\n return this.get(key, data, signal, statusCallback)\n }\n\n if (cacheEntry.settled) {\n // too late to abort, just return it\n return cacheEntry.promise\n }\n\n // request is in-flight, add this signal to its list of signals,\n // or if there is no signal, the aborter will become non-abortable\n cacheEntry.aborter.addSignal(signal)\n cacheEntry.statusReporter.addCallback(statusCallback)\n\n return AbortablePromiseCache.checkSinglePromise(\n cacheEntry.promise,\n signal,\n )\n }\n\n // if we got here, it is not in the cache. fill.\n this.fill(key, data, signal, statusCallback)\n return AbortablePromiseCache.checkSinglePromise(\n //see https://www.typescriptlang.org/docs/handbook/2/everyday-types.html#non-null-assertion-operator-postfix-\n\n this.cache.get(key)!.promise,\n signal,\n )\n }\n\n /**\n * delete the given entry from the cache. 
if it exists and its fill request has\n * not yet settled, the fill will be signaled to abort.\n *\n * @param {any} key\n */\n delete(key: string) {\n const cachedEntry = this.cache.get(key)\n if (cachedEntry) {\n if (!cachedEntry.settled) {\n cachedEntry.aborter.abort()\n }\n this.cache.delete(key)\n }\n }\n\n /**\n * Clear all requests from the cache. Aborts any that have not settled.\n * @returns {number} count of entries deleted\n */\n clear() {\n // iterate without needing regenerator-runtime\n const keyIter = this.cache.keys()\n let deleteCount = 0\n for (let result = keyIter.next(); !result.done; result = keyIter.next()) {\n this.delete(result.value)\n deleteCount += 1\n }\n return deleteCount\n }\n}\n","export default class VirtualOffset {\n public blockPosition: number\n public dataPosition: number\n constructor(blockPosition: number, dataPosition: number) {\n this.blockPosition = blockPosition // < offset of the compressed data block\n this.dataPosition = dataPosition // < offset into the uncompressed data\n }\n\n toString() {\n return `${this.blockPosition}:${this.dataPosition}`\n }\n\n compareTo(b: VirtualOffset) {\n return (\n this.blockPosition - b.blockPosition || this.dataPosition - b.dataPosition\n )\n }\n\n static min(...args: VirtualOffset[]) {\n let min\n let i = 0\n for (; !min; i += 1) {\n min = args[i]\n }\n for (; i < args.length; i += 1) {\n if (min.compareTo(args[i]) > 0) {\n min = args[i]\n }\n }\n return min\n }\n}\nexport function fromBytes(bytes: Buffer, offset = 0, bigendian = false) {\n if (bigendian) {\n throw new Error('big-endian virtual file offsets not implemented')\n }\n\n return new VirtualOffset(\n bytes[offset + 7] * 0x10000000000 +\n bytes[offset + 6] * 0x100000000 +\n bytes[offset + 5] * 0x1000000 +\n bytes[offset + 4] * 0x10000 +\n bytes[offset + 3] * 0x100 +\n bytes[offset + 2],\n (bytes[offset + 1] << 8) | bytes[offset],\n )\n}\n","import VirtualOffset from './virtualOffset'\n\n// little class representing a chunk in the index\nexport default class Chunk {\n public buffer?: Buffer\n\n constructor(\n public minv: VirtualOffset,\n public maxv: VirtualOffset,\n public bin: number,\n public _fetchedSize?: number,\n ) {}\n\n toUniqueString() {\n return `${this.minv.toString()}..${this.maxv.toString()} (bin ${\n this.bin\n }, fetchedSize ${this.fetchedSize()})`\n }\n\n toString() {\n return this.toUniqueString()\n }\n\n compareTo(b: Chunk) {\n return (\n this.minv.compareTo(b.minv) ||\n this.maxv.compareTo(b.maxv) ||\n this.bin - b.bin\n )\n }\n\n fetchedSize() {\n if (this._fetchedSize !== undefined) {\n return this._fetchedSize\n }\n return this.maxv.blockPosition + (1 << 16) - this.minv.blockPosition\n }\n}\n","import Long from 'long'\nimport Chunk from './chunk'\nimport VirtualOffset from './virtualOffset'\n\nexport function timeout(ms: number) {\n return new Promise(resolve => setTimeout(resolve, ms))\n}\n\nexport function longToNumber(long: Long) {\n if (\n long.greaterThan(Number.MAX_SAFE_INTEGER) ||\n long.lessThan(Number.MIN_SAFE_INTEGER)\n ) {\n throw new Error('integer overflow')\n }\n return long.toNumber()\n}\n\n/**\n * Properly check if the given AbortSignal is aborted.\n * Per the standard, if the signal reads as aborted,\n * this function throws either a DOMException AbortError, or a regular error\n * with a `code` attribute set to `ERR_ABORTED`.\n *\n * For convenience, passing `undefined` is a no-op\n *\n * @param {AbortSignal} [signal] an AbortSignal, or anything with an `aborted` attribute\n * @returns nothing\n */\nexport 
function checkAbortSignal(signal?: AbortSignal) {\n if (!signal) {\n return\n }\n\n if (signal.aborted) {\n // console.log('bam aborted!')\n if (typeof DOMException === 'undefined') {\n const e = new Error('aborted')\n //@ts-ignore\n e.code = 'ERR_ABORTED'\n throw e\n } else {\n throw new DOMException('aborted', 'AbortError')\n }\n }\n}\n\n/**\n * Skips to the next tick, then runs `checkAbortSignal`.\n * Await this to inside an otherwise synchronous loop to\n * provide a place to break when an abort signal is received.\n * @param {AbortSignal} signal\n */\nexport async function abortBreakPoint(signal?: AbortSignal) {\n await Promise.resolve()\n checkAbortSignal(signal)\n}\n\nexport function canMergeBlocks(chunk1: Chunk, chunk2: Chunk) {\n return (\n chunk2.minv.blockPosition - chunk1.maxv.blockPosition < 65000 &&\n chunk2.maxv.blockPosition - chunk1.minv.blockPosition < 5000000\n )\n}\n\nexport interface BamOpts {\n viewAsPairs?: boolean\n pairAcrossChr?: boolean\n maxInsertSize?: number\n signal?: AbortSignal\n}\n\nexport interface BaseOpts {\n signal?: AbortSignal\n}\n\nexport function makeOpts(obj: AbortSignal | BaseOpts = {}): BaseOpts {\n return 'aborted' in obj ? ({ signal: obj } as BaseOpts) : obj\n}\n\nexport function optimizeChunks(chunks: Chunk[], lowest?: VirtualOffset) {\n const mergedChunks: Chunk[] = []\n let lastChunk: Chunk | undefined\n\n if (chunks.length === 0) {\n return chunks\n }\n\n chunks.sort((c0, c1) => {\n const dif = c0.minv.blockPosition - c1.minv.blockPosition\n return dif === 0 ? c0.minv.dataPosition - c1.minv.dataPosition : dif\n })\n\n for (const chunk of chunks) {\n if (!lowest || chunk.maxv.compareTo(lowest) > 0) {\n if (lastChunk === undefined) {\n mergedChunks.push(chunk)\n lastChunk = chunk\n } else {\n if (canMergeBlocks(lastChunk, chunk)) {\n if (chunk.maxv.compareTo(lastChunk.maxv) > 0) {\n lastChunk.maxv = chunk.maxv\n }\n } else {\n mergedChunks.push(chunk)\n lastChunk = chunk\n }\n }\n }\n }\n\n return mergedChunks\n}\n\nexport function parsePseudoBin(bytes: Buffer, offset: number) {\n const lineCount = longToNumber(\n Long.fromBytesLE(\n Array.prototype.slice.call(bytes, offset, offset + 8),\n true,\n ),\n )\n return { lineCount }\n}\n\nexport function findFirstData(\n firstDataLine: VirtualOffset | undefined,\n virtualOffset: VirtualOffset,\n) {\n return firstDataLine\n ? firstDataLine.compareTo(virtualOffset) > 0\n ? 
virtualOffset\n : firstDataLine\n : virtualOffset\n}\n\nexport function parseNameBytes(\n namesBytes: Buffer,\n renameRefSeq: (arg: string) => string = s => s,\n) {\n let currRefId = 0\n let currNameStart = 0\n const refIdToName = []\n const refNameToId: Record<string, number> = {}\n for (let i = 0; i < namesBytes.length; i += 1) {\n if (!namesBytes[i]) {\n if (currNameStart < i) {\n let refName = namesBytes.toString('utf8', currNameStart, i)\n refName = renameRefSeq(refName)\n refIdToName[currRefId] = refName\n refNameToId[refName] = currRefId\n }\n currNameStart = i + 1\n currRefId += 1\n }\n }\n return { refNameToId, refIdToName }\n}\n","import { GenericFilehandle } from 'generic-filehandle'\nimport Chunk from './chunk'\nimport { BaseOpts } from './util'\n\nexport default abstract class IndexFile {\n public filehandle: GenericFilehandle\n public renameRefSeq: (s: string) => string\n\n /**\n * @param {filehandle} filehandle\n * @param {function} [renameRefSeqs]\n */\n constructor({\n filehandle,\n renameRefSeq = (n: string) => n,\n }: {\n filehandle: GenericFilehandle\n renameRefSeq?: (a: string) => string\n }) {\n this.filehandle = filehandle\n this.renameRefSeq = renameRefSeq\n }\n public abstract lineCount(refId: number): Promise<number>\n public abstract indexCov(\n refId: number,\n start?: number,\n end?: number,\n ): Promise<{ start: number; end: number; score: number }[]>\n\n public abstract blocksForRange(\n chrId: number,\n start: number,\n end: number,\n opts?: BaseOpts,\n ): Promise<Chunk[]>\n}\n","import VirtualOffset, { fromBytes } from './virtualOffset'\nimport Chunk from './chunk'\n\nimport { optimizeChunks, parsePseudoBin, findFirstData, BaseOpts } from './util'\nimport IndexFile from './indexFile'\n\nconst BAI_MAGIC = 21578050 // BAI\\1\n\nfunction roundDown(n: number, multiple: number) {\n return n - (n % multiple)\n}\nfunction roundUp(n: number, multiple: number) {\n return n - (n % multiple) + multiple\n}\n\nfunction reg2bins(beg: number, end: number) {\n end -= 1\n return [\n [0, 0],\n [1 + (beg >> 26), 1 + (end >> 26)],\n [9 + (beg >> 23), 9 + (end >> 23)],\n [73 + (beg >> 20), 73 + (end >> 20)],\n [585 + (beg >> 17), 585 + (end >> 17)],\n [4681 + (beg >> 14), 4681 + (end >> 14)],\n ]\n}\n\nexport default class BAI extends IndexFile {\n public setupP?: ReturnType<BAI['_parse']>\n\n async lineCount(refId: number, opts?: BaseOpts) {\n const indexData = await this.parse(opts)\n return indexData.indices[refId]?.stats?.lineCount || 0\n }\n\n // fetch and parse the index\n async _parse(opts?: BaseOpts) {\n const bytes = (await this.filehandle.readFile(opts)) as Buffer\n\n // check BAI magic numbers\n if (bytes.readUInt32LE(0) !== BAI_MAGIC) {\n throw new Error('Not a BAI file')\n }\n\n const refCount = bytes.readInt32LE(4)\n const depth = 5\n const binLimit = ((1 << ((depth + 1) * 3)) - 1) / 7\n\n // read the indexes for each reference sequence\n let curr = 8\n let firstDataLine: VirtualOffset | undefined\n\n type BinIndex = Record<string, Chunk[]>\n type LinearIndex = VirtualOffset[]\n const indices = new Array<{\n binIndex: BinIndex\n linearIndex: LinearIndex\n stats?: { lineCount: number }\n }>(refCount)\n for (let i = 0; i < refCount; i++) {\n // the binning index\n const binCount = bytes.readInt32LE(curr)\n let stats\n\n curr += 4\n const binIndex: Record<number, Chunk[]> = {}\n\n for (let j = 0; j < binCount; j += 1) {\n const bin = bytes.readUInt32LE(curr)\n curr += 4\n if (bin === binLimit + 1) {\n curr += 4\n stats = parsePseudoBin(bytes, curr + 16)\n curr += 
32\n } else if (bin > binLimit + 1) {\n throw new Error('bai index contains too many bins, please use CSI')\n } else {\n const chunkCount = bytes.readInt32LE(curr)\n curr += 4\n const chunks = new Array<Chunk>(chunkCount)\n for (let k = 0; k < chunkCount; k++) {\n const u = fromBytes(bytes, curr)\n curr += 8\n const v = fromBytes(bytes, curr)\n curr += 8\n firstDataLine = findFirstData(firstDataLine, u)\n chunks[k] = new Chunk(u, v, bin)\n }\n binIndex[bin] = chunks\n }\n }\n\n const linearCount = bytes.readInt32LE(curr)\n curr += 4\n // as we're going through the linear index, figure out the smallest\n // virtual offset in the indexes, which tells us where the BAM header\n // ends\n const linearIndex = new Array<VirtualOffset>(linearCount)\n for (let j = 0; j < linearCount; j++) {\n const offset = fromBytes(bytes, curr)\n curr += 8\n firstDataLine = findFirstData(firstDataLine, offset)\n linearIndex[j] = offset\n }\n\n indices[i] = { binIndex, linearIndex, stats }\n }\n\n return {\n bai: true,\n firstDataLine,\n maxBlockSize: 1 << 16,\n indices,\n refCount,\n }\n }\n\n async indexCov(\n seqId: number,\n start?: number,\n end?: number,\n opts: BaseOpts = {},\n ): Promise<{ start: number; end: number; score: number }[]> {\n const v = 16384\n const range = start !== undefined\n const indexData = await this.parse(opts)\n const seqIdx = indexData.indices[seqId]\n if (!seqIdx) {\n return []\n }\n const { linearIndex = [], stats } = seqIdx\n if (linearIndex.length === 0) {\n return []\n }\n const e = end === undefined ? (linearIndex.length - 1) * v : roundUp(end, v)\n const s = start === undefined ? 0 : roundDown(start, v)\n const depths = range\n ? new Array((e - s) / v)\n : new Array(linearIndex.length - 1)\n const totalSize = linearIndex[linearIndex.length - 1].blockPosition\n if (e > (linearIndex.length - 1) * v) {\n throw new Error('query outside of range of linear index')\n }\n let currentPos = linearIndex[s / v].blockPosition\n for (let i = s / v, j = 0; i < e / v; i++, j++) {\n depths[j] = {\n score: linearIndex[i + 1].blockPosition - currentPos,\n start: i * v,\n end: i * v + v,\n }\n currentPos = linearIndex[i + 1].blockPosition\n }\n return depths.map(d => ({\n ...d,\n score: (d.score * (stats?.lineCount || 0)) / totalSize,\n }))\n }\n\n async blocksForRange(\n refId: number,\n min: number,\n max: number,\n opts: BaseOpts = {},\n ) {\n if (min < 0) {\n min = 0\n }\n\n const indexData = await this.parse(opts)\n if (!indexData) {\n return []\n }\n const ba = indexData.indices[refId]\n if (!ba) {\n return []\n }\n\n // List of bin #s that overlap min, max\n const overlappingBins = reg2bins(min, max)\n const chunks: Chunk[] = []\n\n // Find chunks in overlapping bins. 
Leaf bins (< 4681) are not pruned\n for (const [start, end] of overlappingBins) {\n for (let bin = start; bin <= end; bin++) {\n if (ba.binIndex[bin]) {\n const binChunks = ba.binIndex[bin]\n for (const binChunk of binChunks) {\n chunks.push(binChunk)\n }\n }\n }\n }\n\n // Use the linear index to find minimum file position of chunks that could\n // contain alignments in the region\n const nintv = ba.linearIndex.length\n let lowest: VirtualOffset | undefined\n const minLin = Math.min(min >> 14, nintv - 1)\n const maxLin = Math.min(max >> 14, nintv - 1)\n for (let i = minLin; i <= maxLin; ++i) {\n const vp = ba.linearIndex[i]\n if (vp && (!lowest || vp.compareTo(lowest) < 0)) {\n lowest = vp\n }\n }\n\n return optimizeChunks(chunks, lowest)\n }\n\n async parse(opts: BaseOpts = {}) {\n if (!this.setupP) {\n this.setupP = this._parse(opts).catch(e => {\n this.setupP = undefined\n throw e\n })\n }\n return this.setupP\n }\n\n async hasRefSeq(seqId: number, opts: BaseOpts = {}) {\n const header = await this.parse(opts)\n return !!header.indices[seqId]?.binIndex\n }\n}\n","import { unzip } from '@gmod/bgzf-filehandle'\nimport VirtualOffset, { fromBytes } from './virtualOffset'\nimport Chunk from './chunk'\nimport {\n optimizeChunks,\n findFirstData,\n parsePseudoBin,\n parseNameBytes,\n BaseOpts,\n} from './util'\n\nimport IndexFile from './indexFile'\n\nconst CSI1_MAGIC = 21582659 // CSI\\1\nconst CSI2_MAGIC = 38359875 // CSI\\2\n\nfunction lshift(num: number, bits: number) {\n return num * 2 ** bits\n}\nfunction rshift(num: number, bits: number) {\n return Math.floor(num / 2 ** bits)\n}\n\nexport default class CSI extends IndexFile {\n private maxBinNumber = 0\n private depth = 0\n private minShift = 0\n\n public setupP?: ReturnType<CSI['_parse']>\n\n async lineCount(refId: number, opts?: BaseOpts) {\n const indexData = await this.parse(opts)\n return indexData.indices[refId]?.stats?.lineCount || 0\n }\n\n async indexCov() {\n return []\n }\n\n parseAuxData(bytes: Buffer, offset: number) {\n const formatFlags = bytes.readInt32LE(offset)\n const coordinateType =\n formatFlags & 0x10000 ? 'zero-based-half-open' : '1-based-closed'\n const format = (\n { 0: 'generic', 1: 'SAM', 2: 'VCF' } as Record<number, string>\n )[formatFlags & 0xf]\n if (!format) {\n throw new Error(`invalid Tabix preset format flags ${formatFlags}`)\n }\n const columnNumbers = {\n ref: bytes.readInt32LE(offset + 4),\n start: bytes.readInt32LE(offset + 8),\n end: bytes.readInt32LE(offset + 12),\n }\n const metaValue = bytes.readInt32LE(offset + 16)\n const metaChar = metaValue ? 
String.fromCharCode(metaValue) : ''\n const skipLines = bytes.readInt32LE(offset + 20)\n const nameSectionLength = bytes.readInt32LE(offset + 24)\n\n return {\n columnNumbers,\n coordinateType,\n metaValue,\n metaChar,\n skipLines,\n format,\n formatFlags,\n ...parseNameBytes(\n bytes.subarray(offset + 28, offset + 28 + nameSectionLength),\n this.renameRefSeq,\n ),\n }\n }\n\n // fetch and parse the index\n async _parse(opts: { signal?: AbortSignal }) {\n const buffer = await this.filehandle.readFile(opts)\n const bytes = await unzip(buffer)\n\n let csiVersion\n // check TBI magic numbers\n if (bytes.readUInt32LE(0) === CSI1_MAGIC) {\n csiVersion = 1\n } else if (bytes.readUInt32LE(0) === CSI2_MAGIC) {\n csiVersion = 2\n } else {\n throw new Error('Not a CSI file')\n // TODO: do we need to support big-endian CSI files?\n }\n\n this.minShift = bytes.readInt32LE(4)\n this.depth = bytes.readInt32LE(8)\n this.maxBinNumber = ((1 << ((this.depth + 1) * 3)) - 1) / 7\n const auxLength = bytes.readInt32LE(12)\n const aux = auxLength >= 30 ? this.parseAuxData(bytes, 16) : undefined\n const refCount = bytes.readInt32LE(16 + auxLength)\n\n type BinIndex = Record<string, Chunk[]>\n\n // read the indexes for each reference sequence\n let curr = 16 + auxLength + 4\n let firstDataLine: VirtualOffset | undefined\n const indices = new Array<{\n binIndex: BinIndex\n stats?: { lineCount: number }\n }>(refCount)\n for (let i = 0; i < refCount; i++) {\n // the binning index\n const binCount = bytes.readInt32LE(curr)\n curr += 4\n const binIndex: Record<string, Chunk[]> = {}\n let stats // < provided by parsing a pseudo-bin, if present\n for (let j = 0; j < binCount; j++) {\n const bin = bytes.readUInt32LE(curr)\n curr += 4\n if (bin > this.maxBinNumber) {\n stats = parsePseudoBin(bytes, curr + 28)\n curr += 28 + 16\n } else {\n firstDataLine = findFirstData(firstDataLine, fromBytes(bytes, curr))\n curr += 8\n const chunkCount = bytes.readInt32LE(curr)\n curr += 4\n const chunks = new Array<Chunk>(chunkCount)\n for (let k = 0; k < chunkCount; k += 1) {\n const u = fromBytes(bytes, curr)\n curr += 8\n const v = fromBytes(bytes, curr)\n curr += 8\n firstDataLine = findFirstData(firstDataLine, u)\n chunks[k] = new Chunk(u, v, bin)\n }\n binIndex[bin] = chunks\n }\n }\n\n indices[i] = { binIndex, stats }\n }\n\n return {\n csiVersion,\n firstDataLine,\n indices,\n refCount,\n csi: true,\n maxBlockSize: 1 << 16,\n ...aux,\n }\n }\n\n async blocksForRange(\n refId: number,\n min: number,\n max: number,\n opts: BaseOpts = {},\n ) {\n if (min < 0) {\n min = 0\n }\n\n const indexData = await this.parse(opts)\n const ba = indexData?.indices[refId]\n if (!ba) {\n return []\n }\n const overlappingBins = this.reg2bins(min, max)\n\n if (overlappingBins.length === 0) {\n return []\n }\n\n const chunks = []\n // Find chunks in overlapping bins. 
Leaf bins (< 4681) are not pruned\n for (const [start, end] of overlappingBins) {\n for (let bin = start; bin <= end; bin++) {\n if (ba.binIndex[bin]) {\n const binChunks = ba.binIndex[bin]\n for (const c of binChunks) {\n chunks.push(c)\n }\n }\n }\n }\n\n return optimizeChunks(chunks, new VirtualOffset(0, 0))\n }\n\n /**\n * calculate the list of bins that may overlap with region [beg,end)\n * (zero-based half-open)\n */\n reg2bins(beg: number, end: number) {\n beg -= 1 // < convert to 1-based closed\n if (beg < 1) {\n beg = 1\n }\n if (end > 2 ** 50) {\n end = 2 ** 34\n } // 17 GiB ought to be enough for anybody\n end -= 1\n let l = 0\n let t = 0\n let s = this.minShift + this.depth * 3\n const bins = []\n for (; l <= this.depth; s -= 3, t += lshift(1, l * 3), l += 1) {\n const b = t + rshift(beg, s)\n const e = t + rshift(end, s)\n if (e - b + bins.length > this.maxBinNumber) {\n throw new Error(\n `query ${beg}-${end} is too large for current binning scheme (shift ${this.minShift}, depth ${this.depth}), try a smaller query or a coarser index binning scheme`,\n )\n }\n bins.push([b, e])\n }\n return bins\n }\n\n async parse(opts: BaseOpts = {}) {\n if (!this.setupP) {\n this.setupP = this._parse(opts).catch(e => {\n this.setupP = undefined\n throw e\n })\n }\n return this.setupP\n }\n\n async hasRefSeq(seqId: number, opts: BaseOpts = {}) {\n const header = await this.parse(opts)\n return !!header.indices[seqId]?.binIndex\n }\n}\n","export default {\n // the read is paired in sequencing, no matter whether it is mapped in a pair\n BAM_FPAIRED: 1,\n // the read is mapped in a proper pair\n BAM_FPROPER_PAIR: 2,\n // the read itself is unmapped; conflictive with BAM_FPROPER_PAIR\n BAM_FUNMAP: 4,\n // the mate is unmapped\n BAM_FMUNMAP: 8,\n // the read is mapped to the reverse strand\n BAM_FREVERSE: 16,\n // the mate is mapped to the reverse strand\n BAM_FMREVERSE: 32,\n // this is read1\n BAM_FREAD1: 64,\n // this is read2\n BAM_FREAD2: 128,\n // not primary alignment\n BAM_FSECONDARY: 256,\n // QC failure\n BAM_FQCFAIL: 512,\n // optical or PCR duplicate\n BAM_FDUP: 1024,\n // supplementary alignment\n BAM_FSUPPLEMENTARY: 2048,\n}\n","import Constants from './constants'\n\nconst SEQRET_DECODER = '=ACMGRSVTWYHKDBN'.split('')\nconst CIGAR_DECODER = 'MIDNSHP=X???????'.split('')\n\n/**\n * Class of each BAM record returned by this API.\n */\nexport default class BamRecord {\n private data = {} as Record<string, any>\n private bytes: { start: number; end: number; byteArray: Buffer }\n private _id: number\n private _tagOffset: number | undefined\n private _tagList: string[] = []\n private _allTagsParsed = false\n\n public flags: any\n public _refID: number\n constructor(args: any) {\n const { bytes, fileOffset } = args\n const { byteArray, start } = bytes\n this.data = { start: byteArray.readInt32LE(start + 8) }\n this.bytes = bytes\n this._id = fileOffset\n this._refID = byteArray.readInt32LE(start + 4)\n this.flags = (byteArray.readInt32LE(start + 16) & 0xffff0000) >> 16\n }\n\n get(field: string) {\n //@ts-ignore\n if (this[field]) {\n //@ts-ignore\n if (this.data[field]) {\n return this.data[field]\n }\n //@ts-ignore\n this.data[field] = this[field]()\n return this.data[field]\n }\n return this._get(field.toLowerCase())\n }\n\n end() {\n return this.get('start') + this.get('length_on_ref')\n }\n\n seq_id() {\n return this._refID\n }\n\n // same as get(), except requires lower-case arguments. 
used\n // internally to save lots of calls to field.toLowerCase()\n _get(field: string) {\n if (field in this.data) {\n return this.data[field]\n }\n this.data[field] = this._parseTag(field)\n return this.data[field]\n }\n\n _tags() {\n this._parseAllTags()\n\n let tags = ['seq']\n\n if (!this.isSegmentUnmapped()) {\n tags.push(\n 'start',\n 'end',\n 'strand',\n 'score',\n 'qual',\n 'MQ',\n 'CIGAR',\n 'length_on_ref',\n 'template_length',\n )\n }\n if (this.isPaired()) {\n tags.push('next_segment_position', 'pair_orientation')\n }\n tags = tags.concat(this._tagList || [])\n\n for (const k of Object.keys(this.data)) {\n if (!k.startsWith('_') && k !== 'next_seq_id') {\n tags.push(k)\n }\n }\n\n const seen: Record<string, boolean> = {}\n return tags.filter(t => {\n if (\n (t in this.data && this.data[t] === undefined) ||\n t === 'CG' ||\n t === 'cg'\n ) {\n return false\n }\n\n const lt = t.toLowerCase()\n const s = seen[lt]\n seen[lt] = true\n return !s\n })\n }\n\n parent() {\n return\n }\n\n children() {\n return this.get('subfeatures')\n }\n\n id() {\n return this._id\n }\n\n // special parsers\n /**\n * Mapping quality score.\n */\n mq() {\n const mq = (this.get('_bin_mq_nl') & 0xff00) >> 8\n return mq === 255 ? undefined : mq\n }\n\n score() {\n return this.get('mq')\n }\n\n qual() {\n return this.qualRaw()?.join(' ')\n }\n\n qualRaw() {\n if (this.isSegmentUnmapped()) {\n return\n }\n\n const { start, byteArray } = this.bytes\n const p =\n start +\n 36 +\n this.get('_l_read_name') +\n this.get('_n_cigar_op') * 4 +\n this.get('_seq_bytes')\n const lseq = this.get('seq_length')\n return byteArray.subarray(p, p + lseq)\n }\n\n strand() {\n return this.isReverseComplemented() ? -1 : 1\n }\n\n multi_segment_next_segment_strand() {\n if (this.isMateUnmapped()) {\n return\n }\n return this.isMateReverseComplemented() ? 
-1 : 1\n }\n\n name() {\n return this.get('_read_name')\n }\n\n _read_name() {\n const nl = this.get('_l_read_name')\n const { byteArray, start } = this.bytes\n return byteArray.toString('ascii', start + 36, start + 36 + nl - 1)\n }\n\n /**\n * Get the value of a tag, parsing the tags as far as necessary.\n * Only called if we have not already parsed that field.\n */\n _parseTag(tagName?: string) {\n // if all of the tags have been parsed and we're still being\n // called, we already know that we have no such tag, because\n // it would already have been cached.\n if (this._allTagsParsed) {\n return\n }\n\n const { byteArray, start } = this.bytes\n let p =\n this._tagOffset ||\n start +\n 36 +\n this.get('_l_read_name') +\n this.get('_n_cigar_op') * 4 +\n this.get('_seq_bytes') +\n this.get('seq_length')\n\n const blockEnd = this.bytes.end\n let lcTag\n while (p < blockEnd && lcTag !== tagName) {\n const tag = String.fromCharCode(byteArray[p], byteArray[p + 1])\n lcTag = tag.toLowerCase()\n const type = String.fromCharCode(byteArray[p + 2])\n p += 3\n\n let value\n switch (type) {\n case 'A': {\n value = String.fromCharCode(byteArray[p])\n p += 1\n break\n }\n case 'i': {\n value = byteArray.readInt32LE(p)\n p += 4\n break\n }\n case 'I': {\n value = byteArray.readUInt32LE(p)\n p += 4\n break\n }\n case 'c': {\n value = byteArray.readInt8(p)\n p += 1\n break\n }\n case 'C': {\n value = byteArray.readUInt8(p)\n p += 1\n break\n }\n case 's': {\n value = byteArray.readInt16LE(p)\n p += 2\n break\n }\n case 'S': {\n value = byteArray.readUInt16LE(p)\n p += 2\n break\n }\n case 'f': {\n value = byteArray.readFloatLE(p)\n p += 4\n break\n }\n case 'Z':\n case 'H': {\n value = ''\n while (p <= blockEnd) {\n const cc = byteArray[p++]\n if (cc === 0) {\n break\n } else {\n value += String.fromCharCode(cc)\n }\n }\n break\n }\n case 'B': {\n value = ''\n const cc = byteArray[p++]\n const Btype = String.fromCharCode(cc)\n const limit = byteArray.readInt32LE(p)\n p += 4\n if (Btype === 'i') {\n if (tag === 'CG') {\n for (let k = 0; k < limit; k++) {\n const cigop = byteArray.readInt32LE(p)\n const lop = cigop >> 4\n const op = CIGAR_DECODER[cigop & 0xf]\n value += lop + op\n p += 4\n }\n } else {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readInt32LE(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 4\n }\n }\n }\n if (Btype === 'I') {\n if (tag === 'CG') {\n for (let k = 0; k < limit; k++) {\n const cigop = byteArray.readUInt32LE(p)\n const lop = cigop >> 4\n const op = CIGAR_DECODER[cigop & 0xf]\n value += lop + op\n p += 4\n }\n } else {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readUInt32LE(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 4\n }\n }\n }\n if (Btype === 's') {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readInt16LE(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 2\n }\n }\n if (Btype === 'S') {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readUInt16LE(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 2\n }\n }\n if (Btype === 'c') {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readInt8(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 1\n }\n }\n if (Btype === 'C') {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readUInt8(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 1\n }\n }\n if (Btype === 'f') {\n for (let k = 0; k < limit; k++) {\n value += byteArray.readFloatLE(p)\n if (k + 1 < limit) {\n value += ','\n }\n p += 4\n }\n }\n break\n }\n default: {\n console.warn(`Unknown BAM tag type 
'${type}', tags may be incomplete`)\n value = undefined\n p = blockEnd\n } // stop parsing tags\n }\n\n this._tagOffset = p\n\n this._tagList.push(tag)\n if (lcTag === tagName) {\n return value\n }\n\n this.data[lcTag] = value\n }\n this._allTagsParsed = true\n return\n }\n\n _parseAllTags() {\n this._parseTag('')\n }\n\n _parseCigar(cigar: string) {\n return (\n //@ts-ignore\n cigar\n .match(/\\d+\\D/g)\n //@ts-ignore\n .map(op => [/\\D/.exec(op)[0].toUpperCase(), Number.parseInt(op, 10)])\n )\n }\n\n /**\n * @returns {boolean} true if the read is paired, regardless of whether both\n * segments are mapped\n */\n isPaired() {\n return !!(this.flags & Constants.BAM_FPAIRED)\n }\n\n /** @returns {boolean} true if the read is paired, and both segments are mapped */\n isProperlyPaired() {\n return !!(this.flags & Constants.BAM_FPROPER_PAIR)\n }\n\n /** @returns {boolean} true if the read itself is unmapped; conflictive with isProperlyPaired */\n isSegmentUnmapped() {\n return !!(this.flags & Constants.BAM_FUNMAP)\n }\n\n /** @returns {boolean} true if the read itself is unmapped; conflictive with isProperlyPaired */\n isMateUnmapped() {\n return !!(this.flags & Constants.BAM_FMUNMAP)\n }\n\n /** @returns {boolean} true if the read is mapped to the reverse strand */\n isReverseComplemented() {\n return !!(this.flags & Constants.BAM_FREVERSE)\n }\n\n /** @returns {boolean} true if the mate is mapped to the reverse strand */\n isMateReverseComplemented() {\n return !!(this.flags & Constants.BAM_FMREVERSE)\n }\n\n /** @returns {boolean} true if this is read number 1 in a pair */\n isRead1() {\n return !!(this.flags & Constants.BAM_FREAD1)\n }\n\n /** @returns {boolean} true if this is read number 2 in a pair */\n isRead2() {\n return !!(this.flags & Constants.BAM_FREAD2)\n }\n\n /** @returns {boolean} true if this is a secondary alignment */\n isSecondary() {\n return !!(this.flags & Constants.BAM_FSECONDARY)\n }\n\n /** @returns {boolean} true if this read has failed QC checks */\n isFailedQc() {\n return !!(this.flags & Constants.BAM_FQCFAIL)\n }\n\n /** @returns {boolean} true if the read is an optical or PCR duplicate */\n isDuplicate() {\n return !!(this.flags & Constants.BAM_FDUP)\n }\n\n /** @returns {boolean} true if this is a supplementary alignment */\n isSupplementary() {\n return !!(this.flags & Constants.BAM_FSUPPLEMENTARY)\n }\n\n cigar() {\n if (this.isSegmentUnmapped()) {\n return\n }\n\n const { byteArray, start } = this.bytes\n const numCigarOps = this.get('_n_cigar_op')\n let p = start + 36 + this.get('_l_read_name')\n const seqLen = this.get('seq_length')\n let cigar = ''\n let lref = 0\n\n // check for CG tag by inspecting whether the CIGAR field\n // contains a clip that consumes entire seqLen\n let cigop = byteArray.readInt32LE(p)\n let lop = cigop >> 4\n let op = CIGAR_DECODER[cigop & 0xf]\n if (op === 'S' && lop === seqLen) {\n // if there is a CG the second CIGAR field will\n // be a N tag the represents the length on ref\n p += 4\n cigop = byteArray.readInt32LE(p)\n lop = cigop >> 4\n op = CIGAR_DECODER[cigop & 0xf]\n if (op !== 'N') {\n console.warn('CG tag with no N tag')\n }\n this.data.length_on_ref = lop\n return this.get('CG')\n } else {\n for (let c = 0; c < numCigarOps; ++c) {\n cigop = byteArray.readInt32LE(p)\n lop = cigop >> 4\n op = CIGAR_DECODER[cigop & 0xf]\n cigar += lop + op\n\n // soft clip, hard clip, and insertion don't count toward\n // the length on the reference\n if (op !== 'H' && op !== 'S' && op !== 'I') {\n lref += lop\n }\n\n p += 4\n }\n\n 
this.data.length_on_ref = lref\n return cigar\n }\n }\n\n length_on_ref() {\n if (this.data.length_on_ref) {\n return this.data.length_on_ref\n } else {\n this.get('cigar') // the length_on_ref is set as a side effect\n return this.data.length_on_ref\n }\n }\n\n _n_cigar_op() {\n return this.get('_flag_nc') & 0xffff\n }\n\n _l_read_name() {\n return this.get('_bin_mq_nl') & 0xff\n }\n\n /**\n * number of bytes in the sequence field\n */\n _seq_bytes() {\n return (this.get('seq_length') + 1) >> 1\n }\n\n getReadBases() {\n return this.seq()\n }\n\n seq() {\n const { byteArray, start } = this.bytes\n const p =\n start + 36 + this.get('_l_read_name') + this.get('_n_cigar_op') * 4\n const seqBytes = this.get('_seq_bytes')\n const len = this.get('seq_length')\n let buf = ''\n let i = 0\n for (let j = 0; j < seqBytes; ++j) {\n const sb = byteArray[p + j]\n buf += SEQRET_DECODER[(sb & 0xf0) >> 4]\n i++\n if (i < len) {\n buf += SEQRET_DECODER[sb & 0x0f]\n i++\n }\n }\n return buf\n }\n\n // adapted from igv.js\n getPairOrientation() {\n if (\n !this.isSegmentUnmapped() &&\n !this.isMateUnmapped() &&\n this._refID === this._next_refid()\n ) {\n const s1 = this.isReverseComplemented() ? 'R' : 'F'\n const s2 = this.isMateReverseComplemented() ? 'R' : 'F'\n let o1 = ' '\n let o2 = ' '\n if (this.isRead1()) {\n o1 = '1'\n o2 = '2'\n } else if (this.isRead2()) {\n o1 = '2'\n o2 = '1'\n }\n\n const tmp = []\n const isize = this.template_length()\n if (isize > 0) {\n tmp[0] = s1\n tmp[1] = o1\n tmp[2] = s2\n tmp[3] = o2\n } else {\n tmp[2] = s1\n tmp[3] = o1\n tmp[0] = s2\n tmp[1] = o2\n }\n return tmp.join('')\n }\n return ''\n }\n\n _bin_mq_nl() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 12)\n }\n\n _flag_nc() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 16)\n }\n\n seq_length() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 20)\n }\n\n _next_refid() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 24)\n }\n\n _next_pos() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 28)\n }\n\n template_length() {\n return this.bytes.byteArray.readInt32LE(this.bytes.start + 32)\n }\n\n toJSON() {\n const data: Record<string, any> = {}\n for (const k of Object.keys(this)) {\n if (k.startsWith('_') || k === 'bytes') {\n continue\n }\n //@ts-ignore\n data[k] = this[k]\n }\n\n return data\n }\n}\n","export function parseHeaderText(text: string) {\n const lines = text.split(/\\r?\\n/)\n const data: { tag: string; data: { tag: string; value: string }[] }[] = []\n for (const line of lines) {\n const [tag, ...fields] = line.split(/\\t/)\n if (tag) {\n data.push({\n tag: tag.slice(1),\n data: fields.map(f => {\n const r = f.indexOf(':')\n const fieldTag = f.slice(0, r)\n const value = f.slice(r + 1)\n return { tag: fieldTag, value }\n }),\n })\n }\n }\n return data\n}\n","import { Buffer } from 'buffer'\nimport crc32 from 'crc/crc32'\nimport { unzip, unzipChunkSlice } from '@gmod/bgzf-filehandle'\nimport { LocalFile, RemoteFile, GenericFilehandle } from 'generic-filehandle'\nimport AbortablePromiseCache from '@gmod/abortable-promise-cache'\nimport QuickLRU from 'quick-lru'\n\n// locals\nimport BAI from './bai'\nimport CSI from './csi'\nimport Chunk from './chunk'\nimport BAMFeature from './record'\nimport { parseHeaderText } from './sam'\nimport { checkAbortSignal, timeout, makeOpts, BamOpts, BaseOpts } from './util'\n\nexport const BAM_MAGIC = 21840194\n\nconst blockLen = 1 << 16\n\nasync function gen2array<T>(gen: AsyncIterable<T[]>): 
Promise<T[]> {\n let out: T[] = []\n for await (const x of gen) {\n out = out.concat(x)\n }\n return out\n}\n\ninterface Args {\n chunk: Chunk\n opts: BaseOpts\n}\n\nclass NullFilehandle {\n public read(): Promise<any> {\n throw new Error('never called')\n }\n public stat(): Promise<any> {\n throw new Error('never called')\n }\n\n public readFile(): Promise<any> {\n throw new Error('never called')\n }\n\n public close(): Promise<any> {\n throw new Error('never called')\n }\n}\nexport default class BamFile {\n public renameRefSeq: (a: string) => string\n public bam: GenericFilehandle\n public header?: string\n public chrToIndex?: Record<string, number>\n public indexToChr?: { refName: string; length: number }[]\n public yieldThreadTime: number\n public index?: BAI | CSI\n public htsget = false\n public headerP?: ReturnType<BamFile['getHeaderPre']>\n\n private featureCache = new AbortablePromiseCache<Args, BAMFeature[]>({\n cache: new QuickLRU({\n maxSize: 50,\n }),\n fill: async (args: Args, signal) => {\n const { chunk, opts } = args\n const { data, cpositions, dpositions } = await this._readChunk({\n chunk,\n opts: { ...opts, signal },\n })\n return this.readBamFeatures(data, cpositions, dpositions, chunk)\n },\n })\n\n constructor({\n bamFilehandle,\n bamPath,\n bamUrl,\n baiPath,\n baiFilehandle,\n baiUrl,\n csiPath,\n csiFilehandle,\n csiUrl,\n htsget,\n yieldThreadTime = 100,\n renameRefSeqs = n => n,\n }: {\n bamFilehandle?: GenericFilehandle\n bamPath?: string\n bamUrl?: string\n baiPath?: string\n baiFilehandle?: GenericFilehandle\n baiUrl?: string\n csiPath?: string\n csiFilehandle?: GenericFilehandle\n csiUrl?: string\n renameRefSeqs?: (a: string) => string\n yieldThreadTime?: number\n htsget?: boolean\n }) {\n this.renameRefSeq = renameRefSeqs\n\n if (bamFilehandle) {\n this.bam = bamFilehandle\n } else if (bamPath) {\n this.bam = new LocalFile(bamPath)\n } else if (bamUrl) {\n this.bam = new RemoteFile(bamUrl)\n } else if (htsget) {\n this.htsget = true\n this.bam = new NullFilehandle()\n } else {\n throw new Error('unable to initialize bam')\n }\n if (csiFilehandle) {\n this.index = new CSI({ filehandle: csiFilehandle })\n } else if (csiPath) {\n this.index = new CSI({ filehandle: new LocalFile(csiPath) })\n } else if (csiUrl) {\n this.index = new CSI({ filehandle: new RemoteFile(csiUrl) })\n } else if (baiFilehandle) {\n this.index = new BAI({ filehandle: baiFilehandle })\n } else if (baiPath) {\n this.index = new BAI({ filehandle: new LocalFile(baiPath) })\n } else if (baiUrl) {\n this.index = new BAI({ filehandle: new RemoteFile(baiUrl) })\n } else if (bamPath) {\n this.index = new BAI({ filehandle: new LocalFile(`${bamPath}.bai`) })\n } else if (bamUrl) {\n this.index = new BAI({ filehandle: new RemoteFile(`${bamUrl}.bai`) })\n } else if (htsget) {\n this.htsget = true\n } else {\n throw new Error('unable to infer index format')\n }\n this.yieldThreadTime = yieldThreadTime\n }\n\n async getHeaderPre(origOpts?: BaseOpts) {\n const opts = makeOpts(origOpts)\n if (!this.index) {\n return\n }\n const indexData = await this.index.parse(opts)\n const ret = indexData.firstDataLine\n ? 
indexData.firstDataLine.blockPosition + 65535\n : undefined\n let buffer\n if (ret) {\n const s = ret + blockLen\n const res = await this.bam.read(Buffer.alloc(s), 0, s, 0, opts)\n if (!res.bytesRead) {\n throw new Error('Error reading header')\n }\n buffer = res.buffer.subarray(0, Math.min(res.bytesRead, ret))\n } else {\n buffer = await this.bam.readFile(opts)\n }\n\n const uncba = await unzip(buffer)\n\n if (uncba.readInt32LE(0) !== BAM_MAGIC) {\n throw new Error('Not a BAM file')\n }\n const headLen = uncba.readInt32LE(4)\n\n this.header = uncba.toString('utf8', 8, 8 + headLen)\n const { chrToIndex, indexToChr } = await this._readRefSeqs(\n headLen + 8,\n 65535,\n opts,\n )\n this.chrToIndex = chrToIndex\n this.indexToChr = indexToChr\n\n return parseHeaderText(this.header)\n }\n\n getHeader(opts?: BaseOpts) {\n if (!this.headerP) {\n this.headerP = this.getHeaderPre(opts).catch(e => {\n this.headerP = undefined\n throw e\n })\n }\n return this.headerP\n }\n\n async getHeaderText(opts: BaseOpts = {}) {\n await this.getHeader(opts)\n return this.header\n }\n\n // the full length of the refseq block is not given in advance so this grabs\n // a chunk and doubles it if all refseqs haven't been processed\n async _readRefSeqs(\n start: number,\n refSeqBytes: number,\n opts?: BaseOpts,\n ): Promise<{\n chrToIndex: Record<string, number>\n indexToChr: { refName: string; length: number }[]\n }> {\n if (start > refSeqBytes) {\n return this._readRefSeqs(start, refSeqBytes * 2, opts)\n }\n const size = refSeqBytes + blockLen\n const { bytesRead, buffer } = await this.bam.read(\n Buffer.alloc(size),\n 0,\n refSeqBytes,\n 0,\n opts,\n )\n if (!bytesRead) {\n throw new Error('Error reading refseqs from header')\n }\n const uncba = await unzip(\n buffer.subarray(0, Math.min(bytesRead, refSeqBytes)),\n )\n const nRef = uncba.readInt32LE(start)\n let p = start + 4\n const chrToIndex: Record<string, number> = {}\n const indexToChr: { refName: string; length: number }[] = []\n for (let i = 0; i < nRef; i += 1) {\n const lName = uncba.readInt32LE(p)\n const refName = this.renameRefSeq(\n uncba.toString('utf8', p + 4, p + 4 + lName - 1),\n )\n const lRef = uncba.readInt32LE(p + lName + 4)\n\n chrToIndex[refName] = i\n indexToChr.push({ refName, length: lRef })\n\n p = p + 8 + lName\n if (p > uncba.length) {\n console.warn(\n `BAM header is very big. 
Re-fetching ${refSeqBytes} bytes.`,\n )\n return this._readRefSeqs(start, refSeqBytes * 2, opts)\n }\n }\n return { chrToIndex, indexToChr }\n }\n\n async getRecordsForRange(\n chr: string,\n min: number,\n max: number,\n opts?: BamOpts,\n ) {\n return gen2array(this.streamRecordsForRange(chr, min, max, opts))\n }\n\n async *streamRecordsForRange(\n chr: string,\n min: number,\n max: number,\n opts?: BamOpts,\n ) {\n await this.getHeader(opts)\n const chrId = this.chrToIndex?.[chr]\n if (chrId === undefined || !this.index) {\n yield []\n } else {\n const chunks = await this.index.blocksForRange(chrId, min - 1, max, opts)\n yield* this._fetchChunkFeatures(chunks, chrId, min, max, opts)\n }\n }\n\n async *_fetchChunkFeatures(\n chunks: Chunk[],\n chrId: number,\n min: number,\n max: number,\n opts: BamOpts = {},\n ) {\n const { viewAsPairs } = opts\n const feats = [] as BAMFeature[][]\n let done = false\n\n for (const chunk of chunks) {\n const records = await this.featureCache.get(\n chunk.toString(),\n { chunk, opts },\n opts.signal,\n )\n\n const recs = [] as BAMFeature[]\n for (const feature of records) {\n if (feature.seq_id() === chrId) {\n if (feature.get('start') >= max) {\n // past end of range, can stop iterating\n done = true\n break\n } else if (feature.get('end') >= min) {\n // must be in range\n recs.push(feature)\n }\n }\n }\n feats.push(recs)\n yield recs\n if (done) {\n break\n }\n }\n\n checkAbortSignal(opts.signal)\n if (viewAsPairs) {\n yield this.fetchPairs(chrId, feats, opts)\n }\n }\n\n async fetchPairs(chrId: number, feats: BAMFeature[][], opts: BamOpts) {\n const { pairAcrossChr, maxInsertSize = 200000 } = opts\n const unmatedPairs: Record<string, boolean> = {}\n const readIds: Record<string, number> = {}\n feats.map(ret => {\n const readNames: Record<string, number> = {}\n for (const element of ret) {\n const name = element.name()\n const id = element.id()\n if (!readNames[name]) {\n readNames[name] = 0\n }\n readNames[name]++\n readIds[id] = 1\n }\n for (const [k, v] of Object.entries(readNames)) {\n if (v === 1) {\n unmatedPairs[k] = true\n }\n }\n })\n\n const matePromises: Promise<Chunk[]>[] = []\n feats.map(ret => {\n for (const f of ret) {\n const name = f.name()\n const start = f.get('start')\n const pnext = f._next_pos()\n const rnext = f._next_refid()\n if (\n this.index &&\n unmatedPairs[name] &&\n (pairAcrossChr ||\n (rnext === chrId && Math.abs(start - pnext) < maxInsertSize))\n ) {\n matePromises.push(\n this.index.blocksForRange(rnext, pnext, pnext + 1, opts),\n )\n }\n }\n })\n\n // filter out duplicate chunks (the blocks are lists of chunks, blocks are\n // concatenated, then filter dup chunks)\n const map = new Map<string, Chunk>()\n const res = await Promise.all(matePromises)\n for (const m of res.flat()) {\n if (!map.has(m.toString())) {\n map.set(m.toString(), m)\n }\n }\n\n const mateFeatPromises = await Promise.all(\n [...map.values()].map(async c => {\n const { data, cpositions, dpositions, chunk } = await this._readChunk({\n chunk: c,\n opts,\n })\n const mateRecs = [] as BAMFeature[]\n for (const feature of await this.readBamFeatures(\n data,\n cpositions,\n dpositions,\n chunk,\n )) {\n if (unmatedPairs[feature.get('name')] && !readIds[feature.id()]) {\n mateRecs.push(feature)\n }\n }\n return mateRecs\n }),\n )\n return mateFeatPromises.flat()\n }\n\n async _readRegion(position: number, size: number, opts: BaseOpts = {}) {\n const { bytesRead, buffer } = await this.bam.read(\n Buffer.alloc(size),\n 0,\n size,\n position,\n opts,\n )\n\n 
return buffer.subarray(0, Math.min(bytesRead, size))\n }\n\n async _readChunk({ chunk, opts }: { chunk: Chunk; opts: BaseOpts }) {\n const buffer = await this._readRegion(\n chunk.minv.blockPosition,\n chunk.fetchedSize(),\n opts,\n )\n\n const {\n buffer: data,\n cpositions,\n dpositions,\n } = await unzipChunkSlice(buffer, chunk)\n return { data, cpositions, dpositions, chunk }\n }\n\n async readBamFeatures(\n ba: Buffer,\n cpositions: number[],\n dpositions: number[],\n chunk: Chunk,\n ) {\n let blockStart = 0\n const sink = [] as BAMFeature[]\n let pos = 0\n let last = +Date.now()\n\n while (blockStart + 4 < ba.length) {\n const blockSize = ba.readInt32LE(blockStart)\n const blockEnd = blockStart + 4 + blockSize - 1\n\n // increment position to the current decompressed status\n if (dpositions) {\n while (blockStart + chunk.minv.dataPosition >= dpositions[pos++]) {}\n pos--\n }\n\n // only try to read the feature if we have all the bytes for it\n if (blockEnd < ba.length) {\n const feature = new BAMFeature({\n bytes: {\n byteArray: ba,\n start: blockStart,\n end: blockEnd,\n },\n // the below results in an automatically calculated file-offset based\n // ID if the info for that is available, otherwise crc32 of the\n // features\n //\n // cpositions[pos] refers to actual file offset of a bgzip block\n // boundaries\n //\n // we multiply by (1 <<8) in order to make sure each block has a\n // \"unique\" address space so that data in that block could never\n // overlap\n //\n // then the blockStart-dpositions is an uncompressed file offset from\n // that bgzip block boundary, and since the cpositions are multiplied\n // by (1 << 8) these uncompressed offsets get a unique space\n //\n // this has an extra chunk.minv.dataPosition added on because it\n // blockStart starts at 0 instead of chunk.minv.dataPosition\n //\n // the +1 is just to avoid any possible uniqueId 0 but this does not\n // realistically happen\n fileOffset:\n cpositions.length > 0\n ? cpositions[pos] * (1 << 8) +\n (blockStart - dpositions[pos]) +\n chunk.minv.dataPosition +\n 1\n : // must be slice, not subarray for buffer polyfill on web\n crc32.signed(ba.slice(blockStart, blockEnd)),\n })\n\n sink.push(feature)\n if (this.yieldThreadTime && +Date.now() - last > this.yieldThreadTime) {\n await timeout(1)\n last = +Date.now()\n }\n }\n\n blockStart = blockEnd + 1\n }\n return sink\n }\n\n async hasRefSeq(seqName: string) {\n const seqId = this.chrToIndex?.[seqName]\n return seqId === undefined ? false : this.index?.hasRefSeq(seqId)\n }\n\n async lineCount(seqName: string) {\n const seqId = this.chrToIndex?.[seqName]\n return seqId === undefined || !this.index ? 0 : this.index.lineCount(seqId)\n }\n\n async indexCov(seqName: string, start?: number, end?: number) {\n if (!this.index) {\n return []\n }\n await this.index.parse()\n const seqId = this.chrToIndex?.[seqName]\n return seqId === undefined ? [] : this.index.indexCov(seqId, start, end)\n }\n\n async blocksForRange(\n seqName: string,\n start: number,\n end: number,\n opts?: BaseOpts,\n ) {\n if (!this.index) {\n return []\n }\n await this.index.parse()\n const seqId = this.chrToIndex?.[seqName]\n return seqId === undefined\n ? 
[]\n : this.index.blocksForRange(seqId, start, end, opts)\n }\n}\n","import { unzip } from '@gmod/bgzf-filehandle'\nimport { Buffer } from 'buffer'\nimport { BaseOpts, BamOpts } from './util'\nimport BamFile, { BAM_MAGIC } from './bamFile'\nimport Chunk from './chunk'\nimport { parseHeaderText } from './sam'\n\ninterface HtsgetChunk {\n url: string\n headers?: Record<string, string>\n}\nasync function concat(arr: HtsgetChunk[], opts?: Record<string, any>) {\n const res = await Promise.all(\n arr.map(async chunk => {\n const { url, headers } = chunk\n if (url.startsWith('data:')) {\n return Buffer.from(url.split(',')[1], 'base64')\n } else {\n //remove referer header, it is not even allowed to be specified\n // @ts-expect-error\n\n const { referer, ...rest } = headers\n const res = await fetch(url, {\n ...opts,\n headers: { ...opts?.headers, ...rest },\n })\n if (!res.ok) {\n throw new Error(\n `HTTP ${res.status} fetching ${url}: ${await res.text()}`,\n )\n }\n return Buffer.from(await res.arrayBuffer())\n }\n }),\n )\n\n return Buffer.concat(await Promise.all(res.map(elt => unzip(elt))))\n}\n\nexport default class HtsgetFile extends BamFile {\n private baseUrl: string\n\n private trackId: string\n\n constructor(args: { trackId: string; baseUrl: string }) {\n super({ htsget: true })\n this.baseUrl = args.baseUrl\n this.trackId = args.trackId\n }\n\n async *streamRecordsForRange(\n chr: string,\n min: number,\n max: number,\n opts?: BamOpts,\n ) {\n const base = `${this.baseUrl}/${this.trackId}`\n const url = `${base}?referenceName=${chr}&start=${min}&end=${max}&format=BAM`\n const chrId = this.chrToIndex?.[chr]\n if (chrId === undefined) {\n yield []\n } else {\n const result = await fetch(url, { ...opts })\n if (!result.ok) {\n throw new Error(\n `HTTP ${result.status} fetching ${url}: ${await result.text()}`,\n )\n }\n const data = await result.json()\n const uncba = await concat(data.htsget.urls.slice(1), opts)\n\n yield* this._fetchChunkFeatures(\n [\n // fake stuff to pretend to be a Chunk\n {\n buffer: uncba,\n _fetchedSize: undefined,\n bin: 0,\n compareTo() {\n return 0\n },\n toUniqueString() {\n return `${chr}_${min}_${max}`\n },\n fetchedSize() {\n return 0\n },\n minv: {\n dataPosition: 0,\n blockPosition: 0,\n compareTo: () => 0,\n },\n maxv: {\n dataPosition: Number.MAX_SAFE_INTEGER,\n blockPosition: 0,\n compareTo: () => 0,\n },\n toString() {\n return `${chr}_${min}_${max}`\n },\n },\n ],\n chrId,\n min,\n max,\n opts,\n )\n }\n }\n\n async _readChunk({ chunk }: { chunk: Chunk; opts: BaseOpts }) {\n if (!chunk.buffer) {\n throw new Error('expected chunk.buffer in htsget')\n }\n return { data: chunk.buffer, cpositions: [], dpositions: [], chunk }\n }\n\n async getHeader(opts: BaseOpts = {}) {\n const url = `${this.baseUrl}/${this.trackId}?referenceName=na&class=header`\n const result = await fetch(url, opts)\n if (!result.ok) {\n throw new Error(\n `HTTP ${result.status} fetching ${url}: ${await result.text()}`,\n )\n }\n const data = await result.json()\n const uncba = await concat(data.htsget.urls, opts)\n\n if (uncba.readInt32LE(0) !== BAM_MAGIC) {\n throw new Error('Not a BAM file')\n }\n const headLen = uncba.readInt32LE(4)\n const headerText = uncba.toString('utf8', 8, 8 + headLen)\n const samHeader = parseHeaderText(headerText)\n\n // use the @SQ lines in the header to figure out the\n // mapping between ref ref ID numbers and names\n const idToName: { refName: string; length: number }[] = []\n const nameToId: Record<string, number> = {}\n const sqLines = 
samHeader.filter(l => l.tag === 'SQ')\n for (const [refId, sqLine] of sqLines.entries()) {\n let refName = ''\n let length = 0\n for (const item of sqLine.data) {\n if (item.tag === 'SN') {\n refName = item.value\n } else if (item.tag === 'LN') {\n length = +item.value\n }\n }\n nameToId[refName] = refId\n idToName[refId] = { refName, length }\n }\n this.chrToIndex = nameToId\n this.indexToChr = idToName\n return samHeader\n }\n}\n","import { Buffer } from 'buffer'\n//@ts-ignore\nimport { Z_SYNC_FLUSH, Inflate } from 'pako'\n\ninterface VirtualOffset {\n blockPosition: number\n dataPosition: number\n}\ninterface Chunk {\n minv: VirtualOffset\n maxv: VirtualOffset\n}\n\n// browserify-zlib, which is the zlib shim used by default in webpacked code,\n// does not properly uncompress bgzf chunks that contain more than\n// one bgzf block, so export an unzip function that uses pako directly\n// if we are running in a browser.\nasync function unzip(inputData: Buffer) {\n try {\n let strm\n let pos = 0\n let i = 0\n const chunks = []\n let totalSize = 0\n let inflator\n do {\n const remainingInput = inputData.subarray(pos)\n inflator = new Inflate()\n //@ts-ignore\n ;({ strm } = inflator)\n inflator.push(remainingInput, Z_SYNC_FLUSH)\n if (inflator.err) {\n throw new Error(inflator.msg)\n }\n\n pos += strm.next_in\n chunks[i] = inflator.result as Uint8Array\n totalSize += chunks[i].length\n i += 1\n } while (strm.avail_in)\n\n const result = new Uint8Array(totalSize)\n for (let i = 0, offset = 0; i < chunks.length; i++) {\n result.set(chunks[i], offset)\n offset += chunks[i].length\n }\n return Buffer.from(result)\n } catch (e) {\n //cleanup error message\n if (`${e}`.match(/incorrect header check/)) {\n throw new Error(\n 'problem decompressing block: incorrect gzip header check',\n )\n }\n throw e\n }\n}\n\n// similar to pakounzip, except it does extra counting\n// to return the positions of compressed and decompressed\n// data offsets\nasync function unzipChunk(inputData: Buffer) {\n try {\n let strm\n let cpos = 0\n let dpos = 0\n const blocks = []\n const cpositions = []\n const dpositions = []\n do {\n const remainingInput = inputData.slice(cpos)\n const inflator = new Inflate()\n // @ts-ignore\n ;({ strm } = inflator)\n inflator.push(remainingInput, Z_SYNC_FLUSH)\n if (inflator.err) {\n throw new Error(inflator.msg)\n }\n\n const buffer = Buffer.from(inflator.result)\n blocks.push(buffer)\n\n cpositions.push(cpos)\n dpositions.push(dpos)\n\n cpos += strm.next_in\n dpos += buffer.length\n } while (strm.avail_in)\n\n const buffer = Buffer.concat(blocks)\n return { buffer, cpositions, dpositions }\n } catch (e) {\n //cleanup error message\n if (`${e}`.match(/incorrect header check/)) {\n throw new Error(\n 'problem decompressing block: incorrect gzip header check',\n )\n }\n throw e\n }\n}\n\n// similar to unzipChunk above but slices (0,minv.dataPosition) and\n// (maxv.dataPosition,end) off\nasync function unzipChunkSlice(inputData: Buffer, chunk: Chunk) {\n try {\n let strm\n const { minv, maxv } = chunk\n let cpos = minv.blockPosition\n let dpos = minv.dataPosition\n const chunks = []\n const cpositions = []\n const dpositions = []\n\n let totalSize = 0\n let i = 0\n do {\n const remainingInput = inputData.subarray(cpos - minv.blockPosition)\n const inflator = new Inflate()\n // @ts-ignore\n ;({ strm } = inflator)\n inflator.push(remainingInput, Z_SYNC_FLUSH)\n if (inflator.err) {\n throw new Error(inflator.msg)\n }\n\n const buffer = inflator.result\n chunks.push(buffer as Uint8Array)\n let 
len = buffer.length\n\n cpositions.push(cpos)\n dpositions.push(dpos)\n if (chunks.length === 1 && minv.dataPosition) {\n // this is the first chunk, trim it\n chunks[0] = chunks[0].subarray(minv.dataPosition)\n len = chunks[0].length\n }\n const origCpos = cpos\n cpos += strm.next_in\n dpos += len\n\n if (origCpos >= maxv.blockPosition) {\n // this is the last chunk, trim it and stop decompressing\n // note if it is the same block is minv it subtracts that already\n // trimmed part of the slice length\n\n chunks[i] = chunks[i].subarray(\n 0,\n maxv.blockPosition === minv.blockPosition\n ? maxv.dataPosition - minv.dataPosition + 1\n : maxv.dataPosition + 1,\n )\n\n cpositions.push(cpos)\n dpositions.push(dpos)\n totalSize += chunks[i].length\n break\n }\n totalSize += chunks[i].length\n i++\n } while (strm.avail_in)\n\n const result = new Uint8Array(totalSize)\n for (let i = 0, offset = 0; i < chunks.length; i++) {\n result.set(chunks[i], offset)\n offset += chunks[i].length\n }\n const buffer = Buffer.from(result)\n\n return { buffer, cpositions, dpositions }\n } catch (e) {\n //cleanup error message\n if (`${e}`.match(/incorrect header check/)) {\n throw new Error(\n 'problem decompressing block: incorrect gzip header check',\n )\n }\n throw e\n }\n}\n\nfunction nodeUnzip() {\n throw new Error('nodeUnzip not implemented.')\n}\n\nexport { unzip, unzipChunk, unzipChunkSlice, unzip as pakoUnzip, nodeUnzip }\n","import Long from 'long'\nimport { Buffer } from 'buffer'\nimport { LocalFile, GenericFilehandle } from 'generic-filehandle'\n\n// const COMPRESSED_POSITION = 0\nconst UNCOMPRESSED_POSITION = 1\n\nexport default class GziIndex {\n filehandle: GenericFilehandle\n\n index?: any\n\n constructor({\n filehandle,\n path,\n }: {\n filehandle?: GenericFilehandle\n path?: string\n }) {\n if (filehandle) {\n this.filehandle = filehandle\n } else if (path) {\n this.filehandle = new LocalFile(path)\n } else {\n throw new TypeError('either filehandle or path must be defined')\n }\n }\n\n _readLongWithOverflow(buf: Buffer, offset = 0, unsigned = true) {\n //@ts-ignore\n const long = Long.fromBytesLE(buf.slice(offset, offset + 8), unsigned)\n if (\n long.greaterThan(Number.MAX_SAFE_INTEGER) ||\n long.lessThan(Number.MIN_SAFE_INTEGER)\n ) {\n throw new TypeError('integer overflow')\n }\n\n return long.toNumber()\n }\n\n _getIndex() {\n if (!this.index) {\n this.index = this._readIndex()\n }\n return this.index\n }\n\n async _readIndex() {\n let buf = Buffer.allocUnsafe(8)\n await this.filehandle.read(buf, 0, 8, 0)\n const numEntries = this._readLongWithOverflow(buf, 0, true)\n if (!numEntries) {\n return [[0, 0]]\n }\n\n const entries = new Array(numEntries + 1)\n entries[0] = [0, 0]\n\n // TODO rewrite this to make an index-index that stays in memory\n const bufSize = 8 * 2 * numEntries\n if (bufSize > Number.MAX_SAFE_INTEGER) {\n throw new TypeError('integer overflow')\n }\n buf = Buffer.allocUnsafe(bufSize)\n await this.filehandle.read(buf, 0, bufSize, 8)\n for (let entryNumber = 0; entryNumber < numEntries; entryNumber += 1) {\n const compressedPosition = this._readLongWithOverflow(\n buf,\n entryNumber * 16,\n )\n const uncompressedPosition = this._readLongWithOverflow(\n buf,\n entryNumber * 16 + 8,\n )\n entries[entryNumber + 1] = [compressedPosition, uncompressedPosition]\n }\n\n return entries\n }\n\n async getLastBlock() {\n const entries = await this._getIndex()\n if (!entries.length) {\n return undefined\n }\n return entries[entries.length - 1]\n }\n\n async 
getRelevantBlocksForRead(length: number, position: number) {\n const endPosition = position + length\n if (length === 0) {\n return []\n }\n const entries = await this._getIndex()\n const relevant = []\n\n // binary search to find the block that the\n // read starts in and extend forward from that\n const compare = (entry: any, nextEntry: any) => {\n const uncompressedPosition = entry[UNCOMPRESSED_POSITION]\n const nextUncompressedPosition = nextEntry\n ? nextEntry[UNCOMPRESSED_POSITION]\n : Infinity\n // block overlaps read start\n if (\n uncompressedPosition <= position &&\n nextUncompressedPosition > position\n ) {\n return 0\n // block is before read start\n }\n if (uncompressedPosition < position) {\n return -1\n }\n // block is after read start\n return 1\n }\n\n let lowerBound = 0\n let upperBound = entries.length - 1\n let searchPosition = Math.floor(entries.length / 2)\n\n let comparison = compare(\n entries[searchPosition],\n entries[searchPosition + 1],\n )\n while (comparison !== 0) {\n if (comparison > 0) {\n upperBound = searchPosition - 1\n } else if (comparison < 0) {\n lowerBound = searchPosition + 1\n }\n searchPosition = Math.ceil((upperBound - lowerBound) / 2) + lowerBound\n comparison = compare(entries[searchPosition], entries[searchPosition + 1])\n }\n\n // here's where we read forward\n relevant.push(entries[searchPosition])\n let i = searchPosition + 1\n for (; i < entries.length; i += 1) {\n relevant.push(entries[i])\n if (entries[i][UNCOMPRESSED_POSITION] >= endPosition) {\n break\n }\n }\n if (relevant[relevant.length - 1][UNCOMPRESSED_POSITION] < endPosition) {\n relevant.push([])\n }\n return relevant\n }\n}\n","import { Buffer } from 'buffer'\nimport { LocalFile, GenericFilehandle } from 'generic-filehandle'\n\n// locals\nimport { unzip } from './unzip'\nimport GziIndex from './gziIndex'\n\nexport default class BgzFilehandle {\n filehandle: GenericFilehandle\n gzi: GziIndex\n\n constructor({\n filehandle,\n path,\n gziFilehandle,\n gziPath,\n }: {\n filehandle?: GenericFilehandle\n path?: string\n gziFilehandle?: GenericFilehandle\n gziPath?: string\n }) {\n if (filehandle) {\n this.filehandle = filehandle\n } else if (path) {\n this.filehandle = new LocalFile(path)\n } else {\n throw new TypeError('either filehandle or path must be defined')\n }\n\n if (!gziFilehandle && !gziPath && !path) {\n throw new TypeError('either gziFilehandle or gziPath must be defined')\n }\n\n this.gzi = new GziIndex({\n filehandle: gziFilehandle,\n path: !gziFilehandle && !gziPath && path ? 
gziPath : `${path}.gzi`,\n })\n }\n\n async stat() {\n const compressedStat = await this.filehandle.stat()\n return Object.assign(compressedStat, {\n size: await this.getUncompressedFileSize(),\n blocks: undefined,\n blksize: undefined,\n })\n }\n\n async getUncompressedFileSize() {\n // read the last block's ISIZE (see gzip RFC),\n // and add it to its uncompressedPosition\n const [, uncompressedPosition] = await this.gzi.getLastBlock()\n\n const { size } = await this.filehandle.stat()\n\n const buf = Buffer.allocUnsafe(4)\n // note: there should be a 28-byte EOF marker (an empty block) at\n // the end of the file, so we skip backward past that\n const { bytesRead } = await this.filehandle.read(buf, 0, 4, size - 28 - 4)\n if (bytesRead !== 4) {\n throw new Error('read error')\n }\n const lastBlockUncompressedSize = buf.readUInt32LE(0)\n return uncompressedPosition + lastBlockUncompressedSize\n }\n\n async _readAndUncompressBlock(\n blockBuffer: Buffer,\n [compressedPosition]: [number],\n [nextCompressedPosition]: [number],\n ) {\n let next = nextCompressedPosition\n if (!next) {\n next = (await this.filehandle.stat()).size\n }\n\n // read the compressed data into the block buffer\n const blockCompressedLength = next - compressedPosition\n\n await this.filehandle.read(\n blockBuffer,\n 0,\n blockCompressedLength,\n compressedPosition,\n )\n\n // uncompress it\n const unzippedBuffer = await unzip(\n blockBuffer.slice(0, blockCompressedLength),\n )\n\n return unzippedBuffer as Buffer\n }\n\n async read(buf: Buffer, offset: number, length: number, position: number) {\n // get the block positions for this read\n const blockPositions = await this.gzi.getRelevantBlocksForRead(\n length,\n position,\n )\n const blockBuffer = Buffer.allocUnsafe(32768 * 2)\n // uncompress the blocks and read from them one at a time to keep memory usage down\n let destinationOffset = offset\n let bytesRead = 0\n for (\n let blockNum = 0;\n blockNum < blockPositions.length - 1;\n blockNum += 1\n ) {\n // eslint-disable-next-line no-await-in-loop\n const uncompressedBuffer = await this._readAndUncompressBlock(\n blockBuffer,\n blockPositions[blockNum],\n blockPositions[blockNum + 1],\n )\n const [, uncompressedPosition] = blockPositions[blockNum]\n const sourceOffset =\n uncompressedPosition >= position ? 
0 : position - uncompressedPosition\n const sourceEnd =\n Math.min(\n position + length,\n uncompressedPosition + uncompressedBuffer.length,\n ) - uncompressedPosition\n if (sourceOffset >= 0 && sourceOffset < uncompressedBuffer.length) {\n uncompressedBuffer.copy(buf, destinationOffset, sourceOffset, sourceEnd)\n destinationOffset += sourceEnd - sourceOffset\n bytesRead += sourceEnd - sourceOffset\n }\n }\n\n return { bytesRead, buffer: buf }\n }\n}\n","// Generated by `./pycrc.py --algorithm=table-driven --model=crc-32 --generate=c`\nlet TABLE = [\n 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3,\n 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91,\n 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,\n 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5,\n 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,\n 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,\n 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f,\n 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d,\n 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,\n 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,\n 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457,\n 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,\n 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb,\n 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9,\n 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,\n 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad,\n 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683,\n 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,\n 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7,\n 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,\n 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,\n 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79,\n 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f,\n 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,\n 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,\n 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21,\n 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,\n 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45,\n 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db,\n 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,\n 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 
0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf,\n 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d,\n];\nif (typeof Int32Array !== 'undefined') {\n TABLE = new Int32Array(TABLE);\n}\nconst crc32 = (current, previous) => {\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n let crc = previous === 0 ? 0 : ~~previous ^ -1;\n for (let index = 0; index < current.length; index++) {\n crc = TABLE[(crc ^ current[index]) & 0xff] ^ (crc >>> 8);\n }\n return crc ^ -1;\n};\nexport default crc32;\n","/* eslint-disable @typescript-eslint/no-explicit-any */\n/* eslint-disable no-prototype-builtins */\nimport { Buffer } from 'buffer';\nconst createBuffer = (value, encoding) => Buffer.from(value, encoding);\nexport default createBuffer;\n","import crc32 from './calculators/crc32.js';\nimport defineCrc from './define_crc.js';\nexport default defineCrc('crc-32', crc32);\n","import createBuffer from './create_buffer.js';\nexport default function defineCrc(model, calculator) {\n const result = (value, previous) => calculator(createBuffer(value), previous) >>> 0;\n result.signed = (value, previous) => calculator(createBuffer(value), previous);\n result.unsigned = result;\n result.model = model;\n return result;\n}\n"],"names":["NullSignal","AggregateAbortController","signals","Set","abortController","AbortController","addSignal","signal","this","aborted","Error","add","handleAborted","addEventListener","delete","size","abort","AggregateStatusReporter","callbacks","addCallback","callback","currentMessage","message","elt","AbortablePromiseCache","constructor","fill","cache","TypeError","get","set","fillCallback","isAbortException","exception","name","code","evict","key","entry","data","statusCallback","aborter","statusReporter","newEntry","promise","settled","then","catch","error","console","checkSinglePromise","checkForSingleAbort","Object","assign","result","has","AbortSignal","cacheEntry","cachedEntry","clear","keyIter","keys","deleteCount","next","done","value","VirtualOffset","blockPosition","dataPosition","toString","compareTo","b","min","args","i","length","fromBytes","bytes","offset","bigendian","Chunk","minv","maxv","bin","_fetchedSize","toUniqueString","fetchedSize","undefined","timeout","ms","Promise","resolve","setTimeout","optimizeChunks","chunks","lowest","mergedChunks","lastChunk","sort","c0","c1","dif","chunk","push","chunk1","chunk2","parsePseudoBin","lineCount","long","greaterThan","Number","MAX_SAFE_INTEGER","lessThan","MIN_SAFE_INTEGER","toNumber","longToNumber","Array","prototype","slice","call","findFirstData","firstDataLine","virtualOffset","parseNameBytes","namesBytes","renameRefSeq","s","currRefId","currNameStart","refIdToName","refNameToId","refName","IndexFile","filehandle","n","BAI","refId","opts","parse","indices","stats","_parse","readFile","readUInt32LE","refCount","readInt32LE","curr","binCount","binIndex","j","binLimit","chunkCount","k","u","v","linearCount","linearIndex","bai","maxBlockSize","indexCov","seqId","start","end","range","seqIdx","e","roundDown","depths","totalSize","currentPos","score","map","d","blocksForRange","max","indexData","ba","overlappingBins","beg","binChunks","binChunk","nintv","minLin","Math","maxLin","vp","setupP","hasRefSeq","rshift","num","bits","floor","CSI","maxBinNumber","depth","minShift","parseAuxData","formatFlags","coordinateType","format","columnNumbers","ref","metaValue","metaChar","String","fromCharCode","skipLines","nameSectionLength","subarray","buffer","unzip","csiVersion","a
uxLength","aux","csi","reg2bins","c","l","t","bins","SEQRET_DECODER","split","CIGAR_DECODER","BamRecord","_tagList","_allTagsParsed","fileOffset","byteArray","_id","_refID","flags","field","_get","toLowerCase","seq_id","_parseTag","_tags","_parseAllTags","tags","isSegmentUnmapped","isPaired","concat","startsWith","seen","filter","lt","parent","children","id","mq","qual","qualRaw","join","p","lseq","strand","isReverseComplemented","multi_segment_next_segment_strand","isMateUnmapped","isMateReverseComplemented","_read_name","nl","tagName","_tagOffset","blockEnd","lcTag","tag","type","readInt8","readUInt8","readInt16LE","readUInt16LE","readFloatLE","cc","Btype","limit","cigop","warn","_parseCigar","cigar","match","op","exec","toUpperCase","parseInt","isProperlyPaired","isRead1","isRead2","isSecondary","isFailedQc","isDuplicate","isSupplementary","numCigarOps","seqLen","lref","lop","length_on_ref","_n_cigar_op","_l_read_name","_seq_bytes","getReadBases","seq","seqBytes","len","buf","sb","getPairOrientation","_next_refid","s1","s2","o1","o2","tmp","template_length","_bin_mq_nl","_flag_nc","seq_length","_next_pos","toJSON","parseHeaderText","text","lines","line","fields","f","r","indexOf","BAM_MAGIC","NullFilehandle","read","stat","close","BamFile","bamFilehandle","bamPath","bamUrl","baiPath","baiFilehandle","baiUrl","csiPath","csiFilehandle","csiUrl","htsget","yieldThreadTime","renameRefSeqs","featureCache","A","maxSize","async","cpositions","dpositions","_readChunk","readBamFeatures","bam","index","getHeaderPre","origOpts","obj","makeOpts","ret","res","alloc","bytesRead","uncba","headLen","header","chrToIndex","indexToChr","_readRefSeqs","getHeader","headerP","getHeaderText","refSeqBytes","nRef","lName","lRef","getRecordsForRange","chr","gen","out","x","gen2array","streamRecordsForRange","chrId","_fetchChunkFeatures","viewAsPairs","feats","records","recs","feature","DOMException","checkAbortSignal","fetchPairs","pairAcrossChr","maxInsertSize","unmatedPairs","readIds","readNames","element","entries","matePromises","pnext","rnext","abs","Map","all","m","flat","values","mateRecs","_readRegion","position","blockStart","sink","pos","last","Date","now","crc32","signed","seqName","arr","url","headers","from","referer","rest","fetch","ok","status","arrayBuffer","HtsgetFile","super","baseUrl","trackId","json","urls","samHeader","idToName","nameToId","sqLines","sqLine","item","inputData","strm","inflator","remainingInput","Inflate","Z_SYNC_FLUSH","err","msg","next_in","avail_in","Uint8Array","unzipChunkSlice","cpos","dpos","origCpos","GziIndex","path","_readLongWithOverflow","unsigned","_getIndex","_readIndex","allocUnsafe","numEntries","bufSize","entryNumber","compressedPosition","uncompressedPosition","getLastBlock","getRelevantBlocksForRead","endPosition","relevant","compare","nextEntry","nextUncompressedPosition","Infinity","lowerBound","upperBound","searchPosition","comparison","ceil","BgzFilehandle","gziFilehandle","gziPath","gzi","compressedStat","getUncompressedFileSize","blocks","blksize","_readAndUncompressBlock","blockBuffer","nextCompressedPosition","blockCompressedLength","blockPositions","destinationOffset","blockNum","uncompressedBuffer","sourceOffset","sourceEnd","copy","TABLE","Int32Array","encoding","model","calculator","previous","current","crc"],"sourceRoot":""}