#!/usr/bin/env python3

"""
Diagonal partitioning for segment files output by SegAlign.

Reads a SegAlign segment file, groups segments by (target, query)
sequence pair, sorts each group along its alignment diagonal, and splits
the result into chunk-sized segment files.  For every chunk, a modified
lastz command line (with updated --segments=, --output= and trailing
error-file arguments) is printed to stdout.

Usage:
diagonal_partition <chunk_size> <lastz command containing --segments=...>
"""

import os
import sys


def chunks(lst, n):
    """Yield successive n-sized chunks from lst (last chunk may be shorter)."""
    for i in range(0, len(lst), n):
        yield lst[i: i + n]


if __name__ == "__main__":

    # Remove the original (unsplit) segment file once it has been chunked.
    DELETE_AFTER_CHUNKING = True

    chunk_size = int(sys.argv[1])  # first parameter contains chunk size
    params = sys.argv[2:]  # remainder is the lastz command line to rewrite

    # Locate the --segments= argument produced by SegAlign.
    segment_key = "--segments="
    segment_index = None
    input_file = None

    for index, value in enumerate(params):
        if value.startswith(segment_key):
            segment_index = index
            input_file = value[len(segment_key):]
            break
    if segment_index is None:
        # Fatal: without a segment file there is nothing to partition.
        # Errors go to stderr so they are never mistaken for lastz commands.
        print(
            f"Error: could not find segment key {segment_key} in parameters {params}",
            file=sys.stderr,
        )
        sys.exit(1)

    if not os.path.isfile(input_file):
        print(f"Error: File {input_file} does not exist", file=sys.stderr)
        sys.exit(1)

    # No need to sort/split if the whole file fits into a single chunk.
    with open(input_file) as f:
        line_count = sum(1 for _ in f)
    if chunk_size == 0 or line_count <= chunk_size:
        print(" ".join(params), flush=True)
        sys.exit(0)

    # Find the rest of the relevant parameters.
    output_key = "--output="
    output_index = None
    output_alignment_file_base = None
    output_format = None

    strand_key = "--strand="
    strand_index = None

    for index, value in enumerate(params):
        if value.startswith(output_key):
            output_index = index
            output_alignment_file = value[len(output_key):]
            output_alignment_file_base, output_format = (
                output_alignment_file.rsplit(".", 1)
            )
        if value.startswith(strand_key):
            strand_index = index
    # BUG FIX: the original re-tested segment_index here, so a missing
    # --output= argument was never detected.
    if output_index is None:
        print(
            f"Error: could not find output key {output_key} in parameters {params}",
            file=sys.stderr,
        )
        sys.exit(1)
    if strand_index is None:
        print(
            f"Error: could not find strand key {strand_key} in parameters {params}",
            file=sys.stderr,
        )
        sys.exit(1)

    err_name_base = params[-1].split(".err", 1)[0]  # error file is at very end

    data = {}  # (seq1_name, seq2_name) -> list of (seq1_mid, seq2_mid, line)

    if "plus" in params[strand_index]:
        direction = "f"
    elif "minus" in params[strand_index]:
        direction = "r"
    else:
        print(
            f"Error: could not figure out direction from strand value {params[strand_index]}",
            file=sys.stderr,
        )
        sys.exit(1)

    with open(input_file) as f:
        for line in f:
            if not line.strip():  # skip blank lines
                continue
            (
                seq1_name,
                seq1_start,
                seq1_end,
                seq2_name,
                seq2_start,
                seq2_end,
                _dir,
                _score,
            ) = line.split()
            assert int(seq1_end) > int(seq1_start)
            assert int(seq2_end) > int(seq2_start)
            # Represent each segment by its midpoint on both sequences.
            half_dist = (int(seq1_end) - int(seq1_start)) // 2
            seq1_mid = int(seq1_start) + half_dist
            seq2_mid = int(seq2_start) + half_dist
            data.setdefault((seq1_name, seq2_name), []).append(
                (seq1_mid, seq2_mid, line)
            )

    # Sequence pairs with segment count <= chunk_size are kept together in
    # aggregated files (written last) and need no sorting.
    skip_pairs = []
    if len(data) > 1:
        skip_pairs = [pair for pair in data if len(data[pair]) <= chunk_size]

    # NOTE(review): the original labelled the "r" branch "forward" and the
    # "f" branch "reverse"; those labels appeared swapped, so the sort keys
    # are described literally here instead.
    if direction == "r":
        # sort by (y - x, x): groups segments lying on the same diagonal
        for pair in data:
            if pair not in skip_pairs:
                data[pair].sort(key=lambda coord: (coord[1] - coord[0], coord[0]))
    elif direction == "f":
        # sort by (y + x, x): groups segments lying on the same anti-diagonal
        for pair in data:
            if pair not in skip_pairs:
                data[pair].sort(key=lambda coord: (coord[1] + coord[0], coord[0]))

    # Write sorted pairs out in chunks, printing an adjusted lastz command
    # for each chunk file.
    ctr = 0
    for pair in data.keys() - skip_pairs:
        for chunk in chunks(list(zip(*data[pair]))[2], chunk_size):
            ctr += 1
            name_addition = f".split{ctr}"
            fname = (
                input_file.split(".segments", 1)[0] + name_addition + ".segments"
            )
            with open(fname, "w") as f:
                f.writelines(chunk)
            # update segment, output and error file arguments in command
            params[segment_index] = segment_key + fname
            params[output_index] = (
                output_key
                + output_alignment_file_base
                + name_addition
                + "."
                + output_format
            )
            params[-1] = err_name_base + name_addition + ".err"
            print(" ".join(params), flush=True)

    # Write the (unsorted) skipped pairs, aggregating small pairs until a
    # file would exceed chunk_size segments.
    if skip_pairs:
        # list of tuples of (pair length, pair), smallest first
        skip_pairs_with_len = sorted((len(data[p]), p) for p in skip_pairs)
        aggregated_skip_pairs = [[]]  # list of lists of pair names
        current_count = 0
        for count, pair in skip_pairs_with_len:
            if current_count + count <= chunk_size:
                current_count += count
            else:
                aggregated_skip_pairs.append([])
                current_count = count
            aggregated_skip_pairs[-1].append(pair)

        for aggregate in aggregated_skip_pairs:
            ctr += 1
            name_addition = f".split{ctr}"
            fname = (
                input_file.split(".segments", 1)[0] + name_addition + ".segments"
            )
            with open(fname, "w") as f:
                for pair in sorted(aggregate, key=lambda p: (p[1], p[0])):
                    f.writelines(list(zip(*data[pair]))[2])
            # update segment, output and error file arguments in command
            params[segment_index] = segment_key + fname
            params[output_index] = (
                output_key
                + output_alignment_file_base
                + name_addition
                + "."
                + output_format
            )
            params[-1] = err_name_base + name_addition + ".err"
            print(" ".join(params), flush=True)

    if DELETE_AFTER_CHUNKING:
        os.remove(input_file)
+ + + + +
+
+
diff -r 000000000000 -r 5c72425b7f1b lastz-cmd.ini --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/lastz-cmd.ini Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,94 @@ +[arguments] +flag_args = + allgappedbounds + anyornone + gapped + gfextend + markend + nocensus + noentropy + nofilter + nogapped + nogfextend + nomirror + norecoverseeds + notransition + notrivial + notwins + noytrim + recoverseeds + self + version + yasra75 + yasra85 + yasra85short + yasra90 + yasra95 + yasra95short + yasra98 + +str_args = + action:query + action:target + ambiguous + ball + chores + filter + format + gap + hspthresh + include + match + maxwordcount + mismatch + nochain + output + outputmasking + outputmasking+ + outputmasking+:soft + outputmasking:soft + querydepth + queryhsplimit + rdotplot + readgroup + scores + seed + segments + show + strand + targetcapsule + twins + writecapsule + writesegments + +int_args = + allocate:query + allocate:target + allocate:traceback + exact + gappedthresh + inner + masking + queryhspbest + step + word + xdrop + ydrop + +bool_str_args= + census + census16 + census32 + chain + entropy + help + infer + inferonly + infscores + tableonly + +bool_int_args= + progress + progress+masking + transition + diff -r 000000000000 -r 5c72425b7f1b macros.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/macros.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,22 @@ + + + + segalign-full + bashlex + + + + 0.1.2.1 + 0 + 21.05 + + + operation_0491 + + + + + 10.1109/SC41405.2020.00043 + + + diff -r 000000000000 -r 5c72425b7f1b output_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/output_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,8 @@ + + +
+ + +
+
+
#!/usr/bin/env python

"""Package SegAlign lastz commands and their input files for later execution.

Parses the lastz command lines written by run_segalign_diagonal_partition,
adds every referenced data file to a gzipped tar package, and writes the
rewritten commands as JSON lines to a config file inside that package.
"""

import argparse
import configparser
import json
import os
import sys
import tarfile
import typing

import bashlex


class PackageFile:
    """Lazily-created gzipped tar archive holding data files and a command config."""

    def __init__(
        self,
        pathname: str = "data_package.tgz",
        top_dir: str = "galaxy",
        data_dir: str = "files",
        config_file: str = "commands.json",
    ) -> None:
        self.pathname: str = os.path.realpath(pathname)
        self.data_root: str = os.path.join(top_dir, data_dir)
        self.config_path: str = os.path.join(top_dir, config_file)
        self.config_file: str = config_file
        self.tarfile: typing.Optional[tarfile.TarFile] = None
        # archive names already added, so each file is stored only once
        self.name_cache: typing.Dict[typing.Any, typing.Any] = {}
        self.working_dir: str = os.path.realpath(os.getcwd())

    def _initialize(self) -> None:
        """Open the tarball on first use."""
        if self.tarfile is None:
            self.tarfile = tarfile.open(
                name=self.pathname,
                mode="w:gz",
                format=tarfile.GNU_FORMAT,
                compresslevel=1,
            )

    def add_config(self, pathname: str) -> None:
        """Store *pathname* in the archive as the command config file."""
        if self.tarfile is None:
            self._initialize()

        source_path = os.path.realpath(pathname)

        if self.tarfile is not None:
            self.tarfile.add(source_path, arcname=self.config_path, recursive=False)

    def add_file(self, pathname: str, arcname: typing.Optional[str] = None) -> None:
        """Store *pathname* under the data root, optionally renamed to *arcname*.

        Exits the process if *arcname* escapes the working directory or the
        source file is missing.
        """
        if self.tarfile is None:
            self._initialize()

        source_path = os.path.realpath(pathname)

        dest_path = None

        if arcname is None:
            dest_path = os.path.join(self.data_root, os.path.basename(source_path))
        else:
            # Only allow archive names that stay inside the working directory.
            arc_path = os.path.realpath(arcname)
            rel_path = os.path.relpath(arc_path, self.working_dir)
            if not (os.path.isabs(rel_path) or rel_path.startswith("../")):
                dest_path = os.path.join(self.data_root, rel_path)
            else:
                sys.exit("path fail")

        if dest_path is not None and self.tarfile is not None:
            if dest_path not in self.name_cache:
                try:
                    self.tarfile.add(source_path, arcname=dest_path, recursive=False)
                except FileNotFoundError:
                    sys.exit(f"missing source file {source_path}")

                self.name_cache[dest_path] = 1

    def close(self) -> None:
        """Close the archive (if it was ever opened)."""
        if self.tarfile is not None:
            self.tarfile.close()
            self.tarfile = None


class bashCommandLineFile:
    """Parses a file of shell command lines and packages referenced files."""

    def __init__(
        self,
        pathname: str,
        config: configparser.ConfigParser,
        package_file: PackageFile,
    ) -> None:
        self.pathname: str = pathname
        self.config = config
        self.package_file = package_file
        self.executable: typing.Optional[str] = None
        self._parse_lines()

    def _package_sequence_arg(self, pathname: str) -> None:
        """Package a target/query sequence path of the form file[subset=...][...]."""
        if "[" in pathname:
            elems = pathname.split("[")
            sequence_file = elems.pop(0)
            self.package_file.add_file(sequence_file, sequence_file)
            for elem in elems:
                if elem.endswith("]"):
                    elem = elem[:-1]
                if elem.startswith("subset="):
                    self.package_file.add_file(elem[7:])

    def _parse_lines(self) -> None:
        """Rewrite each command line, packaging every file it references."""
        # CONSISTENCY FIX: use the configured name rather than a hardcoded
        # "commands.json" (default value is unchanged).
        with open(self.config_file_name()) as _:
            pass  # placeholder removed below
    # NOTE: see _parse_lines implementation directly below.

    def config_file_name(self) -> str:
        """Name of the JSON config file written next to the command file."""
        return self.package_file.config_file

    def _parse_lines_impl(self) -> None:  # pragma: no cover - replaced below
        pass


# Re-open the class to keep the real _parse_lines adjacent to its helpers.
def _parse_lines(self: bashCommandLineFile) -> None:
    """Rewrite each command line, packaging every file it references."""
    config_name = self.config_file_name()
    with open(config_name, "w") as ofh:
        with open(self.pathname) as f:
            line: str
            for line in f:
                line = line.rstrip("\n")
                command_dict = self._parse_line(line)
                # we may want to re-write args here
                new_args_list = []

                args_list = command_dict.get("args", [])
                for arg in args_list:
                    if arg.startswith("--target="):
                        new_args_list.append(arg)
                        self._package_sequence_arg(arg[9:])
                    elif arg.startswith("--query="):
                        new_args_list.append(arg)
                        self._package_sequence_arg(arg[8:])
                    elif arg.startswith("--segments="):
                        new_args_list.append(arg)
                        self.package_file.add_file(arg[11:])
                    elif arg.startswith("--scores="):
                        # scores files are normalized to a fixed archive path
                        new_args_list.append("--scores=data/scores.txt")
                        self.package_file.add_file(arg[9:], "data/scores.txt")
                    else:
                        new_args_list.append(arg)

                command_dict["args"] = new_args_list
                print(json.dumps(command_dict), file=ofh)

    self.package_file.add_config(config_name)


bashCommandLineFile._parse_lines = _parse_lines  # type: ignore[method-assign]


def _parse_line(self: bashCommandLineFile, line: str) -> typing.Dict[str, typing.Any]:
    """Parse one command line, resolving and stripping shell redirects."""
    trees: typing.List[typing.Any] = bashlex.parse(line, strictmode=False)  # type: ignore[attr-defined]
    positions: typing.List[typing.Tuple[int, int]] = []

    # BUG FIX: the original referenced the loop variable `visitor` after the
    # loop, which is unbound when bashlex returns no trees.
    stdin = stdout = stderr = None
    for tree in trees:
        visitor = nodevisitor(positions)
        visitor.visit(tree)
        stdin, stdout, stderr = visitor.stdin, visitor.stdout, visitor.stderr

    # do replacements from the end so the indices will be correct
    positions.reverse()

    processed = list(line)
    for start, end in positions:
        processed[start:end] = ""

    processed_line: str = "".join(processed)

    command_dict = self._parse_processed_line(processed_line)
    command_dict["stdin"] = stdin
    command_dict["stdout"] = stdout
    command_dict["stderr"] = stderr

    return command_dict


bashCommandLineFile._parse_line = _parse_line  # type: ignore[method-assign]


def _parse_processed_line(
    self: bashCommandLineFile, line: str
) -> typing.Dict[str, typing.Any]:
    """Tokenize a redirect-free command line into executable + args."""
    argv: typing.List[str] = list(bashlex.split(line))  # type: ignore[attr-defined]
    self.executable = argv.pop(0)

    # Build an argparse parser from the [arguments] section of lastz-cmd.ini.
    parser: argparse.ArgumentParser = argparse.ArgumentParser(add_help=False)
    if "arguments" in self.config:
        arguments_section = self.config["arguments"]

        arg: str
        if "flag_args" in arguments_section:
            for arg in arguments_section["flag_args"].split():
                parser.add_argument(f"--{arg}", action="store_true")

        if "str_args" in arguments_section:
            for arg in arguments_section["str_args"].split():
                parser.add_argument(f"--{arg}", type=str)

        if "bool_str_args" in arguments_section:
            for arg in arguments_section["bool_str_args"].split():
                parser.add_argument(f"--{arg}", nargs="?", const=True, default=False)

        if "int_args" in arguments_section:
            for arg in arguments_section["int_args"].split():
                parser.add_argument(f"--{arg}", type=int)

        if "bool_int_args" in arguments_section:
            for arg in arguments_section["bool_int_args"].split():
                parser.add_argument(f"--{arg}", nargs="?", const=True, default=False)

    namespace, rest = parser.parse_known_intermixed_args(argv)
    vars_dict = vars(namespace)

    command_dict: typing.Dict[str, typing.Any] = {
        "executable": self.executable,
        "args": [],
    }

    for var, value in vars_dict.items():
        if value is not None:
            if isinstance(value, bool):
                if value is True:
                    command_dict["args"].append(f"--{var}")
            else:
                command_dict["args"].append(f"--{var}={value}")

    # BUG FIX: the original tested `len(rest) >= 0`, which is always true and
    # made rest.pop(0) raise IndexError when no positionals remained.
    if rest:
        command_dict["args"].append(f"--target={rest.pop(0)}")

    if rest:
        command_dict["args"].append(f"--query={rest.pop(0)}")

    return command_dict


bashCommandLineFile._parse_processed_line = _parse_processed_line  # type: ignore[method-assign]


class nodevisitor(bashlex.ast.nodevisitor):  # type: ignore[name-defined,misc]
    """Records redirect targets and their source positions for removal."""

    def __init__(self, positions: typing.List[typing.Tuple[int, int]]) -> None:
        self.positions = positions
        self.stdin = None
        self.stdout = None
        self.stderr = None

    def visitredirect(
        self,
        n: bashlex.ast.node,  # type: ignore[name-defined]
        n_input: int,
        n_type: str,
        output: typing.Any,
        heredoc: typing.Any,
    ) -> None:
        # Only plain fd 0/1/2 redirects to a word target are supported.
        if isinstance(n_input, int) and 0 <= n_input <= 2:
            if isinstance(output, bashlex.ast.node) and output.kind == "word":  # type: ignore[attr-defined]
                self.positions.append(n.pos)
                if n_input == 0:
                    self.stdin = output.word
                elif n_input == 1:
                    self.stdout = output.word
                elif n_input == 2:
                    self.stderr = output.word
            else:
                sys.exit(f"oops 1: {type(n_input)}")
        else:
            sys.exit(f"oops 2: {type(n_input)}")

    def visitheredoc(self, n: bashlex.ast.node, value: typing.Any) -> None:  # type: ignore[name-defined]
        pass


def main() -> None:
    """Package lastz_commands.txt and referenced files into data_package.tgz."""
    our_dirname: str = os.path.dirname(os.path.realpath(__file__))
    lastz_command_config_file: str = os.path.join(our_dirname, "lastz-cmd.ini")

    config: configparser.ConfigParser = configparser.ConfigParser()
    config.read(lastz_command_config_file)

    package_file = PackageFile()
    lastz_command_file = "lastz_commands.txt"
    bashCommandLineFile(lastz_command_file, config, package_file)
    package_file.close()


if __name__ == "__main__":
    main()
#!/usr/bin/env bash
#
# Run SegAlign, partition each emitted segment file along its diagonals,
# and collect the rewritten lastz commands in $LASTZ_COMMAND_FILE.

set -o errexit
set -o nounset
set -o pipefail
#set -o xtrace

##
## parse arguments
##

SEGALIGN_ARGS=()

MAX_SEGMENT_SIZE=""
OUTPUT_FILENAME=""
TOOL_DIRECTORY=""
LASTZ_COMMAND_FILE="lastz_commands.txt"
HELP=0

while [[ $# -gt 0 ]]; do
    case $1 in
        --help)
            HELP=1
            shift
            ;;
        --max_segments)
            MAX_SEGMENT_SIZE="$2"
            shift 2
            ;;
        --output)
            OUTPUT_FILENAME=$(readlink -f "$2")
            shift 2
            ;;
        --tool_directory)
            TOOL_DIRECTORY="$2"
            shift 2
            ;;
        *)
            SEGALIGN_ARGS+=("$1")
            shift
    esac
done

set -- "${SEGALIGN_ARGS[@]}"

##
## check arguments
##

if [[ $# == 0 || "$HELP" == "1" ]]; then
    segalign --help
    exit 0
fi

# TOOL_DIRECTORY is needed below; fail early with a clear message instead
# of tripping `set -o nounset` at the point of use.
if [[ -z "$TOOL_DIRECTORY" ]]; then
    echo "run_segalign_diagonal_partition: missing --tool_directory argument" 1>&2
    exit 1
fi

if [[ $# -lt 2 ]]; then
    echo "run_segalign_diagonal_partition: missing target and query sequences" 1>&2
    exit 1
fi

ref_path=$(readlink -f "$1")
test -e "$ref_path" || {
    echo "run_segalign_diagonal_partition: target file \"$ref_path\" does not exist" 1>&2
    exit 1
}
query_path=$(readlink -f "$2")
test -e "$query_path" || {
    echo "run_segalign_diagonal_partition: query file \"$query_path\" does not exist" 1>&2
    exit 1
}
shift 2

DATA_FOLDER="data"
mkdir -p "$DATA_FOLDER" || {
    echo "run_segalign_diagonal_partition: cannot create data directory \"$DATA_FOLDER\"" 1>&2
    exit 1
}

cd "$DATA_FOLDER/.."

echo ""
echo "Converting fasta files to 2bit format" 1>&2

##
## convert target and query to 2bit
##
faToTwoBit "$ref_path" "$DATA_FOLDER/ref.2bit" || {
    echo "run_segalign_diagonal_partition: cannot convert \"$ref_path\" to 2bit" 1>&2
    exit 1
}
faToTwoBit "$query_path" "$DATA_FOLDER/query.2bit" || {
    # BUG FIX: this message previously reported $ref_path for a query failure
    echo "run_segalign_diagonal_partition: cannot convert \"$query_path\" to 2bit" 1>&2
    exit 1
}


time {
    # segalign starts running here; every line it prints on stdout is sent
    # to diagonal_partition.py, which writes out partitioned segment files
    # and prints the modified lastz commands.
    while IFS= read -r line; do
        # $MAX_SEGMENT_SIZE and $line are intentionally unquoted: each
        # whitespace-separated token must become a separate argument.
        "$TOOL_DIRECTORY/diagonal_partition.py" $MAX_SEGMENT_SIZE $line >> "$LASTZ_COMMAND_FILE"
    done < <(stdbuf -oL segalign "$ref_path" "$query_path" "${DATA_FOLDER}/" "$@")
} 1>&2

exit 0
+ + + + + + + + + + + + + + + + + + + +
+
+
diff -r 000000000000 -r 5c72425b7f1b seeding_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/seeding_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,34 @@ + + +
+ + + + + + + + + + + + + + + + ^[01T]+$ + + + + + + + + + + + + +
+
+
diff -r 000000000000 -r 5c72425b7f1b segalign.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/segalign.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,246 @@ + + A Scalable GPU System for Pairwise Whole Genome Alignments based on LASTZ's seed-filter-extend paradigm + + macros.xml + sequence_options.xml + scoring_options.xml + seeding_options.xml + ungapped_extension_options.xml + gapped_extension_options.xml + output_options.xml + segalign_output_options.xml + system_options.xml + + + + '${output}' + ## .end if + ## .if $output_format.rplot: + ## && + ## Rscript $r_plot > /dev/null 2>&1 + ## .end if + #if $segalign_mode == "segalign" + --output '$segalign_output' + #end if +#else if $segalign_mode == "segalign_repeat_masker" + --M '$mode.output_options.M' + --output '$segalign_repeat_masker_output' +#end if +#if str($mode.output_options.markend) == "true" + --markend +#end if + +## System Options ----------------------------------------------------- + + --wga_chunk_size '$mode.system_options.wga_chunk_size' + --lastz_interval_size '$mode.system_options.lastz_interval_size' + --seq_block_size '$mode.system_options.seq_block_size' + --num_gpu '$mode.system_options.num_gpu' +#if str($mode.system_options.debug) == "true" + --debug +#end if + +## ------------------------------------------------------------------- + +#if $segalign_mode == "segalign_diagonal_partition" + && + '$__tool_directory__/package_output.py' +#end if + + ]]> + + + + + + + + + + + + + + + + + + +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + + + + + + mode['mode_selector'] == 'segalign' and mode['diagonal_partition_options']['diagonal_partition'] is False + + + mode['mode_selector'] == 'segalign' and mode['diagonal_partition_options']['diagonal_partition'] is True + + + mode['mode_selector'] == 'segalign_repeat_masker' + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff -r 000000000000 -r 5c72425b7f1b segalign_output_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/segalign_output_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,93 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r 000000000000 -r 5c72425b7f1b sequence_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sequence_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,12 @@ + + +
+ + + + + + +
+
+
diff -r 000000000000 -r 5c72425b7f1b system_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/system_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,11 @@ + + +
+ + + + + +
+
+
diff -r 000000000000 -r 5c72425b7f1b test-data/hg38.chr20.chunk.fa.gz Binary file test-data/hg38.chr20.chunk.fa.gz has changed diff -r 000000000000 -r 5c72425b7f1b test-data/mm39.chr2.chunk.fa.gz Binary file test-data/mm39.chr2.chunk.fa.gz has changed diff -r 000000000000 -r 5c72425b7f1b test-data/segalign-output.maf.gz Binary file test-data/segalign-output.maf.gz has changed diff -r 000000000000 -r 5c72425b7f1b test-data/segalign-repeat-masker-output.tab.gz Binary file test-data/segalign-repeat-masker-output.tab.gz has changed diff -r 000000000000 -r 5c72425b7f1b ungapped_extension_options.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/ungapped_extension_options.xml Wed Apr 17 18:06:54 2024 +0000 @@ -0,0 +1,9 @@ + + +
+ + + +
+
+