# HG changeset patch
# User dave
# Date 1606838860 0
# Node ID bd47b9f87d677cde8443cf0eafc16a3401502a76
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_build_kraken2_database/ commit 68cd9a8ae50c5dfe6b667062a5172010511bcaff-dirty"
diff -r 000000000000 -r bd47b9f87d67 data_manager/kraken2_build_database.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/kraken2_build_database.py Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,382 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import argparse
+import datetime
+import errno
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+from enum import Enum
+
+try:
+ # Python3
+ from urllib.request import urlopen
+except ImportError:
+ from urllib2 import urlopen
+
+
+DATA_TABLE_NAME = "kraken2_databases"
+
+
class KrakenDatabaseTypes(Enum):
    """Closed set of Kraken2 database build modes accepted on the command line."""

    standard = 'standard'
    minikraken = 'minikraken'
    special = 'special'
    custom = 'custom'

    def __str__(self):
        # argparse renders choices via str(), so expose the bare value.
        return '{0}'.format(self.value)
+
+
class SpecialDatabaseTypes(Enum):
    """Special-purpose 16S databases that kraken2-build can fetch directly."""

    rdp = 'rdp'
    greengenes = 'greengenes'
    silva = 'silva'

    def __str__(self):
        # argparse renders choices via str(), so expose the bare value.
        return '{0}'.format(self.value)
+
+
class Minikraken2Versions(Enum):
    """MiniKraken2 archive versions published for the April 2019 release."""

    v1 = 'v1'
    v2 = 'v2'

    def __str__(self):
        # argparse renders choices via str(), so expose the bare value.
        return '{0}'.format(self.value)
+
class Minikraken2Releases(Enum):
    """Published MiniKraken2 release dates selectable for download."""

    March_2020 = 'March_2020'
    April_2019 = 'April_2019'

    def __str__(self):
        # argparse renders choices via str(), so expose the bare value.
        return '{0}'.format(self.value)
+
+
def kraken2_build_standard(kraken2_args, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build the standard Kraken2 database and return a data table entry.

    kraken2_args: dict with keys kmer_len, minimizer_len, minimizer_spaces,
        load_factor, threads and clean.
    target_directory: directory in which kraken2-build is run; the database
        directory is created inside it.
    data_table_name: Galaxy data table the new entry is registered under.

    Raises subprocess.CalledProcessError if kraken2-build fails.
    """
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")

    database_value = "_".join([
        now,
        "standard",
        "kmer-len", str(kraken2_args["kmer_len"]),
        "minimizer-len", str(kraken2_args["minimizer_len"]),
        "minimizer-spaces", str(kraken2_args["minimizer_spaces"]),
        "load-factor", str(kraken2_args["load_factor"]),
    ])

    # FIX: load-factor used to be appended after the closing parenthesis and
    # without "="; fold it into the parenthesised parameter list instead.
    database_name = " ".join([
        "Standard",
        "(Created:",
        now + ",",
        "kmer-len=" + str(kraken2_args["kmer_len"]) + ",",
        "minimizer-len=" + str(kraken2_args["minimizer_len"]) + ",",
        "minimizer-spaces=" + str(kraken2_args["minimizer_spaces"]) + ",",
        "load-factor=" + str(kraken2_args["load_factor"]) + ")",
    ])

    # kraken2-build names the database directory after --db
    database_path = database_value

    args = [
        '--threads', str(kraken2_args["threads"]),
        '--standard',
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--load-factor', str(kraken2_args["load_factor"]),
        '--db', database_path
    ]

    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)

    if kraken2_args["clean"]:
        # remove intermediate build files to save disk space
        args = [
            '--threads', str(kraken2_args["threads"]),
            '--clean',
            '--db', database_path
        ]

        subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)

    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }

    return data_table_entry
+
+
def kraken2_build_minikraken(minikraken2_version, minikraken2_release, target_directory, data_table_name=DATA_TABLE_NAME):
    """Download and unpack a prebuilt MiniKraken2 database.

    minikraken2_version: 'v1' or 'v2' (only used for the April_2019 release).
    minikraken2_release: 'March_2020' or 'April_2019'.
    target_directory: directory into which the database directory is unpacked.
    data_table_name: Galaxy data table the new entry is registered under.
    """
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")

    value_parts = [now, "minikraken2", minikraken2_release, "8GB"]
    name_parts = ["Minikraken2", minikraken2_release, "8GB", "(Created: %s)" % now]

    if minikraken2_release == 'April_2019':
        # only the older release is split into v1/v2 archives
        value_parts.insert(3, minikraken2_version)
        name_parts.insert(2, minikraken2_version)
        url = (
            'ftp://ftp.ccb.jhu.edu/pub/data/kraken2_dbs/old/minikraken2_%s_8GB_201904.tgz'
            % minikraken2_version
        )
    else:
        url = 'ftp://ftp.ccb.jhu.edu/pub/data/kraken2_dbs/minikraken_8GB_202003.tgz'

    database_value = "_".join(value_parts)

    database_name = " ".join(name_parts)

    database_path = database_value

    # download the minikraken2 data; FIX: close the connection and remove the
    # temporary archive afterwards instead of leaking both
    src = urlopen(url)
    try:
        with open('tmp_data.tar.gz', 'wb') as dst:
            shutil.copyfileobj(src, dst)
    finally:
        src.close()

    try:
        # unpack the downloaded archive to the target directory, flattening
        # each regular member's path into the database directory
        with tarfile.open('tmp_data.tar.gz', 'r:gz') as fh:
            for member in fh.getmembers():
                if member.isreg():
                    member.name = os.path.basename(member.name)
                    fh.extract(member, os.path.join(target_directory, database_path))
    finally:
        os.remove('tmp_data.tar.gz')

    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }

    return data_table_entry
+
+
def kraken2_build_special(kraken2_args, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build one of the special 16S Kraken2 databases (RDP/Greengenes/Silva).

    kraken2_args: dict with keys special_database_type, kmer_len,
        minimizer_len, minimizer_spaces, load_factor, threads and clean.
    target_directory: directory in which kraken2-build is run.
    data_table_name: Galaxy data table the new entry is registered under.

    Raises subprocess.CalledProcessError if kraken2-build fails.
    """
    now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%SZ")

    # map CLI values to human-readable display names
    special_database_names = {
        "rdp": "RDP",
        "greengenes": "Greengenes",
        "silva": "Silva",
    }

    database_value = "_".join([
        now,
        kraken2_args["special_database_type"],
        "kmer-len", str(kraken2_args["kmer_len"]),
        "minimizer-len", str(kraken2_args["minimizer_len"]),
        "minimizer-spaces", str(kraken2_args["minimizer_spaces"]),
        "load-factor", str(kraken2_args["load_factor"]),
    ])

    # FIX: the name previously closed the parenthesis after minimizer-spaces
    # AND again after load-factor, yielding "...)...))"
    database_name = " ".join([
        special_database_names[kraken2_args["special_database_type"]],
        "(Created:",
        now + ",",
        "kmer-len=" + str(kraken2_args["kmer_len"]) + ",",
        "minimizer-len=" + str(kraken2_args["minimizer_len"]) + ",",
        "minimizer-spaces=" + str(kraken2_args["minimizer_spaces"]) + ",",
        "load-factor=" + str(kraken2_args["load_factor"]) + ")",
    ])

    database_path = database_value

    args = [
        '--threads', str(kraken2_args["threads"]),
        '--special', kraken2_args["special_database_type"],
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--load-factor', str(kraken2_args["load_factor"]),
        '--db', database_path
    ]

    subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)

    if kraken2_args["clean"]:
        # remove intermediate build files to save disk space
        args = [
            '--threads', str(kraken2_args["threads"]),
            '--clean',
            '--db', database_path
        ]

        subprocess.check_call(['kraken2-build'] + args, cwd=target_directory)

    data_table_entry = {
        'data_tables': {
            data_table_name: [
                {
                    "value": database_value,
                    "name": database_name,
                    "path": database_path,
                }
            ]
        }
    }

    return data_table_entry
+
+
def kraken2_build_custom(kraken2_args, custom_database_name, target_directory, data_table_name=DATA_TABLE_NAME):
    """Build a custom Kraken2 database from a user-supplied FASTA file.

    Runs the kraken2-build steps in order: download taxonomy, add the FASTA
    to the library, build, and optionally clean.
    """
    threads_args = ['--threads', str(kraken2_args["threads"])]

    taxonomy_cmd = threads_args + [
        '--download-taxonomy',
        '--db', custom_database_name,
    ]
    if kraken2_args['skip_maps']:
        taxonomy_cmd.append('--skip-maps')

    library_cmd = threads_args + [
        '--add-to-library', kraken2_args["custom_fasta"],
        '--db', custom_database_name,
    ]

    build_cmd = threads_args + [
        '--build',
        '--kmer-len', str(kraken2_args["kmer_len"]),
        '--minimizer-len', str(kraken2_args["minimizer_len"]),
        '--minimizer-spaces', str(kraken2_args["minimizer_spaces"]),
        '--load-factor', str(kraken2_args["load_factor"]),
        '--db', custom_database_name,
    ]

    commands = [taxonomy_cmd, library_cmd, build_cmd]
    if kraken2_args["clean"]:
        # remove intermediate build files to save disk space
        commands.append(threads_args + ['--clean', '--db', custom_database_name])

    for command in commands:
        subprocess.check_call(['kraken2-build'] + command, cwd=target_directory)

    return {
        'data_tables': {
            data_table_name: [
                {
                    "value": custom_database_name,
                    "name": custom_database_name,
                    "path": custom_database_name,
                }
            ]
        }
    }
+
+
def main():
    """Entry point: parse the data manager arguments, build the requested
    Kraken2 database and write the resulting data table JSON back to the
    data manager output file."""
    parser = argparse.ArgumentParser()
    parser.add_argument('data_manager_json')
    parser.add_argument('--kmer-len', dest='kmer_len', type=int, default=35, help='kmer length')
    parser.add_argument('--minimizer-len', dest='minimizer_len', type=int, default=31, help='minimizer length')
    # FIX: these two options were missing type=int, unlike their siblings;
    # values are re-stringified downstream so this is backward compatible.
    parser.add_argument('--minimizer-spaces', dest='minimizer_spaces', type=int, default=6, help='minimizer spaces')
    parser.add_argument('--load-factor', dest='load_factor', type=float, default=0.7, help='load factor')
    parser.add_argument('--threads', dest='threads', type=int, default=1, help='threads')
    parser.add_argument('--database-type', dest='database_type', type=KrakenDatabaseTypes, choices=list(KrakenDatabaseTypes), required=True, help='type of kraken database to build')
    parser.add_argument('--minikraken2-version', dest='minikraken2_version', type=Minikraken2Versions, choices=list(Minikraken2Versions), help='MiniKraken2 version (only applies to --database-type minikraken) and the Mar2019 release')
    parser.add_argument('--minikraken2-release', dest='minikraken2_release', type=Minikraken2Releases, choices=list(Minikraken2Releases), help='MiniKraken2 release (only applies to --database-type minikraken)')
    parser.add_argument('--special-database-type', dest='special_database_type', type=SpecialDatabaseTypes, choices=list(SpecialDatabaseTypes), help='type of special database to build (only applies to --database-type special)')
    parser.add_argument('--custom-fasta', dest='custom_fasta', help='fasta file for custom database (only applies to --database-type custom)')
    parser.add_argument('--custom-database-name', dest='custom_database_name', help='Name for custom database (only applies to --database-type custom)')
    parser.add_argument('--skip-maps', dest='skip_maps', action='store_true', help='')
    parser.add_argument('--clean', dest='clean', action='store_true', help='Clean up extra files')
    args = parser.parse_args()

    # Galaxy hands us a JSON file describing where to put the database
    with open(args.data_manager_json) as fh:
        data_manager_input = json.load(fh)

    target_directory = data_manager_input['output_data'][0]['extra_files_path']

    # EAFP mkdir that tolerates an already-existing directory
    # (errno-based so it also works under Python 2)
    try:
        os.mkdir(target_directory)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(target_directory):
            pass
        else:
            raise

    data_manager_output = {}

    if str(args.database_type) == 'standard':
        kraken2_args = {
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "load_factor": args.load_factor,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_standard(
            kraken2_args,
            target_directory,
        )
    elif str(args.database_type) == 'minikraken':
        data_manager_output = kraken2_build_minikraken(
            str(args.minikraken2_version),
            str(args.minikraken2_release),
            target_directory
        )
    elif str(args.database_type) == 'special':
        kraken2_args = {
            "special_database_type": str(args.special_database_type),
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "load_factor": args.load_factor,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_special(
            kraken2_args,
            target_directory,
        )
    elif str(args.database_type) == 'custom':
        kraken2_args = {
            "custom_fasta": args.custom_fasta,
            "skip_maps": args.skip_maps,
            "kmer_len": args.kmer_len,
            "minimizer_len": args.minimizer_len,
            "minimizer_spaces": args.minimizer_spaces,
            "load_factor": args.load_factor,
            "threads": args.threads,
            "clean": args.clean,
        }
        data_manager_output = kraken2_build_custom(
            kraken2_args,
            args.custom_database_name,
            target_directory,
        )
    else:
        # argparse choices should make this unreachable, but fail loudly
        sys.exit("Invalid database type")

    # write the new data table entries back for Galaxy to pick up
    with open(args.data_manager_json, 'w') as fh:
        json.dump(data_manager_output, fh, sort_keys=True)


if __name__ == "__main__":
    main()
diff -r 000000000000 -r bd47b9f87d67 data_manager/kraken2_build_database.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/kraken2_build_database.xml Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,116 @@
+
+
+
+ 2.1.1
+
+
+
+
+
+
+
+
+ database builder
+
+ kraken2
+ python
+
+ kraken2 -version | head -n 1 | awk '{print $NF}'
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 10.1186/gb-2014-15-3-r46
+
+
diff -r 000000000000 -r bd47b9f87d67 data_manager_conf.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
diff -r 000000000000 -r bd47b9f87d67 test-data/adapter.fa
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/adapter.fa Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,2 @@
+>sequence16|kraken:taxid|32630 Adapter sequence
+CAAGCAGAAGACGGCATACGAGATCTTCGAGTGACTGGAGTTCCTTGGCACCCGAGAATTCCA
diff -r 000000000000 -r bd47b9f87d67 test-data/adapter.fastq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/adapter.fastq Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,4 @@
+@sequence16
+CAAGCAGAAGACGGCATACGAGATCTTCGAGTGACTGGAGTTCCTTGGCACCCGAGAATTCCA
++
+IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
diff -r 000000000000 -r bd47b9f87d67 test-data/kraken2_custom_data_manager.json
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/kraken2_custom_data_manager.json Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,1 @@
+{"data_tables": {"kraken2_databases": [{"name": "database", "path": "database", "value": "database"}]}}
\ No newline at end of file
diff -r 000000000000 -r bd47b9f87d67 tool-data/kraken2_databases.loc.sample
diff -r 000000000000 -r bd47b9f87d67 tool_data_table_conf.xml.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample Tue Dec 01 16:07:40 2020 +0000
@@ -0,0 +1,8 @@
+
+
+
+
+