Previous changeset: 0:ab7a7e798c34 (2017-11-06)
Next changeset: 2:0e532fc0a0a6 (2020-09-17)
Commit message:
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_mothur_toolsuite/ commit 57f71aa633a43ab02bbf05acd0c6d7f406e01f1e" |
modified:
data_manager/data_manager_fetch_mothur_reference_data.xml
data_manager/fetch_mothur_reference_data.py
diff -r ab7a7e798c34 -r aec831b54a5b data_manager/data_manager_fetch_mothur_reference_data.xml
--- a/data_manager/data_manager_fetch_mothur_reference_data.xml	Mon Nov 06 06:21:50 2017 -0500
+++ b/data_manager/data_manager_fetch_mothur_reference_data.xml	Thu Nov 28 15:47:32 2019 -0500
@@ -1,5 +1,5 @@
 <?xml version="1.0"?>
-<tool id="data_manager_fetch_mothur_reference_data" name="Fetch Mothur toolsuite reference data" version="0.1.3" tool_type="manage_data">
+<tool id="data_manager_fetch_mothur_reference_data" name="Fetch Mothur toolsuite reference data" version="0.1.4" tool_type="manage_data" profile="19.05">
     <description>Fetch and install reference data for Mothur</description>
     <requirements>
         <requirement type="package" version="2.7">python</requirement>
@@ -66,7 +66,7 @@
     </outputs>
     <tests>
         <test>
-            <param name="data_sourece|ref_data" value="lookup_titanium"/>
+            <param name="data_source|ref_data" value="lookup_titanium"/>
             <output name="out_file">
                 <assert_contents>
                     <has_text text="GS FLX Titanium" />
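In the tool XML, the version is bumped from 0.1.3 to 0.1.4 and the tool now declares profile="19.05", which tells Galaxy that the wrapper targets the behaviour of Galaxy release 19.05 or newer. The test stanza also fixes a typo: the mistyped parameter path data_sourece|ref_data could not address the tool's data_source conditional, so the corrected data_source|ref_data is what actually lets the test set the ref_data input.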
diff -r ab7a7e798c34 -r aec831b54a5b data_manager/fetch_mothur_reference_data.py
--- a/data_manager/fetch_mothur_reference_data.py	Mon Nov 06 06:21:50 2017 -0500
+++ b/data_manager/fetch_mothur_reference_data.py	Thu Nov 28 15:47:32 2019 -0500
@@ -228,12 +228,12 @@
     Returns the name that the file is saved with.
 
     """
-    print "Downloading %s" % url
+    print("Downloading %s" % url)
     if not target:
         target = os.path.basename(url)
     if wd:
         target = os.path.join(wd, target)
-    print "Saving to %s" % target
+    print("Saving to %s" % target)
     open(target, 'wb').write(urllib2.urlopen(url).read())
     return target
@@ -254,13 +254,13 @@
 
     """
     if not zipfile.is_zipfile(filen):
-        print "%s: not ZIP formatted file"
+        print("%s: not ZIP formatted file")
         return [filen]
     file_list = []
     z = zipfile.ZipFile(filen)
     for name in z.namelist():
         if reduce(lambda x, y: x or name.startswith(y), IGNORE_PATHS, False):
-            print "Ignoring %s" % name
+            print("Ignoring %s" % name)
             continue
         if wd:
             target = os.path.join(wd, name)
@@ -268,21 +268,21 @@
             target = name
         if name.endswith('/'):
             # Make directory
-            print "Creating dir %s" % target
+            print("Creating dir %s" % target)
             try:
                 os.makedirs(target)
             except OSError:
                 pass
         else:
             # Extract file
-            print "Extracting %s" % name
+            print("Extracting %s" % name)
             try:
                 os.makedirs(os.path.dirname(target))
             except OSError:
                 pass
             open(target, 'wb').write(z.read(name))
             file_list.append(target)
-    print "Removing %s" % filen
+    print("Removing %s" % filen)
     os.remove(filen)
     return file_list
@@ -305,23 +305,23 @@
 
     """
     file_list = []
    if not tarfile.is_tarfile(filen):
-        print "%s: not TAR file"
+        print("%s: not TAR file")
         return [filen]
     t = tarfile.open(filen)
     for name in t.getnames():
         # Check for unwanted files
         if reduce(lambda x, y: x or name.startswith(y), IGNORE_PATHS, False):
-            print "Ignoring %s" % name
+            print("Ignoring %s" % name)
             continue
         # Extract file
-        print "Extracting %s" % name
+        print("Extracting %s" % name)
         t.extract(name, wd)
         if wd:
             target = os.path.join(wd, name)
         else:
             target = name
         file_list.append(target)
-    print "Removing %s" % filen
+    print("Removing %s" % filen)
     os.remove(filen)
     return file_list
@@ -339,9 +339,9 @@
     current working directory.
 
     """
-    print "Unpack %s" % filen
+    print("Unpack %s" % filen)
     ext = os.path.splitext(filen)[1]
-    print "Extension: %s" % ext
+    print("Extension: %s" % ext)
     if ext == ".zip":
         return unpack_zip_archive(filen, wd=wd)
     elif ext == ".tgz":
@@ -382,7 +382,7 @@
     try:
         return MOTHUR_FILE_TYPES[ext]
     except KeyError:
-        print "WARNING: unknown file type for " + filen + ", skipping"
+        print("WARNING: unknown file type for " + filen + ", skipping")
         return None
@@ -415,26 +415,26 @@
     """
     # Make working dir
     wd = tempfile.mkdtemp(suffix=".mothur", dir=os.getcwd())
-    print "Working dir %s" % wd
+    print("Working dir %s" % wd)
     # Iterate over all requested reference data URLs
     for dataset in datasets:
-        print "Handling dataset '%s'" % dataset
+        print("Handling dataset '%s'" % dataset)
         for name in MOTHUR_REFERENCE_DATA[dataset]:
             for f in fetch_files(MOTHUR_REFERENCE_DATA[dataset][name], wd=wd):
                 type_ = identify_type(f)
                 entry_name = "%s (%s)" % (os.path.splitext(os.path.basename(f))[0], name)
-                print "%s\t\'%s'\t.../%s" % (type_, entry_name, os.path.basename(f))
+                print("%s\t\'%s'\t.../%s" % (type_, entry_name, os.path.basename(f)))
                 if type_ is not None:
                     # Move to target dir
                     ref_data_file = os.path.basename(f)
                     f1 = os.path.join(target_dir, ref_data_file)
-                    print "Moving %s to %s" % (f, f1)
+                    print("Moving %s to %s" % (f, f1))
                     os.rename(f, f1)
                     # Add entry to data table
                     table_name = "mothur_%s" % type_
                     add_data_table_entry(data_tables, table_name, dict(name=entry_name, value=ref_data_file))
     # Remove working dir
-    print "Removing %s" % wd
+    print("Removing %s" % wd)
     shutil.rmtree(wd)
@@ -450,7 +450,7 @@
     files = []
     for path in paths:
         path = os.path.abspath(path)
-        print "Examining '%s'..." % path
+        print("Examining '%s'..." % path)
         if os.path.isfile(path):
             # Store full path for file
             files.append(path)
@@ -459,7 +459,7 @@
             for f in os.listdir(path):
                 files.extend(files_from_filesystem_paths((os.path.join(path, f), )))
         else:
-            print "Not a file or directory, ignored"
+            print("Not a file or directory, ignored")
     return files
@@ -489,14 +489,14 @@
     for f in files:
         type_ = identify_type(f)
         if type_ is None:
-            print "%s: unrecognised type, skipped" % f
+            print("%s: unrecognised type, skipped" % f)
             continue
         ref_data_file = os.path.basename(f)
         target_file = os.path.join(target_dir, ref_data_file)
         entry_name = "%s" % os.path.splitext(ref_data_file)[0]
         if description:
             entry_name += " (%s)" % description
-        print "%s\t\'%s'\t.../%s" % (type_, entry_name, ref_data_file)
+        print("%s\t\'%s'\t.../%s" % (type_, entry_name, ref_data_file))
         # Link to or copy the data
         if link_to_data:
             os.symlink(f, target_file)
@@ -508,7 +508,7 @@
 
 
 if __name__ == "__main__":
-    print "Starting..."
+    print("Starting...")
 
     # Read command line
     parser = optparse.OptionParser()
@@ -518,8 +518,8 @@
     parser.add_option('--description', action='store', dest='description', default='')
     parser.add_option('--link', action='store_true', dest='link_to_data')
     options, args = parser.parse_args()
-    print "options: %s" % options
-    print "args   : %s" % args
+    print("options: %s" % options)
+    print("args   : %s" % args)
 
     # Check for JSON file
     if len(args) != 1:
@@ -532,7 +532,7 @@
     params, target_dir = read_input_json(jsonfile)
 
     # Make the target directory
-    print "Making %s" % target_dir
+    print("Making %s" % target_dir)
     os.mkdir(target_dir)
 
     # Set up data tables dictionary
@@ -554,7 +554,7 @@
         paths = options.paths.replace('__cn__', '\n').replace('__cr__', '\r').split()
         import_from_server(data_tables, target_dir, paths, description, link_to_data=options.link_to_data)
     # Write output JSON
-    print "Outputting JSON"
-    print str(json.dumps(data_tables))
-    open(jsonfile, 'wb').write(json.dumps(data_tables))
-    print "Done."
+    print("Outputting JSON")
+    print(json.dumps(data_tables))
+    open(jsonfile, 'w').write(json.dumps(data_tables, sort_keys=True))
+    print("Done.")