Commit message:
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_manual commit f2ab0315fa6df9e3e917bbc112e29f384c3affba" |
modified:
data_manager/data_manager_manual.py
diff -r 6524e573d9c2 -r 1c1e228884d3 data_manager/data_manager_manual.py
--- a/data_manager/data_manager_manual.py	Mon Jan 15 12:03:01 2018 -0500
+++ b/data_manager/data_manager_manual.py	Mon Sep 23 10:53:17 2019 -0400
@@ -7,7 +7,6 @@
 import os
 import shutil
 import tempfile
-import urllib2
 from xml.etree.ElementTree import tostring
 
 try:
@@ -20,71 +19,73 @@
 try:
     # For Python 3.0 and later
     from urllib.request import urlretrieve
+    from urllib.parse import urlsplit
 except ImportError:
     # Fall back to Python 2 imports
     from urllib import urlretrieve
+    from urlparse import urlsplit
 
 _log_name = __name__
 if _log_name == '__builtin__':
     _log_name = 'toolshed.installed.manual.data.manager'
-log = logging.getLogger( _log_name )
+log = logging.getLogger(_log_name)
 
 
 # --- These methods are called by/within the Galaxy Application
-def exec_before_job( app, inp_data, out_data, param_dict, tool=None, **kwd ):
+def exec_before_job(app, inp_data, out_data, param_dict, tool=None, **kwd):
     # Look for any data tables that haven't been defined for this data manager before and dynamically add them to Galaxy
-    param_dict = dict( **param_dict )
-    data_tables_param = param_dict.get( 'data_tables', [] )
-    if not isinstance( data_tables_param, list ):
+    param_dict = dict(**param_dict)
+    data_tables_param = param_dict.get('data_tables', [])
+    if not isinstance(data_tables_param, list):
         data_tables_param = [data_tables_param]
     if tool:
         tool_shed_repository = tool.tool_shed_repository
     else:
         tool_shed_repository = None
     tdtm = None
-    data_manager = app.data_managers.get_manager( tool.data_manager_id, None )
+    data_manager = app.data_managers.get_manager(tool.data_manager_id, None)
     for data_table_param in data_tables_param:
-        data_table_name = data_table_param.get( 'data_table_name', None )
+        data_table_name = data_table_param.get('data_table_name')
         if data_table_name:
             # get data table managed by this data Manager
-            data_table = app.tool_data_tables.get_tables().get( str( data_table_name ), None )
+            data_table = app.tool_data_tables.get_tables().get(data_table_name)
             if data_table:
-                data_table_filename = data_table.get_filename_for_source( data_manager, None )
+                data_table_filename = data_table.get_filename_for_source(data_manager, None)
                 if not data_table_filename:
                     if tdtm is None:
                         from tool_shed.tools import data_table_manager
-                        tdtm = data_table_manager.ToolDataTableManager( app )
-                        target_dir, tool_path, relative_target_dir = tdtm.get_target_install_dir( tool_shed_repository )
+                        tdtm = data_table_manager.ToolDataTableManager(app)
+                        target_dir, tool_path, relative_target_dir = tdtm.get_target_install_dir(tool_shed_repository)
                     # Dynamically add this data table
-                    log.debug( "Attempting to dynamically create a missing Tool Data Table named %s." % data_table_name )
-                    repo_info = tdtm.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None )
+                    log.debug("Attempting to dynamically create a missing Tool Data Table named %s." % data_table_name)
+                    repo_info = tdtm.generate_repository_info_elem_from_repository(tool_shed_repository, parent_elem=None)
                     if repo_info is not None:
-                        repo_info = tostring( repo_info )
+                        repo_info = tostring(repo_info)
                     tmp_file = tempfile.NamedTemporaryFile()
-                    tmp_file.write( __get_new_xml_definition( app, data_table, data_manager, repo_info, target_dir ) )
+                    tmp_file.write(__get_new_xml_definition(app, data_table, data_manager, repo_info, target_dir))
                     tmp_file.flush()
-                    app.tool_data_tables.add_new_entries_from_config_file( tmp_file.name, None, app.config.shed_tool_data_table_config, persist=True )
+                    app.tool_data_tables.add_new_entries_from_config_file(tmp_file.name, None, app.config.shed_tool_data_table_config, persist=True)

[... middle of the diff elided in the source ...]

+        rval[data_table_name] = rval.get(data_table_name, [])
+        rval[data_table_name].append(entry_dict)
     return rval
 
 
-def get_file_content( params, target_directory ):
-    directory_content = params.get( 'directory_content', [] )
+def get_file_content(params, target_directory):
+    directory_content = params.get('directory_content', [])
     for content in directory_content:
-        target_path = os.path.join( target_directory, content.get( 'subdir', '' ) )
+        target_path = os.path.join(target_directory, content.get('subdir', ''))
         try:
-            os.makedirs( target_path )
+            os.makedirs(target_path)
         except OSError:
             pass
-        if content.get( 'file_source', {}).get( 'file_source_selector', None ) == 'URL':
-            ( filename, headers ) = urlretrieve( content.get( 'file_source', {}).get( 'file_URL', None ) )
+        if content.get('file_source', {}).get('file_source_selector') == 'URL':
+            (filename, headers) = urlretrieve(content.get('file_source', {}).get('file_URL'))
             try:
                 bname = headers['Content-Disposition']
             except KeyError:
-                bname = os.path.basename( urllib2.urlparse.urlsplit( content.get( 'file_source', {}).get( 'file_URL', None ) ).path )
+                bname = os.path.basename(urlsplit(content.get('file_source', {}).get('file_URL')).path)
         else:
-            filename = content.get( 'file_source', {}).get( 'file_history', None )
-            bname = os.path.basename( filename )
-        file_action = content.get( 'file_action', {}).get( 'file_action_selector', None )
+            filename = content.get('file_source', {}).get('file_history')
+            bname = os.path.basename(filename)
+        file_action = content.get('file_action', {}).get('file_action_selector')
         if file_action == 'unpack':
-            unpack_archive( filename, target_path )
+            unpack_archive(filename, target_path)
         else:
-            filename_override = content.get( 'file_action', {}).get( 'filename_override', None )
+            filename_override = content.get('file_action', {}).get('filename_override')
             if filename_override:
-                target_path = os.path.join( target_path, filename_override )
+                target_path = os.path.join(target_path, filename_override)
             else:
-                target_path = os.path.join( target_path, bname )
-            shutil.copyfile( filename, target_path )
-    return len( directory_content )
+                target_path = os.path.join(target_path, bname)
+            shutil.copyfile(filename, target_path)
+    return len(directory_content)
 
 
 def main():
     parser = optparse.OptionParser()
-    parser.add_option( '', '--galaxy_data_manager_data_path', dest='galaxy_data_manager_data_path', default='', help='Root path for galaxy_data_manager_data_path' )
+    parser.add_option('', '--galaxy_data_manager_data_path', dest='galaxy_data_manager_data_path', default='', help='Root path for galaxy_data_manager_data_path')
     (options, args) = parser.parse_args()
 
     filename = args[0]
 
-    params = json.loads( open( filename ).read() )
-    target_directory = params[ 'output_data' ][0]['extra_files_path']
+    with open(filename) as fh:
+        params = json.loads(fh.read())
+    target_directory = params['output_data'][0]['extra_files_path']
 
-    data_table_entries = get_data_table_entries( params['param_dict'], options.galaxy_data_manager_data_path )
+    data_table_entries = get_data_table_entries(params['param_dict'], options.galaxy_data_manager_data_path)
 
     # save info to json file
-    open( filename, 'wb' ).write( json.dumps( { "data_tables": data_table_entries} ) )
+    with open(filename, 'wb') as fh:
+        fh.write(json.dumps({"data_tables": data_table_entries}))
 
-    get_file_content( params['param_dict'], target_directory )
+    get_file_content(params['param_dict'], target_directory)
 
 
 if __name__ == "__main__":
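The substantive change in the diff is dropping the Python 2-only urllib2 module: urlretrieve and urlsplit are now imported from their Python 3 locations first, with a fallback to the Python 2 modules. A minimal runnable sketch of the same compatibility pattern, with the removed urllib2.urlparse.urlsplit(...) call replaced by a small helper (the URL below is a hypothetical example):

import os

try:
    # Python 3 locations, tried first
    from urllib.request import urlretrieve  # noqa: F401 - kept to mirror the diff
    from urllib.parse import urlsplit
except ImportError:
    # Python 2 fallbacks, as in the diff
    from urllib import urlretrieve  # noqa: F401
    from urlparse import urlsplit


def url_basename(url):
    # Default file name when the server sends no Content-Disposition header:
    # the last path component of the URL, as in get_file_content() above.
    return os.path.basename(urlsplit(url).path)


print(url_basename('https://example.org/data/genome.fa.gz'))  # -> genome.fa.gz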
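The rewritten main() also swaps bare open(filename).read() / open(filename, 'wb').write(...) calls for with blocks. The surrounding handshake is worth spelling out: Galaxy names a JSON file on the command line, the script reads its parameters from that file, then overwrites the same file with the data table rows Galaxy should register. A rough, self-contained sketch under those assumptions (the table name and row values are hypothetical, and text mode 'w' is used here rather than the diff's 'wb'):

import json
import tempfile


def process(json_filename):
    # Galaxy writes the job parameters into the JSON file it names on the command line.
    with open(json_filename) as fh:
        params = json.loads(fh.read())

    # New files belong under the output dataset's extra_files_path.
    target_directory = params['output_data'][0]['extra_files_path']

    # Hypothetical result: one new row for a hypothetical 'all_fasta' table.
    data_table_entries = {
        'all_fasta': [{'value': 'hg38', 'path': target_directory + '/hg38.fa'}]
    }

    # The same file is overwritten with the entries Galaxy should add.
    with open(json_filename, 'w') as fh:
        fh.write(json.dumps({'data_tables': data_table_entries}))


if __name__ == '__main__':
    # Demo with a throwaway params file standing in for Galaxy's input.
    with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as tmp:
        json.dump({'output_data': [{'extra_files_path': '/tmp/extra'}]}, tmp)
    process(tmp.name)

Writing the result back into the input file is how Galaxy data manager tools report new entries; the with blocks simply guarantee the handles are closed even if parsing fails.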