# HG changeset patch
# User brenninc
# Date 1462652975 14400
# Node ID 33e7d904fdc30328ee3a78e05b336069b9d5fb97
Uploaded original version
diff -r 000000000000 -r 33e7d904fdc3 data_manager/all_fasta_by_path_data_manager.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/all_fasta_by_path_data_manager.xml Sat May 07 16:29:35 2016 -0400
@@ -0,0 +1,37 @@
+<tool id="all_fasta_by_path" name="All fasta by path" tool_type="manage_data" version="0.0.1">
+    <!-- NOTE: the tool id, name and version attributes here are placeholders. -->
+    <description>add a fasta file by server path</description>
+    <command interpreter="python">path_name_value_key_manager.py
+        --value "${value}"
+        --dbkey "${dbkey}"
+        --name "${name}"
+        --path "${path}"
+        --data_table_name "all_fasta"
+        --json_output_file "${json_output_file}"
+    </command>
+    <inputs>
+        <param name="path" type="text" label="Path" help="Full path to the fasta file on the server" />
+        <param name="name" type="text" optional="true" label="Name" help="Defaults to the file name from path, without its extension" />
+        <param name="value" type="text" optional="true" label="Value" help="Defaults to name" />
+        <param name="dbkey" type="text" optional="true" label="dbkey" help="Defaults to value" />
+    </inputs>
+    <outputs>
+        <data name="json_output_file" format="data_manager_json" />
+    </outputs>
+    <help>
+Adds a server path to the all_fasta data table.
+
+The tool will check that the path exists but will NOT check that it holds the expected data type.
+
+If name is not provided, the file name from the path, minus its extension, is used.
+
+If value is not provided, the name will be used (or its default).
+
+If dbkey is not provided, the value will be used (or its default).
+    </help>
+</tool>
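+<!-- Illustrative example of the default cascade (the path below is hypothetical):
+     running the tool with only path=/data/genomes/hg19/hg19canon.fa would register
+     the all_fasta row: value=hg19canon, dbkey=hg19canon, name=hg19canon,
+     path=/data/genomes/hg19/hg19canon.fa
+-->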
diff -r 000000000000 -r 33e7d904fdc3 data_manager/path_name_value_key_manager.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/path_name_value_key_manager.py Sat May 07 16:29:35 2016 -0400
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
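+"""Add a fasta file that already exists on the server to the all_fasta data table.
+
+Illustrative invocation (the path shown is a placeholder, not part of this repository):
+
+    python path_name_value_key_manager.py --path /data/genomes/hg19/hg19canon.fa \
+        --name "Human hg19 canonical" --data_table_name all_fasta \
+        --json_output_file out.json
+
+The script writes a Galaxy data manager JSON document of the form
+{"data_tables": {"all_fasta": [{"value": ..., "dbkey": ..., "name": ..., "path": ...}]}}.
+"""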
+
+import json
+import optparse
+import os.path
+
+def _add_data_table_entry( data_manager_dict, data_table_name, data_table_entry ):
+    # Append the new entry under data_tables/<data_table_name>, creating the keys if needed.
+    data_manager_dict['data_tables'] = data_manager_dict.get( 'data_tables', {} )
+    data_manager_dict['data_tables'][ data_table_name ] = data_manager_dict['data_tables'].get( data_table_name, [] )
+    data_manager_dict['data_tables'][ data_table_name ].append( data_table_entry )
+    return data_manager_dict
+
+
+def check_param(name, value, default=None, check_tab=True):
+    # Fall back to the default when no usable value was supplied ('?' is Galaxy's unspecified dbkey).
+    if value in [ None, '', '?' ]:
+        if default:
+            print("Using {0} for {1} as no value was provided".format( default, name ))
+            value = default
+        else:
+            raise Exception( '{0} is not a valid {1}. You must specify a valid {1}.'.format( value, name ) )
+    if check_tab and "\t" in value:
+        raise Exception( '{0} is not a valid {1}. It may not contain a tab because tabs are used as column separators by Galaxy.'.format( value, name ) )
+    return value
+
+
+def main():
+    # Parse the command line.
+    parser = optparse.OptionParser()
+    parser.add_option( '--value', action='store', type="string", default=None, help='value column for the data table entry' )
+    parser.add_option( '--dbkey', action='store', type="string", default=None, help='dbkey column for the data table entry' )
+    parser.add_option( '--name', action='store', type="string", default=None, help='display name for the data table entry' )
+    parser.add_option( '--path', action='store', type="string", default=None, help='path to the fasta file on the server' )
+    parser.add_option( '--data_table_name', action='store', type="string", default=None, help='name of the data table to add the entry to' )
+    parser.add_option( '--json_output_file', action='store', type="string", default=None, help='JSON file to write the data manager output to' )
+    (options, args) = parser.parse_args()
+
+    # The path must exist; name, value and dbkey cascade from it when not given explicitly.
+    path = check_param("path", options.path)
+    if not os.path.exists(path):
+        raise Exception( 'Unable to find path {0}.'.format( path ) )
+    basename = os.path.basename(path)
+    filename = os.path.splitext(basename)[0]
+    name = check_param("name", options.name, default=filename)
+    value = check_param("value", options.value, default=name)
+    dbkey = check_param("dbkey", options.dbkey, default=value)
+    data_table_name = check_param("data_table_name", options.data_table_name)
+    json_output_file = check_param("json_output_file", options.json_output_file, check_tab=False)
+
+    # Galaxy may pre-populate the output file with the job parameters; report them if present.
+    if os.path.exists(json_output_file):
+        params = json.loads( open( json_output_file ).read() )
+        print("params: {0}".format( params ))
+    else:
+        params = {}
+
+    # Build the data manager JSON, e.g. {"data_tables": {"all_fasta": [{"value": ..., ...}]}}.
+    data_manager_dict = {}
+    data_table_entry = dict( value=value, dbkey=dbkey, name=name, path=path )
+    _add_data_table_entry( data_manager_dict, data_table_name, data_table_entry )
+
+    # Save the info to the JSON output file for Galaxy to pick up.
+    with open( json_output_file, 'w' ) as output_file:
+        output_file.write( json.dumps( data_manager_dict ) )
+        output_file.write( "\n" )
+
+if __name__ == "__main__":
+    main()
diff -r 000000000000 -r 33e7d904fdc3 data_manager_conf.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml Sat May 07 16:29:35 2016 -0400
@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+<data_managers>
+    <!-- NOTE: the data_manager id below is a placeholder. -->
+    <data_manager tool_file="data_manager/all_fasta_by_path_data_manager.xml" id="all_fasta_by_path">
+        <data_table name="all_fasta">
+            <output>
+                <column name="value" />
+                <column name="dbkey" />
+                <column name="name" />
+                <column name="path" />
+            </output>
+        </data_table>
+    </data_manager>
+</data_managers>
diff -r 000000000000 -r 33e7d904fdc3 tool-data/all_fasta.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/all_fasta.loc.sample Sat May 07 16:29:35 2016 -0400
@@ -0,0 +1,17 @@
+#This file lists the locations and dbkeys of all the fasta files
+
+#This file has the format (white space characters are TAB characters):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<file_path>
+#
+#So, all_fasta.loc could look something like this:
+#
+#apiMel3	apiMel3	Honeybee (Apis mellifera): apiMel3	/path/to/genome/apiMel3/apiMel3.fa
+#hg19canon	hg19	Human (Homo sapiens): hg19 Canonical	/path/to/genome/hg19/hg19canon.fa
+#hg19full	hg19	Human (Homo sapiens): hg19 Full	/path/to/genome/hg19/hg19full.fa
+#
+#Your all_fasta.loc file should contain an entry for each individual
+#fasta file. So there will be multiple fasta files for each build,
+#such as with hg19 above.
+#
+
diff -r 000000000000 -r 33e7d904fdc3 tool_data_table_conf.xml.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample Sat May 07 16:29:35 2016 -0400
@@ -0,0 +1,7 @@
+<tables>
+    <!-- Locations of all fasta files under genome directory -->
+    <table name="all_fasta" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/all_fasta.loc" />
+    </table>
+</tables>