annotate ncbi_connector.py @ 13:7012080f02d9 draft default tip

updated to production URL
author Matt Shirley <mdshw5@gmail.com>
date Thu, 14 Nov 2013 12:51:53 -0500
parents 58917de44665
children
Ignore whitespace changes - Everywhere: Within whitespace: At end of lines:
rev   line source
9
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
1 #!/usr/bin/env python
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
2 # Retrieves data from external data source applications and stores in a dataset file.
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
3 # Data source application parameters are temporarily stored in the dataset file.
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
4 import socket
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
5 import urllib
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
6 import sys
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
7 import os
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
8 import optparse
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
9 import xml.etree.ElementTree as ElementTree
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
10 from galaxy import eggs #eggs needs to be imported so that galaxy.util can find docutils egg...
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
11 from galaxy.util.json import from_json_string, to_json_string
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
12 from galaxy.util import get_charset_from_http_headers
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
13 import galaxy.model # need to import model before sniff to resolve a circular import dependency
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
14 from galaxy.datatypes import sniff
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
15 from galaxy.datatypes.registry import Registry
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
16 from galaxy.jobs import TOOL_PROVIDED_JOB_METADATA_FILE
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
17
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
# Guard: the script relies on language/stdlib behavior introduced in Python 2.4.
assert sys.version_info[:2] >= ( 2, 4 )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
19
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def stop_err( msg ):
    """Report a fatal error message on stderr and halt the script."""
    stream = sys.stderr
    stream.write( msg )
    sys.exit()
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
23
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
# Prefix used to namespace per-output parameters sent back by the data source.
GALAXY_PARAM_PREFIX = 'GALAXY'
# The Galaxy root lives two directories above this script's real location.
GALAXY_ROOT_DIR = os.path.realpath( os.path.join( os.path.dirname( os.path.realpath( __file__ ) ), '..', '..' ) )
GALAXY_DATATYPES_CONF_FILE = os.path.join( GALAXY_ROOT_DIR, 'datatypes_conf.xml' )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
27
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def load_input_parameters( filename, erase_file = True ):
    """Read the data source parameters stored in ``filename``.

    The file normally holds a JSON job description containing a
    ``param_dict`` entry; older tools wrote a plain ``key<TAB>value``
    tabular file instead, so that format is used as a fallback whenever
    JSON parsing fails.

    Returns a ``( json_params, datasource_params )`` tuple where
    ``json_params`` is ``None`` for the legacy tabular format.  When
    ``erase_file`` is true the file is truncated afterwards so the
    temporary parameters are not left behind in the dataset file.
    """
    datasource_params = {}
    try:
        json_file = open( filename, 'r' )
        try:
            json_params = from_json_string( json_file.read() )
        finally:
            json_file.close() # close even when parsing raises
        datasource_params = json_params.get( 'param_dict' )
    except: # deliberately broad: any failure here means the legacy tabular format
        json_params = None
        tab_file = open( filename, 'r' )
        try:
            for line in tab_file:
                fields = line.strip().split( '\t' )
                # skip malformed lines instead of relying on a bare except
                if len( fields ) >= 2:
                    datasource_params[ fields[0] ] = fields[1]
        finally:
            tab_file.close()
    if erase_file:
        open( filename, 'w' ).close() #open file for writing, then close, removes params from file
    return json_params, datasource_params
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
45
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def deconstruct_multi_filename( multi_filename ):
    """Split an encoded multi-output filename back into its component fields.

    Inverse of ``construct_multi_filename``: fields are underscore-separated;
    a missing trailing ``dbkey`` simply yields a dict without that key.
    """
    field_names = ( 'primary', 'id', 'name', 'visible', 'file_type', 'dbkey' )
    values = multi_filename.split( '_' )
    return dict( zip( field_names, values ) )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
49
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def construct_multi_filename( id, name, file_type, dbkey=None):
    """ Implementation of *Number of Output datasets cannot be determined until tool run* from documentation_.
    .. _documentation: http://wiki.galaxyproject.org/Admin/Tools/Multiple%20Output%20Files

    Builds the underscore-separated filename Galaxy scans for extra outputs;
    the trailing dbkey segment is included only when ``dbkey`` is truthy.
    """
    parts = [ 'primary', id, name, 'visible', file_type ]
    if dbkey:
        parts.append( dbkey )
    return '_'.join( [ str( part ) for part in parts ] )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
60
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def xml_save_to_newfile_directory( xmlfile, directory, id, enhanced_handling=False, datatypes_registry=None ):
    """ Open xmlfile, parse all URLs to fetch. Fetch each file, saving to ``directory``.
    Save first file for last and return for ``page``.

    Returns a ``( multi_filename, page )`` tuple for the FIRST download
    entry, where ``page`` is the freshly opened urllib response; every
    download entry after the first is fetched here and written into
    ``directory``.

    Schema

    ::

        <?xml version="1.0"?>
        <!DOCTYPE downloads [
        <!ELEMENT downloads (download)>
        <!ELEMENT download (resource,url,meta)>
        <!ELEMENT resource (#PCDATA)>
        <!ELEMENT url (#PCDATA)>
        <!ELEMENT id (#PCDATA)>
        <!ELEMENT meta (id,format,type,summary,feature,genome,technique,instrument,assay,sample,description,PMID)>
        <!ELEMENT id (#PCDATA)>
        <!ELEMENT format (#PCDATA)>
        <!ELEMENT type (#PCDATA)>
        <!ELEMENT summary (#PCDATA)>
        <!ELEMENT feature (#PCDATA)>
        <!ELEMENT genome (#PCDATA)>
        <!ELEMENT technique (#PCDATA)>
        <!ELEMENT instrument (#PCDATA)>
        <!ELEMENT assay (#PCDATA)>
        <!ELEMENT sample (#PCDATA)>
        <!ELEMENT description (#PCDATA)>
        <!ELEMENT PMID (#PCDATA)>
        ]>
    """
    # Anything other than a <downloads> root means the remote application
    # answered with something unexpected.
    root = ElementTree.fromstring(xmlfile.read())
    if root.tag != 'downloads':
        stop_err( 'The remote data source application has not sent back a URL parameter in the request.' )
    # traverse xml schema to find URLs, names, and dbkeys
    files_to_fetch = []
    complete = False

    for child in root:
        # NOTE(review): ``complete`` is initialised False and never set True,
        # so this early append can never fire; the per-download append below
        # is what actually records each entry. Confirm before removing.
        if (child.tag == 'download') and (complete == True):
            files_to_fetch.append( ( construct_multi_filename( id, name, file_type, dbkey ), URL ) )

        for sub in child:
            if sub.tag == 'url':
                URL = sub.text
            elif sub.tag == 'meta':

                # Pull only the metadata fields needed to build the Galaxy
                # multi-output filename; other <meta> children are ignored.
                for meta in sub:
                    if meta.tag == 'id':
                        name = meta.text
                    elif meta.tag == 'genome':
                        dbkey = meta.text
                    elif meta.tag == 'format':
                        file_type = meta.text
        # hit the end of our schema
        files_to_fetch.append( ( construct_multi_filename( id, name, file_type, dbkey ), URL ) )

    # Fetch every entry except the first, streaming each into ``directory``.
    if len(files_to_fetch) > 1:
        for filename, URL in files_to_fetch[1:]:
            try:
                cur_filename = os.path.join( directory, filename )
                page = urllib.urlopen( URL )
                multi_dict = deconstruct_multi_filename( filename )
                cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
                if enhanced_handling:
                    # Let Galaxy's sniffer validate / set the dataset type.
                    ext = sniff.handle_uploaded_dataset_file( cur_filename, datatypes_registry, ext = multi_dict['file_type'], is_multi_byte = is_multi_byte )
            except Exception, e:
                stop_err( 'Unable to fetch %s:\n%s' % ( URL, e ) )

    # pass page back to main
    return ( files_to_fetch[0][0], urllib.urlopen( files_to_fetch[0][1] ) )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
131
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
def __main__():
    """Entry point: fetch one or more remote datasets and store them in the
    Galaxy-provided output file(s), optionally handling an XML manifest of
    multiple downloads."""
    # Parse the command line options
    usage = "Usage: ncbi_connector.py filename max_size [options]"
    parser = optparse.OptionParser(usage = usage)
    parser.add_option("-x", "--xml",
                      action="store_true", dest="xmlfile", help="filename defines external resources as xml")
    parser.add_option("-i", "--id", type="string",
                      action="store", dest="id", help="output id")
    parser.add_option("-p", "--path", type="string",
                      action="store", dest="newfilepath", help="new file path")

    (options, args) = parser.parse_args()

    filename = args[0]
    try:
        max_file_size = int( args[1] )
    except:
        # Missing or non-numeric size argument: 0 disables the size check below.
        max_file_size = 0

    job_params, params = load_input_parameters( filename )
    if job_params is None: #using an older tabular file
        # Legacy tabular parameter file: synthesize the job description.
        enhanced_handling = False
        job_params = dict( param_dict = params )
        job_params[ 'output_data' ] = [ dict( out_data_name = 'output',
                                              ext = 'data',
                                              file_name = filename,
                                              extra_files_path = None ) ]
        job_params[ 'job_config' ] = dict( GALAXY_ROOT_DIR=GALAXY_ROOT_DIR, GALAXY_DATATYPES_CONF_FILE=GALAXY_DATATYPES_CONF_FILE, TOOL_PROVIDED_JOB_METADATA_FILE = TOOL_PROVIDED_JOB_METADATA_FILE )
    else:
        enhanced_handling = True
        json_file = open( job_params[ 'job_config' ][ 'TOOL_PROVIDED_JOB_METADATA_FILE' ], 'w' ) #specially named file for output junk to pass onto set metadata

    datatypes_registry = Registry()
    datatypes_registry.load_datatypes( root_dir = job_params[ 'job_config' ][ 'GALAXY_ROOT_DIR' ], config = job_params[ 'job_config' ][ 'GALAXY_DATATYPES_CONF_FILE' ] )

    URL = params.get( 'URL', None ) #using exactly URL indicates that only one dataset is being downloaded
    URL_method = params.get( 'URL_method', None )

    # The Python support for fetching resources from the web is layered. urllib uses the httplib
    # library, which in turn uses the socket library. As of Python 2.3 you can specify how long
    # a socket should wait for a response before timing out. By default the socket module has no
    # timeout and can hang. Currently, the socket timeout is not exposed at the httplib or urllib2
    # levels. However, you can set the default timeout ( in seconds ) globally for all sockets by
    # doing the following.
    socket.setdefaulttimeout( 600 )

    for data_dict in job_params[ 'output_data' ]:
        cur_filename = data_dict.get( 'file_name', filename )
        # Per-output URL (GALAXY|<out_name>|URL) wins over the single URL param.
        cur_URL = params.get( '%s|%s|URL' % ( GALAXY_PARAM_PREFIX, data_dict[ 'out_data_name' ] ), URL )
        if not cur_URL:
            open( cur_filename, 'w' ).write( "" )
            stop_err( 'The remote data source application has not sent back a URL parameter in the request.' )

        # The following calls to urllib.urlopen() will use the above default timeout
        try:
            if not URL_method or URL_method == 'get':
                page = urllib.urlopen( cur_URL )
            elif URL_method == 'post':
                page = urllib.urlopen( cur_URL, urllib.urlencode( params ) )
            # NOTE(review): any other URL_method value leaves ``page`` unbound
            # and the code below raises NameError — confirm callers only ever
            # send 'get'/'post'.
        except Exception, e:
            stop_err( 'The remote data source application may be off line, please try again later. Error: %s' % str( e ) )
        if max_file_size:
            file_size = int( page.info().get( 'Content-Length', 0 ) )
            if file_size > max_file_size:
                stop_err( 'The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, max_file_size ) )
        # If xmlfile is provided, fetch files 2 through n and save to new_file_path, replace page with file 1
        if options.xmlfile:
            multi_filename, page = xml_save_to_newfile_directory( page, options.newfilepath, options.id, enhanced_handling, datatypes_registry )
            multi_dict = deconstruct_multi_filename( multi_filename )

        #do sniff stream for multi_byte
        try:
            cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
        except Exception, e:
            stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )

        #here import checks that upload tool performs
        if enhanced_handling:
            try:
                ext = sniff.handle_uploaded_dataset_file( filename, datatypes_registry, ext = data_dict[ 'ext' ], is_multi_byte = is_multi_byte )
            except Exception, e:
                stop_err( str( e ) )
            # Record the detected extension for Galaxy's set-metadata step.
            info = dict( type = 'dataset',
                         dataset_id = data_dict[ 'dataset_id' ],
                         ext = ext)

            json_file.write( "%s\n" % to_json_string( info ) )
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
219
58917de44665 Uploaded
matt-shirley
parents:
diff changeset
if __name__ == "__main__":
    __main__()