comparison env/lib/python3.9/site-packages/requests/adapters.py @ 0:4f3585e2f14b draft default tip

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author shellac
date Mon, 22 Mar 2021 18:12:50 +0000
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:4f3585e2f14b
1 # -*- coding: utf-8 -*-
2
3 """
4 requests.adapters
5 ~~~~~~~~~~~~~~~~~
6
7 This module contains the transport adapters that Requests uses to define
8 and maintain connections.
9 """
10
11 import os.path
12 import socket
13
14 from urllib3.poolmanager import PoolManager, proxy_from_url
15 from urllib3.response import HTTPResponse
16 from urllib3.util import parse_url
17 from urllib3.util import Timeout as TimeoutSauce
18 from urllib3.util.retry import Retry
19 from urllib3.exceptions import ClosedPoolError
20 from urllib3.exceptions import ConnectTimeoutError
21 from urllib3.exceptions import HTTPError as _HTTPError
22 from urllib3.exceptions import MaxRetryError
23 from urllib3.exceptions import NewConnectionError
24 from urllib3.exceptions import ProxyError as _ProxyError
25 from urllib3.exceptions import ProtocolError
26 from urllib3.exceptions import ReadTimeoutError
27 from urllib3.exceptions import SSLError as _SSLError
28 from urllib3.exceptions import ResponseError
29 from urllib3.exceptions import LocationValueError
30
31 from .models import Response
32 from .compat import urlparse, basestring
33 from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
34 get_encoding_from_headers, prepend_scheme_if_needed,
35 get_auth_from_url, urldefragauth, select_proxy)
36 from .structures import CaseInsensitiveDict
37 from .cookies import extract_cookies_to_jar
38 from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
39 ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
40 InvalidURL)
41 from .auth import _basic_auth_str
42
43 try:
44 from urllib3.contrib.socks import SOCKSProxyManager
45 except ImportError:
46 def SOCKSProxyManager(*args, **kwargs):
47 raise InvalidSchema("Missing dependencies for SOCKS support.")
48
49 DEFAULT_POOLBLOCK = False
50 DEFAULT_POOLSIZE = 10
51 DEFAULT_RETRIES = 0
52 DEFAULT_POOL_TIMEOUT = None
53
54
55 class BaseAdapter(object):
56 """The Base Transport Adapter"""
57
58 def __init__(self):
59 super(BaseAdapter, self).__init__()
60
61 def send(self, request, stream=False, timeout=None, verify=True,
62 cert=None, proxies=None):
63 """Sends PreparedRequest object. Returns Response object.
64
65 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
66 :param stream: (optional) Whether to stream the request content.
67 :param timeout: (optional) How long to wait for the server to send
68 data before giving up, as a float, or a :ref:`(connect timeout,
69 read timeout) <timeouts>` tuple.
70 :type timeout: float or tuple
71 :param verify: (optional) Either a boolean, in which case it controls whether we verify
72 the server's TLS certificate, or a string, in which case it must be a path
73 to a CA bundle to use
74 :param cert: (optional) Any user-provided SSL certificate to be trusted.
75 :param proxies: (optional) The proxies dictionary to apply to the request.
76 """
77 raise NotImplementedError
78
79 def close(self):
80 """Cleans up adapter specific items."""
81 raise NotImplementedError
82
83
84 class HTTPAdapter(BaseAdapter):
85 """The built-in HTTP Adapter for urllib3.
86
87 Provides a general-case interface for Requests sessions to contact HTTP and
88 HTTPS urls by implementing the Transport Adapter interface. This class will
89 usually be created by the :class:`Session <Session>` class under the
90 covers.
91
92 :param pool_connections: The number of urllib3 connection pools to cache.
93 :param pool_maxsize: The maximum number of connections to save in the pool.
94 :param max_retries: The maximum number of retries each connection
95 should attempt. Note, this applies only to failed DNS lookups, socket
96 connections and connection timeouts, never to requests where data has
97 made it to the server. By default, Requests does not retry failed
98 connections. If you need granular control over the conditions under
99 which we retry a request, import urllib3's ``Retry`` class and pass
100 that instead.
101 :param pool_block: Whether the connection pool should block for connections.
102
103 Usage::
104
105 >>> import requests
106 >>> s = requests.Session()
107 >>> a = requests.adapters.HTTPAdapter(max_retries=3)
108 >>> s.mount('http://', a)
109 """
110 __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
111 '_pool_block']
112
113 def __init__(self, pool_connections=DEFAULT_POOLSIZE,
114 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
115 pool_block=DEFAULT_POOLBLOCK):
116 if max_retries == DEFAULT_RETRIES:
117 self.max_retries = Retry(0, read=False)
118 else:
119 self.max_retries = Retry.from_int(max_retries)
120 self.config = {}
121 self.proxy_manager = {}
122
123 super(HTTPAdapter, self).__init__()
124
125 self._pool_connections = pool_connections
126 self._pool_maxsize = pool_maxsize
127 self._pool_block = pool_block
128
129 self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
130
131 def __getstate__(self):
132 return {attr: getattr(self, attr, None) for attr in self.__attrs__}
133
134 def __setstate__(self, state):
135 # Can't handle by adding 'proxy_manager' to self.__attrs__ because
136 # self.poolmanager uses a lambda function, which isn't pickleable.
137 self.proxy_manager = {}
138 self.config = {}
139
140 for attr, value in state.items():
141 setattr(self, attr, value)
142
143 self.init_poolmanager(self._pool_connections, self._pool_maxsize,
144 block=self._pool_block)
145
146 def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
147 """Initializes a urllib3 PoolManager.
148
149 This method should not be called from user code, and is only
150 exposed for use when subclassing the
151 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
152
153 :param connections: The number of urllib3 connection pools to cache.
154 :param maxsize: The maximum number of connections to save in the pool.
155 :param block: Block when no free connections are available.
156 :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
157 """
158 # save these values for pickling
159 self._pool_connections = connections
160 self._pool_maxsize = maxsize
161 self._pool_block = block
162
163 self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
164 block=block, strict=True, **pool_kwargs)
165
166 def proxy_manager_for(self, proxy, **proxy_kwargs):
167 """Return urllib3 ProxyManager for the given proxy.
168
169 This method should not be called from user code, and is only
170 exposed for use when subclassing the
171 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
172
173 :param proxy: The proxy to return a urllib3 ProxyManager for.
174 :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
175 :returns: ProxyManager
176 :rtype: urllib3.ProxyManager
177 """
178 if proxy in self.proxy_manager:
179 manager = self.proxy_manager[proxy]
180 elif proxy.lower().startswith('socks'):
181 username, password = get_auth_from_url(proxy)
182 manager = self.proxy_manager[proxy] = SOCKSProxyManager(
183 proxy,
184 username=username,
185 password=password,
186 num_pools=self._pool_connections,
187 maxsize=self._pool_maxsize,
188 block=self._pool_block,
189 **proxy_kwargs
190 )
191 else:
192 proxy_headers = self.proxy_headers(proxy)
193 manager = self.proxy_manager[proxy] = proxy_from_url(
194 proxy,
195 proxy_headers=proxy_headers,
196 num_pools=self._pool_connections,
197 maxsize=self._pool_maxsize,
198 block=self._pool_block,
199 **proxy_kwargs)
200
201 return manager
202
203 def cert_verify(self, conn, url, verify, cert):
204 """Verify a SSL certificate. This method should not be called from user
205 code, and is only exposed for use when subclassing the
206 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
207
208 :param conn: The urllib3 connection object associated with the cert.
209 :param url: The requested URL.
210 :param verify: Either a boolean, in which case it controls whether we verify
211 the server's TLS certificate, or a string, in which case it must be a path
212 to a CA bundle to use
213 :param cert: The SSL certificate to verify.
214 """
215 if url.lower().startswith('https') and verify:
216
217 cert_loc = None
218
219 # Allow self-specified cert location.
220 if verify is not True:
221 cert_loc = verify
222
223 if not cert_loc:
224 cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
225
226 if not cert_loc or not os.path.exists(cert_loc):
227 raise IOError("Could not find a suitable TLS CA certificate bundle, "
228 "invalid path: {}".format(cert_loc))
229
230 conn.cert_reqs = 'CERT_REQUIRED'
231
232 if not os.path.isdir(cert_loc):
233 conn.ca_certs = cert_loc
234 else:
235 conn.ca_cert_dir = cert_loc
236 else:
237 conn.cert_reqs = 'CERT_NONE'
238 conn.ca_certs = None
239 conn.ca_cert_dir = None
240
241 if cert:
242 if not isinstance(cert, basestring):
243 conn.cert_file = cert[0]
244 conn.key_file = cert[1]
245 else:
246 conn.cert_file = cert
247 conn.key_file = None
248 if conn.cert_file and not os.path.exists(conn.cert_file):
249 raise IOError("Could not find the TLS certificate file, "
250 "invalid path: {}".format(conn.cert_file))
251 if conn.key_file and not os.path.exists(conn.key_file):
252 raise IOError("Could not find the TLS key file, "
253 "invalid path: {}".format(conn.key_file))
254
255 def build_response(self, req, resp):
256 """Builds a :class:`Response <requests.Response>` object from a urllib3
257 response. This should not be called from user code, and is only exposed
258 for use when subclassing the
259 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
260
261 :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
262 :param resp: The urllib3 response object.
263 :rtype: requests.Response
264 """
265 response = Response()
266
267 # Fallback to None if there's no status_code, for whatever reason.
268 response.status_code = getattr(resp, 'status', None)
269
270 # Make headers case-insensitive.
271 response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
272
273 # Set encoding.
274 response.encoding = get_encoding_from_headers(response.headers)
275 response.raw = resp
276 response.reason = response.raw.reason
277
278 if isinstance(req.url, bytes):
279 response.url = req.url.decode('utf-8')
280 else:
281 response.url = req.url
282
283 # Add new cookies from the server.
284 extract_cookies_to_jar(response.cookies, req, resp)
285
286 # Give the Response some context.
287 response.request = req
288 response.connection = self
289
290 return response
291
292 def get_connection(self, url, proxies=None):
293 """Returns a urllib3 connection for the given URL. This should not be
294 called from user code, and is only exposed for use when subclassing the
295 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
296
297 :param url: The URL to connect to.
298 :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
299 :rtype: urllib3.ConnectionPool
300 """
301 proxy = select_proxy(url, proxies)
302
303 if proxy:
304 proxy = prepend_scheme_if_needed(proxy, 'http')
305 proxy_url = parse_url(proxy)
306 if not proxy_url.host:
307 raise InvalidProxyURL("Please check proxy URL. It is malformed"
308 " and could be missing the host.")
309 proxy_manager = self.proxy_manager_for(proxy)
310 conn = proxy_manager.connection_from_url(url)
311 else:
312 # Only scheme should be lower case
313 parsed = urlparse(url)
314 url = parsed.geturl()
315 conn = self.poolmanager.connection_from_url(url)
316
317 return conn
318
319 def close(self):
320 """Disposes of any internal state.
321
322 Currently, this closes the PoolManager and any active ProxyManager,
323 which closes any pooled connections.
324 """
325 self.poolmanager.clear()
326 for proxy in self.proxy_manager.values():
327 proxy.clear()
328
329 def request_url(self, request, proxies):
330 """Obtain the url to use when making the final request.
331
332 If the message is being sent through a HTTP proxy, the full URL has to
333 be used. Otherwise, we should only use the path portion of the URL.
334
335 This should not be called from user code, and is only exposed for use
336 when subclassing the
337 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
338
339 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
340 :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
341 :rtype: str
342 """
343 proxy = select_proxy(request.url, proxies)
344 scheme = urlparse(request.url).scheme
345
346 is_proxied_http_request = (proxy and scheme != 'https')
347 using_socks_proxy = False
348 if proxy:
349 proxy_scheme = urlparse(proxy).scheme.lower()
350 using_socks_proxy = proxy_scheme.startswith('socks')
351
352 url = request.path_url
353 if is_proxied_http_request and not using_socks_proxy:
354 url = urldefragauth(request.url)
355
356 return url
357
358 def add_headers(self, request, **kwargs):
359 """Add any headers needed by the connection. As of v2.0 this does
360 nothing by default, but is left for overriding by users that subclass
361 the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
362
363 This should not be called from user code, and is only exposed for use
364 when subclassing the
365 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
366
367 :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
368 :param kwargs: The keyword arguments from the call to send().
369 """
370 pass
371
372 def proxy_headers(self, proxy):
373 """Returns a dictionary of the headers to add to any request sent
374 through a proxy. This works with urllib3 magic to ensure that they are
375 correctly sent to the proxy, rather than in a tunnelled request if
376 CONNECT is being used.
377
378 This should not be called from user code, and is only exposed for use
379 when subclassing the
380 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
381
382 :param proxy: The url of the proxy being used for this request.
383 :rtype: dict
384 """
385 headers = {}
386 username, password = get_auth_from_url(proxy)
387
388 if username:
389 headers['Proxy-Authorization'] = _basic_auth_str(username,
390 password)
391
392 return headers
393
394 def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
395 """Sends PreparedRequest object. Returns Response object.
396
397 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
398 :param stream: (optional) Whether to stream the request content.
399 :param timeout: (optional) How long to wait for the server to send
400 data before giving up, as a float, or a :ref:`(connect timeout,
401 read timeout) <timeouts>` tuple.
402 :type timeout: float or tuple or urllib3 Timeout object
403 :param verify: (optional) Either a boolean, in which case it controls whether
404 we verify the server's TLS certificate, or a string, in which case it
405 must be a path to a CA bundle to use
406 :param cert: (optional) Any user-provided SSL certificate to be trusted.
407 :param proxies: (optional) The proxies dictionary to apply to the request.
408 :rtype: requests.Response
409 """
410
411 try:
412 conn = self.get_connection(request.url, proxies)
413 except LocationValueError as e:
414 raise InvalidURL(e, request=request)
415
416 self.cert_verify(conn, request.url, verify, cert)
417 url = self.request_url(request, proxies)
418 self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
419
420 chunked = not (request.body is None or 'Content-Length' in request.headers)
421
422 if isinstance(timeout, tuple):
423 try:
424 connect, read = timeout
425 timeout = TimeoutSauce(connect=connect, read=read)
426 except ValueError as e:
427 # this may raise a string formatting error.
428 err = ("Invalid timeout {}. Pass a (connect, read) "
429 "timeout tuple, or a single float to set "
430 "both timeouts to the same value".format(timeout))
431 raise ValueError(err)
432 elif isinstance(timeout, TimeoutSauce):
433 pass
434 else:
435 timeout = TimeoutSauce(connect=timeout, read=timeout)
436
437 try:
438 if not chunked:
439 resp = conn.urlopen(
440 method=request.method,
441 url=url,
442 body=request.body,
443 headers=request.headers,
444 redirect=False,
445 assert_same_host=False,
446 preload_content=False,
447 decode_content=False,
448 retries=self.max_retries,
449 timeout=timeout
450 )
451
452 # Send the request.
453 else:
454 if hasattr(conn, 'proxy_pool'):
455 conn = conn.proxy_pool
456
457 low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
458
459 try:
460 low_conn.putrequest(request.method,
461 url,
462 skip_accept_encoding=True)
463
464 for header, value in request.headers.items():
465 low_conn.putheader(header, value)
466
467 low_conn.endheaders()
468
469 for i in request.body:
470 low_conn.send(hex(len(i))[2:].encode('utf-8'))
471 low_conn.send(b'\r\n')
472 low_conn.send(i)
473 low_conn.send(b'\r\n')
474 low_conn.send(b'0\r\n\r\n')
475
476 # Receive the response from the server
477 try:
478 # For Python 2.7, use buffering of HTTP responses
479 r = low_conn.getresponse(buffering=True)
480 except TypeError:
481 # For compatibility with Python 3.3+
482 r = low_conn.getresponse()
483
484 resp = HTTPResponse.from_httplib(
485 r,
486 pool=conn,
487 connection=low_conn,
488 preload_content=False,
489 decode_content=False
490 )
491 except:
492 # If we hit any problems here, clean up the connection.
493 # Then, reraise so that we can handle the actual exception.
494 low_conn.close()
495 raise
496
497 except (ProtocolError, socket.error) as err:
498 raise ConnectionError(err, request=request)
499
500 except MaxRetryError as e:
501 if isinstance(e.reason, ConnectTimeoutError):
502 # TODO: Remove this in 3.0.0: see #2811
503 if not isinstance(e.reason, NewConnectionError):
504 raise ConnectTimeout(e, request=request)
505
506 if isinstance(e.reason, ResponseError):
507 raise RetryError(e, request=request)
508
509 if isinstance(e.reason, _ProxyError):
510 raise ProxyError(e, request=request)
511
512 if isinstance(e.reason, _SSLError):
513 # This branch is for urllib3 v1.22 and later.
514 raise SSLError(e, request=request)
515
516 raise ConnectionError(e, request=request)
517
518 except ClosedPoolError as e:
519 raise ConnectionError(e, request=request)
520
521 except _ProxyError as e:
522 raise ProxyError(e)
523
524 except (_SSLError, _HTTPError) as e:
525 if isinstance(e, _SSLError):
526 # This branch is for urllib3 versions earlier than v1.22
527 raise SSLError(e, request=request)
528 elif isinstance(e, ReadTimeoutError):
529 raise ReadTimeout(e, request=request)
530 else:
531 raise
532
533 return self.build_response(request, resp)