env/lib/python3.9/site-packages/requests/models.py @ 0:4f3585e2f14b (draft, default, tip)

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"

author:   shellac
date:     Mon, 22 Mar 2021 18:12:50 +0000
parents:  (none)
children: (none)

# -*- coding: utf-8 -*-

"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import datetime
import sys

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna

from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url
from urllib3.exceptions import (
    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)

from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict

from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
    ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
    guess_filename, get_auth_from_url, requote_uri,
    stream_decode_response_unicode, to_key_val_list, parse_header_links,
    iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
    Callable, Mapping,
    cookielib, urlunparse, urlsplit, urlencode, str, bytes,
    is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes

#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin(object):
    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data
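
    # Illustrative sketch (not part of the upstream docstring): a dict or a
    # list of 2-tuples is urlencoded, with sequence values expanded via
    # ``doseq``, while strings, bytes and file-like objects pass through
    # unchanged. For example:
    #
    #   >>> RequestEncodingMixin._encode_params([('key', 'value'), ('list', ['a', 'b'])])
    #   'key=value&list=a&list=b'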

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, 'read'):
                fdata = fp.read()
            elif fp is None:
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
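
    # Illustrative sketch of the ``files`` value shapes accepted above
    # (assuming ``fp`` is an open file object); the 3- and 4-tuple forms add
    # a content type and per-part headers respectively:
    #
    #   files = {'report': fp}                                  # filename guessed
    #   files = {'report': ('report.csv', fp)}                  # explicit filename
    #   files = {'report': ('report.csv', fp, 'text/csv')}      # + content type
    #   files = {'report': ('report.csv', fp, 'text/csv',
    #                       {'X-Custom': '1'})}                 # + part headers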


class RequestHooksMixin(object):
    def register_hook(self, event, hook):
        """Properly register a hook."""

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
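
    # Illustrative sketch: 'response' is the only event Requests registers by
    # default, so a typical hooks mapping looks like
    #
    #   >>> def print_url(r, *args, **kwargs):
    #   ...     print(r.url)
    #   >>> req = Request('GET', 'https://httpbin.org/get',
    #   ...               hooks={'response': print_url})
    #
    # and deregister_hook('response', print_url) removes it again.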


class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):

        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p


class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url
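
    # Illustrative sketch: ``params`` are encoded and appended to any query
    # string already present in the URL, e.g.
    #
    #   >>> p = PreparedRequest()
    #   >>> p.prepare_url('https://httpbin.org/get?a=1', {'b': '2'})
    #   >>> p.url
    #   'https://httpbin.org/get?a=1&b=2'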

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, Mapping))
        ])

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body
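
    # Illustrative sketch of the default Content-Type chosen above when the
    # caller has not set one explicitly:
    #
    #   data={'k': 'v'}       -> 'application/x-www-form-urlencoded'
    #   json={'k': 'v'}       -> 'application/json'
    #   files={'f': fp}       -> 'multipart/form-data; boundary=...'
    #   data=<file object>    -> streamed body, no Content-Type header is set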

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
        """True if this Response is one of the permanent versions of redirect."""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks
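
    # Illustrative sketch (assuming a response requested with ``stream=True``):
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/bytes/102400', stream=True)
    #   >>> with open('out.bin', 'wb') as fd:
    #   ...     for chunk in r.iter_content(chunk_size=8192):
    #   ...         fd.write(chunk)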

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending
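
    # Illustrative sketch: lines are yielded as bytes unless ``decode_unicode``
    # is set, and empty keep-alive lines are typically filtered by the caller:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/stream/3', stream=True)
    #   >>> for line in r.iter_lines():
    #   ...     if line:
    #   ...         print(line)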

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)
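
    # Illustrative sketch: ``json()`` decodes the body and raises ValueError
    # (a ``json.JSONDecodeError`` on Python 3) when the body is not valid JSON:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/json')
    #   >>> r.json()['slideshow']['title']
    #   'Sample Slide Show'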

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l
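
    # Illustrative sketch: for a GitHub-style ``Link`` header the result is
    # keyed by each link's ``rel``:
    #
    #   >>> r.headers['link']
    #   '<https://api.github.com/users?page=2>; rel="next"'
    #   >>> r.links['next']['url']
    #   'https://api.github.com/users?page=2'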

    def raise_for_status(self):
        """Raises :class:`HTTPError`, if one occurred."""

        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()