Mercurial > repos > shellac > sam_consensus_v3
comparison env/lib/python3.9/site-packages/requests_toolbelt/streaming_iterator.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
| author | shellac |
|---|---|
| date | Mon, 22 Mar 2021 18:12:50 +0000 |
| parents | |
| children |
comparison
equal
deleted
inserted
replaced
| -1:000000000000 | 0:4f3585e2f14b |
|---|---|
| 1 # -*- coding: utf-8 -*- | |
| 2 """ | |
| 3 | |
| 4 requests_toolbelt.streaming_iterator | |
| 5 ==================================== | |
| 6 | |
| 7 This holds the implementation details for the :class:`StreamingIterator`. It | |
| 8 is designed for the case where you, the user, know the size of the upload but | |
| 9 need to provide the data as an iterator. This class will allow you to specify | |
| 10 the size and stream the data without using a chunked transfer-encoding. | |
| 11 | |
| 12 """ | |
| 13 from requests.utils import super_len | |
| 14 | |
| 15 from .multipart.encoder import CustomBytesIO, encode_with | |
| 16 | |
| 17 | |
class StreamingIterator(object):

    """
    This class provides a way of allowing iterators with a known size to be
    streamed instead of chunked.

    In requests, if you pass in an iterator it assumes you want to use
    chunked transfer-encoding to upload the data, which not all servers
    support well. Additionally, you may want to set the content-length
    yourself to avoid this but that will not work. The only way to preempt
    requests using a chunked transfer-encoding and forcing it to stream the
    uploads is to mimic a very specific interface. Instead of having to know
    these details you can instead just use this class. You simply provide the
    size and iterator and pass the instance of StreamingIterator to requests
    via the data parameter like so:

    .. code-block:: python

        from requests_toolbelt import StreamingIterator

        import requests

        # Let iterator be some generator that you already have and size be
        # the size of the data produced by the iterator

        r = requests.post(url, data=StreamingIterator(size, iterator))

    You can also pass file-like objects to :py:class:`StreamingIterator` in
    case requests can't determine the filesize itself. This is the case with
    streaming file objects like ``stdin`` or any sockets. Wrapping e.g. files
    that are on disk with ``StreamingIterator`` is unnecessary, because
    requests can determine the filesize itself.

    Naturally, you should also set the `Content-Type` of your upload
    appropriately because the toolbelt will not attempt to guess that for you.
    """

    def __init__(self, size, iterator, encoding='utf-8'):
        """Build a streamable body of a known size.

        :param size: total number of bytes the iterator will produce;
            coerced with ``int()``
        :param iterator: an iterator yielding the body, or a file-like
            object with a ``read`` method
        :param str encoding: encoding applied to any text chunks
        :raises ValueError: if ``size`` is negative
        """
        #: The expected size of the upload
        self.size = int(size)

        if self.size < 0:
            # A zero-byte body is legitimate; only negative sizes are
            # rejected, hence "non-negative" in the message.
            raise ValueError(
                'The size of the upload must be a non-negative integer'
            )

        #: Attribute that requests will check to determine the length of the
        #: body. See bug #80 for more details
        self.len = self.size

        #: Encoding the input data is using
        self.encoding = encoding

        #: The iterator used to generate the upload data
        self.iterator = iterator

        # File-like objects are used directly; bare iterators are wrapped
        # so they expose the same read(size) interface.
        if hasattr(iterator, 'read'):
            self._file = iterator
        else:
            self._file = _IteratorAsBinaryFile(iterator, encoding)

    def read(self, size=-1):
        """Read up to ``size`` bytes from the wrapped source as bytes."""
        return encode_with(self._file.read(size), self.encoding)
| 82 | |
class _IteratorAsBinaryFile(object):
    """Wrap an iterator so it exposes a minimal binary-file ``read`` API.

    Chunks yielded by the iterator are encoded to bytes and buffered so
    that ``read(size)`` can hand back exactly ``size`` bytes regardless of
    the iterator's own chunking.
    """

    def __init__(self, iterator, encoding='utf-8'):
        #: The iterator used to generate the upload data
        self.iterator = iterator

        #: Encoding the iterator is using
        self.encoding = encoding

        # The buffer we use to provide the correct number of bytes requested
        # during a read
        self._buffer = CustomBytesIO()

    def _get_bytes(self):
        """Return the next chunk encoded to bytes, or b'' when exhausted."""
        try:
            return encode_with(next(self.iterator), self.encoding)
        except StopIteration:
            # b'' is falsy, which terminates the _load_bytes loop below.
            return b''

    def _load_bytes(self, size):
        """Top up the buffer until it holds ``size`` bytes or the iterator ends."""
        self._buffer.smart_truncate()
        amount_to_load = size - super_len(self._buffer)
        bytes_to_append = True

        while amount_to_load > 0 and bytes_to_append:
            bytes_to_append = self._get_bytes()
            amount_to_load -= self._buffer.append(bytes_to_append)

    def read(self, size=-1):
        """Read up to ``size`` bytes; ``size == -1`` drains everything.

        Bug fix: the previous ``-1`` path returned ``b''.join(self.iterator)``
        directly, which (a) dropped any bytes already buffered by an earlier
        sized read and (b) skipped ``encode_with``, so a text-yielding
        iterator raised TypeError here while working for sized reads.
        """
        size = int(size)
        if size == -1:
            remainder = b''.join(
                encode_with(chunk, self.encoding) for chunk in self.iterator
            )
            # Drain whatever a previous sized read left buffered first.
            return self._buffer.read(-1) + remainder

        self._load_bytes(size)
        return self._buffer.read(size)
