bcclaywell/argo_navis: venv/lib/python2.7/site-packages/requests/packages/urllib3/response.py @ 0:d67268158946 (draft)

planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b
| author | bcclaywell |
|---|---|
| date | Mon, 12 Oct 2015 17:43:33 -0400 |
| parents | |
| children | |

```python
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout

from ._collections import HTTPHeaderDict
from .exceptions import (
    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head


class DeflateDecoder(object):

    def __init__(self):
        self._first_try = True
        self._data = binary_type()
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            return self._obj.decompress(data)
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None


class GzipDecoder(object):

    def __init__(self):
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data
        return self._obj.decompress(data)


def _get_decoder(mode):
    if mode == 'gzip':
        return GzipDecoder()

    return DeflateDecoder()


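# Illustrative note (not part of the upstream module): DeflateDecoder above
# first tries the data as a zlib-wrapped stream and, on zlib.error, replays
# the buffered bytes through a raw-DEFLATE decompressor, so both flavours of
# the "deflate" content-encoding seen in the wild are handled. A rough
# doctest-style sketch using this module's own helper:
#
#   >>> import zlib
#   >>> c = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
#   >>> payload = c.compress(b'hello') + c.flush()   # raw DEFLATE, no zlib header
#   >>> _get_decoder('deflate').decompress(payload)
#   'hello'

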
class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible with httplib's HTTPResponse but the response ``body``
    is loaded and decoded on-demand when the ``data`` property is accessed.
    This class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of
    that framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header (e.g. 'gzip' and 'deflate'); if False, the
        raw, undecoded body is returned.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """

    CONTENT_DECODERS = ['gzip', 'deflate']
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):

        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None
        self._body = None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0

        if body and isinstance(body, (basestring, binary_type)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get('transfer-encoding', '').lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # We certainly don't want to preload content when the response is chunked.
        if not self.chunked and preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get('location')

        return False

    def release_conn(self):
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`HTTPResponse.read` if bytes
        are encoded on the wire (e.g., compressed).
        """
        return self._fp_bytes_read

    def _init_decoder(self):
        """
        Set up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(content_encoding)

    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.
        """
        try:
            if decode_content and self._decoder:
                data = self._decoder.decompress(data)
        except (IOError, zlib.error) as e:
            content_encoding = self.headers.get('content-encoding', '').lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding, e)

        if flush_decoder and decode_content and self._decoder:
            buf = self._decoder.decompress(binary_type())
            data += buf + self._decoder.flush()

        return data

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level Python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level API.

        On exit, release the connection back to the pool.
        """
        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except HTTPException as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)
        except Exception:
            # The response may not be closed but we're not going to use it anymore
            # so close it now to ensure that the connection is released back to the pool.
            if self._original_response and not self._original_response.isclosed():
                self._original_response.close()

            raise
        finally:
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False
        data = None

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

        if data:
            self._fp_bytes_read += len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            ``amt`` bytes of data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the empty
            string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked:
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

    @property
    def closed(self):
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        # This method is required for `io` module compatibility.
        return True

    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[:len(temp)] = temp
            return len(temp)

    def _update_chunk_length(self):
        # First, we'll figure out the length of the next chunk and then
        # we'll try to read it from the socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        line = line.split(b';', 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked("Response is not chunked. "
                                     "Header 'transfer-encoding: chunked' is missing.")

        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                yield self._decode(chunk, decode_content=decode_content,
                                   flush_decoder=True)

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
```
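
Below is a minimal usage sketch, not part of the file above: it feeds the class an in-memory file object so the `preload_content` and `stream()` paths can be seen without a live socket. The import path assumes this vendored copy under `requests.packages`; a standalone urllib3 install would import `urllib3.response` instead.

```python
# Minimal usage sketch (illustrative only); assumes the vendored import path.
import io
import zlib

from requests.packages.urllib3.response import HTTPResponse

payload = zlib.compress(b'hello world')  # zlib-wrapped body, i.e. a "deflate" encoding

# Eager path: the body is read and decoded during construction and cached on .data.
resp = HTTPResponse(body=io.BytesIO(payload),
                    headers={'content-encoding': 'deflate'},
                    status=200,
                    preload_content=True)
print(resp.data)    # the decompressed body
print(resp.tell())  # compressed bytes pulled off the "wire"

# Lazy path: decode incrementally instead of buffering the whole body at once.
resp = HTTPResponse(body=io.BytesIO(payload),
                    headers={'content-encoding': 'deflate'},
                    status=200,
                    preload_content=False)
for chunk in resp.stream(amt=4):
    print(repr(chunk))
```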

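In normal use the class is built by `from_httplib()` on top of a live `httplib` response, with `pool` and `connection` supplied by the connection pool so that `release_conn()` can hand the socket back. The sketch below is a rough, network-dependent illustration; the host `httpbin.org` and its `/gzip` endpoint are assumptions made for the example, not anything referenced by this file.

```python
# Rough sketch of the from_httplib() entry point; needs network access and
# assumes httpbin.org is reachable (purely illustrative).
import httplib  # Python 2; use http.client on Python 3

from requests.packages.urllib3.response import HTTPResponse

conn = httplib.HTTPConnection('httpbin.org')
conn.request('GET', '/gzip')
raw = conn.getresponse()

# Wrap the httplib response; leave the body on the socket and decode lazily.
resp = HTTPResponse.from_httplib(raw, preload_content=False, decode_content=True)
print(resp.status)
print(resp.getheader('content-encoding'))

body = b''.join(resp.stream(2 ** 10))  # gzip/deflate bodies are decoded on the fly
print(len(body))

conn.close()
```

Without a pool attached, `release_conn()` is a no-op here; in urllib3 proper, `HTTPConnectionPool.urlopen()` passes `pool` and `connection` so that fully read responses return their sockets to the pool automatically.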