from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
import logging
from socket import timeout as SocketTimeout
from socket import error as SocketError

try:
    import brotli
except ImportError:
    brotli = None

from ._collections import HTTPHeaderDict
from .exceptions import (
    BodyNotHttplibCompatible,
    ProtocolError,
    DecodeError,
    ReadTimeoutError,
    ResponseNotChunked,
    IncompleteRead,
    InvalidHeader,
)
from .packages.six import string_types as basestring, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head

log = logging.getLogger(__name__)


class DeflateDecoder(object):
    def __init__(self):
        self._first_try = True
        self._data = b""
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            decompressed = self._obj.decompress(data)
            if decompressed:
                self._first_try = False
                self._data = None
            return decompressed
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None
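
# Illustrative usage sketch (kept as a comment so module behaviour is
# unchanged; sample payloads are hypothetical). Servers send "deflate" either
# as a zlib-wrapped stream (RFC 1950) or as raw DEFLATE (RFC 1951);
# DeflateDecoder tries the zlib form first and falls back to raw on error,
# so both inputs below decode to the same bytes.
#
#   zlib_body = zlib.compress(b"hello world")                        # RFC 1950
#   raw = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
#   raw_body = raw.compress(b"hello world") + raw.flush()            # RFC 1951
#
#   assert DeflateDecoder().decompress(zlib_body) == b"hello world"
#   assert DeflateDecoder().decompress(raw_body) == b"hello world"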


class GzipDecoderState(object):

    FIRST_MEMBER = 0
    OTHER_MEMBERS = 1
    SWALLOW_DATA = 2


class GzipDecoder(object):
    def __init__(self):
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        ret = bytearray()
        if self._state == GzipDecoderState.SWALLOW_DATA or not data:
            return bytes(ret)
        while True:
            try:
                ret += self._obj.decompress(data)
            except zlib.error:
                previous_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                if previous_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(ret)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(ret)
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
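
# Illustrative usage sketch (comment only; hypothetical sample data, and
# gzip.compress assumes Python 3). A gzip body may contain several
# concatenated members; GzipDecoder keeps decoding members until no unused
# data remains, and tolerates trailing garbage after the first member.
#
#   import gzip
#   body = gzip.compress(b"hello ") + gzip.compress(b"world")
#   assert GzipDecoder().decompress(body) == b"hello world"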


if brotli is not None:

    class BrotliDecoder(object):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self):
            self._obj = brotli.Decompressor()

        def decompress(self, data):
            if hasattr(self._obj, "decompress"):
                return self._obj.decompress(data)
            return self._obj.process(data)

        def flush(self):
            if hasattr(self._obj, "flush"):
                return self._obj.flush()
            return b""


class MultiDecoder(object):
    """
    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes):
        self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]

    def flush(self):
        return self._decoders[0].flush()

    def decompress(self, data):
        for d in reversed(self._decoders):
            data = d.decompress(data)
        return data
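
# Illustrative sketch (comment only; hypothetical sample data, and
# gzip.compress assumes Python 3). Because Content-Encoding lists codings in
# the order they were applied, MultiDecoder undoes them in reverse: for
# "Content-Encoding: gzip, deflate" the body was gzipped first and then
# deflated, so the deflate decoder runs before the gzip decoder.
#
#   import gzip
#   body = zlib.compress(gzip.compress(b"payload"))   # applied: gzip, deflate
#   assert MultiDecoder("gzip, deflate").decompress(body) == b"payload"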


def _get_decoder(mode):
    if "," in mode:
        return MultiDecoder(mode)

    if mode == "gzip":
        return GzipDecoder()

    if brotli is not None and mode == "br":
        return BrotliDecoder()

    return DeflateDecoder()
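
# Illustrative sketch (comment only): the decoder is chosen purely from the
# Content-Encoding value. Anything that is not "gzip", "br" (when brotli is
# importable), or a comma-separated list falls through to DeflateDecoder.
#
#   assert isinstance(_get_decoder("gzip"), GzipDecoder)
#   assert isinstance(_get_decoder("deflate"), DeflateDecoder)
#   assert isinstance(_get_decoder("gzip, deflate"), MultiDecoder)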


class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible with httplib's HTTPResponse but the response ``body``
    is loaded and decoded on-demand when the ``data`` property is accessed.
    This class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of
    that framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The last :class:`~urllib3.util.retry.Retry` that was used during the
        request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    """

    CONTENT_DECODERS = ["gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
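
    # Illustrative usage sketch (comment only; the BytesIO body below is
    # hypothetical). With preload_content=False the body stays on the wrapped
    # file-like object until ``read()``/``data`` is used, and decode_content
    # controls whether the Content-Encoding is undone transparently.
    #
    #   raw = io.BytesIO(zlib.compress(b"hello"))
    #   resp = HTTPResponse(
    #       body=raw,
    #       headers={"content-encoding": "deflate"},
    #       status=200,
    #       preload_content=False,
    #   )
    #   assert resp.read(decode_content=True) == b"hello"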

    def __init__(
        self,
        body="",
        headers=None,
        status=0,
        version=0,
        reason=None,
        strict=0,
        preload_content=True,
        decode_content=True,
        original_response=None,
        pool=None,
        connection=None,
        msg=None,
        retries=None,
        enforce_content_length=False,
        request_method=None,
        request_url=None,
        auto_close=True,
    ):
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content
        self.retries = retries
        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._decoder = None
        self._body = None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0
        self.msg = msg
        self._request_url = request_url

        if body and isinstance(body, (basestring, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, "read"):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")

        return False
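
    # Illustrative sketch (comment only, hypothetical responses) of the three
    # possible return values of get_redirect_location():
    #
    #   resp = HTTPResponse(status=301, headers={"location": "/new"})
    #   resp.get_redirect_location()                        # -> "/new"
    #   HTTPResponse(status=301).get_redirect_location()    # -> None
    #   HTTPResponse(status=200).get_redirect_location()    # -> False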

    def release_conn(self):
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    @property
    def connection(self):
        return self._connection

    def isclosed(self):
        return is_fp_closed(self._fp)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`HTTPResponse.read` if bytes
        are encoded on the wire (e.g., compressed).
        """
        return self._fp_bytes_read
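
    # Illustrative sketch (comment only, hypothetical compressed body):
    # tell() counts raw bytes read off the wire, which can differ from the
    # length of the decoded content returned by read().
    #
    #   compressed = zlib.compress(b"x" * 1000)
    #   resp = HTTPResponse(
    #       body=io.BytesIO(compressed),
    #       headers={"content-encoding": "deflate"},
    #       status=200,
    #       preload_content=False,
    #   )
    #   data = resp.read()     # len(data) == 1000
    #   resp.tell()            # == len(compressed), not 1000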

    def _init_length(self, request_method):
        """
        Set initial length value for Response content if available.
        """
        length = self.headers.get("content-length")

        if length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = set([int(val) for val in length.split(",")])
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
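
    # Illustrative sketch (comment only) of how duplicated Content-Length
    # values permitted by RFC 7230 sec 3.3.2 are collapsed with a set:
    #
    #   "Content-Length: 42, 42"  ->  {42}        -> length 42
    #   "Content-Length: 42, 43"  ->  {42, 43}    -> InvalidHeader
    #   "Content-Length: abc"     ->  ValueError  -> length None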

    def _init_decoder(self):
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if len(encodings):
                    self._decoder = _get_decoder(content_encoding)

    DECODER_ERROR_CLASSES = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            return data

        try:
            if self._decoder:
                data = self._decoder.decompress(data)
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            )
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self):
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            buf = self._decoder.decompress(b"")
            return buf + self._decoder.flush()

        return b""

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.")

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, "Read timed out.")

            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError("Connection broken: %r" % e, e)

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object.
            This is useful if you want the ``.data`` property to continue
            working after having ``.read()`` the file object. (Overridden if
            ``amt`` is set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False
        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read() if not fp_closed else b""
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt) if not fp_closed else b""
                if (
                    amt != 0 and not data
                ):  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True
                    if self.enforce_content_length and self.length_remaining not in (
                        0,
                        None,
                    ):
                        # This is an edge case that httplib failed to cover due
                        # to concerns of backward compatibility. We're
                        # addressing it here to make sure IncompleteRead is
                        # raised during streaming, so all calls with incorrect
                        # Content-Length are caught.
                        raise IncompleteRead(self._fp_bytes_read, self.length_remaining)

        if data:
            self._fp_bytes_read += len(data)
            if self.length_remaining is not None:
                self.length_remaining -= len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data
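
    # Illustrative usage sketch (comment only, hypothetical body). Partial
    # reads skip caching; a full read with cache_content=True keeps ``.data``
    # usable after the underlying file object has been exhausted.
    #
    #   resp = HTTPResponse(body=io.BytesIO(b"abcdef"), status=200,
    #                       preload_content=False)
    #   resp.read(amt=3)                # -> b"abc" (never cached)
    #   resp.read(cache_content=True)   # -> b"def"
    #   resp.data                       # -> b"def" (served from the cache)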

    def stream(self, amt=2 ** 16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            ``amt`` bytes of data per iteration, but may return less. This is
            particularly likely when using compressed data. However, an empty
            byte string will never be yielded.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked and self.supports_chunked_reads():
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data
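
    # Illustrative usage sketch (comment only, hypothetical body): iterate
    # over the decoded body in bounded chunks without preloading it all.
    #
    #   resp = HTTPResponse(body=io.BytesIO(b"a" * 5000), status=200,
    #                       preload_content=False)
    #   total = b"".join(resp.stream(amt=1024))
    #   assert total == b"a" * 5000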

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:
                headers = HTTPHeaderDict(headers.items())
            else:
                # Python 2.7
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, "strict", 0)
        resp = ResponseCls(
            body=r,
            headers=headers,
            status=r.status,
            version=r.version,
            reason=r.reason,
            strict=strict,
            original_response=r,
            **response_kw
        )
        return resp
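
    # Illustrative usage sketch (comment only; assumes a plain http.client
    # connection rather than a urllib3 pool, and "example.com" is
    # hypothetical). from_httplib() wraps the raw httplib response while
    # keeping it reachable as original_response.
    #
    #   conn = httplib.HTTPConnection("example.com")
    #   conn.request("GET", "/")
    #   resp = HTTPResponse.from_httplib(
    #       conn.getresponse(), preload_content=False, decode_content=True
    #   )
    #   body = resp.read()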

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Backwards compatibility for http.cookiejar
    def info(self):
        return self.headers

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

        if self._connection:
            self._connection.close()

        if not self.auto_close:
            io.IOBase.close(self)

    @property
    def closed(self):
        if not self.auto_close:
            return io.IOBase.closed.__get__(self)
        elif self._fp is None:
            return True
        elif hasattr(self._fp, "isclosed"):
            return self._fp.isclosed()
        elif hasattr(self._fp, "closed"):
            return self._fp.closed
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError(
                "The file-like object this HTTPResponse is wrapped "
                "around has no file descriptor"
            )

    def flush(self):
        if (
            self._fp is not None
            and hasattr(self._fp, "flush")
            and not getattr(self._fp, "closed", False)
        ):
            return self._fp.flush()

    def readable(self):
        # This method is required for `io` module compatibility.
        return True

    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)
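
    # Illustrative sketch (comment only, hypothetical body): because
    # readable() and readinto() are implemented, the response can be treated
    # as an ordinary readable stream, e.g. with shutil.copyfileobj or
    # io.BufferedReader.
    #
    #   import shutil
    #   resp = HTTPResponse(body=io.BytesIO(b"payload"), status=200,
    #                       preload_content=False)
    #   dst = io.BytesIO()
    #   shutil.copyfileobj(resp, dst)
    #   assert dst.getvalue() == b"payload"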

    def supports_chunked_reads(self):
        """
        Checks if the underlying file-like object looks like a
        httplib.HTTPResponse object. We do this by testing for the fp
        attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        """
        return hasattr(self._fp, "fp")

    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        line = line.split(b";", 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing."
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be httplib.HTTPResponse like. "
                "It should have an fp attribute which returns raw chunks."
            )

        with self._error_catcher():
            # Don't bother reading the body of a HEAD request.
            if self._original_response and is_response_to_head(self._original_response):
                self._original_response.close()
                return

            # If a response is already read and closed
            # then return immediately.
            if self._fp.fp is None:
                return

            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(
                    chunk, decode_content=decode_content, flush_decoder=False
                )
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # let's defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b"\r\n":
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
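
    # Illustrative note (comment only): callers rarely use read_chunked()
    # directly; stream() dispatches to it automatically when the response has
    # "Transfer-Encoding: chunked" and the wrapped body is an httplib
    # response ("resp" and "handle" below are hypothetical).
    #
    #   for chunk in resp.stream(amt=8192, decode_content=True):
    #       handle(chunk)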

    def geturl(self):
        """
        Returns the URL that was the source of this response.
        If the request that generated this response redirected, this method
        will return the final redirect location.
        """
        if self.retries is not None and len(self.retries.history):
            return self.retries.history[-1].redirect_location
        else:
            return self._request_url

    def __iter__(self):
        buffer = []
        for chunk in self.stream(decode_content=True):
            if b"\n" in chunk:
                chunk = chunk.split(b"\n")
                yield b"".join(buffer) + chunk[0] + b"\n"
                for x in chunk[1:-1]:
                    yield x + b"\n"
                if chunk[-1]:
                    buffer = [chunk[-1]]
                else:
                    buffer = []
            else:
                buffer.append(chunk)

        if buffer:
            yield b"".join(buffer)
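

# Illustrative usage sketch (comment only, hypothetical response): iterating
# over an HTTPResponse yields the decoded content split on b"\n", with every
# yielded piece ending in the newline except possibly the last.
#
#   resp = HTTPResponse(body=io.BytesIO(b"line one\nline two\nno newline"),
#                       status=200, preload_content=False)
#   list(resp)   # -> [b"line one\n", b"line two\n", b"no newline"]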