utils.py

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Iterator


def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
    # type: (Response, int) -> Iterator[bytes]
    """Given a requests Response, provide the data chunks.
    """
    try:
        # Special case for urllib3.
        for chunk in response.raw.stream(
            chunk_size,
            # We use decode_content=False here because we don't
            # want urllib3 to mess with the raw bytes we get
            # from the server. If we decompress inside of
            # urllib3 then we cannot verify the checksum
            # because the checksum will be of the compressed
            # file. This breakage will only occur if the
            # server adds a Content-Encoding header, which
            # depends on how the server was configured:
            # - Some servers will notice that the file isn't a
            #   compressible file and will leave the file alone
            #   and with an empty Content-Encoding
            # - Some servers will notice that the file is
            #   already compressed and will leave the file
            #   alone and will add a Content-Encoding: gzip
            #   header
            # - Some servers won't notice anything at all and
            #   will take a file that's already been compressed
            #   and compress it again and set the
            #   Content-Encoding: gzip header
            #
            # By setting this not to decode automatically we
            # hope to eliminate problems with the second case.
            decode_content=False,
        ):
            yield chunk
    except AttributeError:
        # Standard file-like object.
        while True:
            chunk = response.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
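

# --- Illustrative usage (a minimal sketch, not part of pip's utils.py) ---
# Because decode_content=False yields the raw bytes exactly as the server sent
# them, the chunks can be hashed as-is and compared against a published
# checksum. The helper below, its URL/digest parameters, and the use of the
# vendored requests package for the request itself are assumptions made for
# this example only.
import hashlib


def download_and_verify(url, expected_sha256, chunk_size=CONTENT_CHUNK_SIZE):
    # type: (str, str, int) -> bytes
    """Download a URL and check a SHA-256 digest computed over the raw bytes."""
    from pip._vendor import requests

    response = requests.get(url, stream=True)
    digest = hashlib.sha256()
    parts = []
    for chunk in response_chunks(response, chunk_size):
        # Hash and accumulate the undecoded chunks produced above.
        digest.update(chunk)
        parts.append(chunk)
    if digest.hexdigest() != expected_sha256:
        raise ValueError("checksum mismatch for {}".format(url))
    return b"".join(parts)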