permissive implementation of iter_content
author: Shivaram Lingamneni <slingamn@cs.stanford.edu>
Thu, 16 Aug 2012 07:09:27 +0000 (00:09 -0700)
committer: Shivaram Lingamneni <slingamn@cs.stanford.edu>
Sun, 2 Sep 2012 08:42:30 +0000 (01:42 -0700)
This allows iter_content and iter_lines to succeed without
crashing even after the response content has been fetched
(iter_content gives you an iterator over the prefetched
content)

requests/models.py
requests/utils.py
tests/test_requests.py

index 29c06c6..1159ad5 100644 (file)
@@ -31,7 +31,7 @@ from .exceptions import (
 from .utils import (
     get_encoding_from_headers, stream_untransfer, guess_filename, requote_uri,
     stream_decode_response_unicode, get_netrc_auth, get_environ_proxies,
-    to_key_val_list, DEFAULT_CA_BUNDLE_PATH, parse_header_links)
+    to_key_val_list, DEFAULT_CA_BUNDLE_PATH, parse_header_links, iter_slices)
 from .compat import (
     cookielib, urlparse, urlunparse, urljoin, urlsplit, urlencode, str, bytes,
     StringIO, is_py2, chardet, json, builtin_str, numeric_types)
@@ -730,9 +730,8 @@ class Response(object):
         length of each item returned as decoding can take place.
         """
         if self._content_consumed:
-            raise RuntimeError(
-                'The content for this response was already consumed'
-            )
+            # simulate reading small chunks of the content
+            return iter_slices(self._content, chunk_size)
 
         def generate():
             while 1:
index 5b8c88d..3639b8f 100644 (file)
@@ -360,6 +360,12 @@ def stream_decode_response_unicode(iterator, r):
     if rv:
         yield rv
 
+def iter_slices(string, slice_length):
+    """Iterate over slices of a string."""
+    pos = 0
+    while pos < len(string):
+        yield string[pos:pos+slice_length]
+        pos += slice_length
 
 def get_unicode_from_response(r):
     """Returns the requested content back in unicode.
index 8e2ebf1..9f4e819 100755 (executable)
@@ -924,6 +924,24 @@ class RequestsTestSuite(TestSetup, TestBaseMixin, unittest.TestCase):
         joined = lines[0] + '\n' + lines[1] + '\r\n' + lines[2]
         self.assertEqual(joined, quote)
 
+    def test_permissive_iter_content(self):
+        """Test that iter_content and iter_lines work even after the body has been fetched."""
+        r = get(httpbin('stream', '10'), prefetch=True)
+        assert r._content_consumed
+        # iter_lines should still work without crashing
+        self.assertEqual(len(list(r.iter_lines())), 10)
+
+        # iter_content with chunk_size=1 should yield the content one byte at a time
+        iter_content_list = list(r.iter_content(chunk_size=1))
+        self.assertTrue(all(len(item) == 1 for item in iter_content_list))
+        # when joined, it should be exactly the original content
+        self.assertEqual(bytes().join(iter_content_list), r.content)
+
+        # test different chunk sizes:
+        for chunk_size in range(2, 20):
+            self.assertEqual(bytes().join(r.iter_content(chunk_size=chunk_size)), r.content)
+
+
     # def test_safe_mode(self):
 
     #     safe = requests.session(config=dict(safe_mode=True))