From 5cfbb0aaf50e451179ede8bcd43ca55fbdfebad5 Mon Sep 17 00:00:00 2001
From: Ronny Pfannschmidt
Date: Tue, 17 Jan 2012 13:16:09 +0100
Subject: [PATCH] insanity for chunked iteration

---
 requests/models.py | 36 +++++++++++++++++++++++++++++++-----
 1 file changed, 31 insertions(+), 5 deletions(-)

diff --git a/requests/models.py b/requests/models.py
index c3f1b43..728dbc9 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -611,8 +611,30 @@ class Response(object):
                     break
                 yield chunk
             self._content_consumed = True
+
+        def generate_chunked():
+            resp = self.raw._original_response
+            fp = resp.fp
+            yield fp.read(resp.chunk_left)
+            while 1:
+                #XXX correct line size
+                pending_bytes = fp.readline(80).strip()
+                if not pending_bytes:
+                    break
+                pending_bytes = int(pending_bytes, 16)
+                if pending_bytes == 0:
+                    break
+                while pending_bytes:
+                    chunk = fp.read(min(chunk_size, pending_bytes))
+                    pending_bytes-=len(chunk)
+                    yield chunk
+            self._content_consumed = True
 
-        gen = generate()
+
+        if getattr(self.raw._original_response, 'chunked', False):
+            gen = generate_chunked()
+        else:
+            gen = generate()
 
         if 'gzip' in self.headers.get('content-encoding', ''):
             gen = stream_decompress(gen, mode='gzip')
@@ -642,10 +664,14 @@ class Response(object):
             for line in lines[:-1]:
                 yield line.rstrip()
             # Save the last part of the chunk for next iteration, to keep full line together
-            pending = lines[-1]
-            #if pending is a complete line, give it baack
-            if pending[-1] == '\n':
-                yield pending.rstrip()
+            # lines may be empty for the last chunk of a chunked response
+            if lines:
+                pending = lines[-1]
+                #if pending is a complete line, give it baack
+                if pending[-1] == '\n':
+                    yield pending.rstrip()
+                    pending = None
+            else:
                 pending = None
 
         # Yield the last line
-- 
2.34.1
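
The loop that generate_chunked() adds can be read in isolation: take a hex
chunk-size line, stream that many bytes, and stop on a zero-size chunk. The
sketch below is a minimal standalone rendering of that idea, not the requests
implementation; parse_chunked_body and the io.BytesIO demo are hypothetical
names used only for illustration, and unlike the patch it also consumes the
CRLF that terminates each chunk.

# Hypothetical sketch of the chunk-parsing loop the first hunk adds to
# Response.iter_content(); not part of requests.
import io


def parse_chunked_body(fp, chunk_size=10 * 1024):
    """Yield data from a file-like object holding a chunked
    Transfer-Encoding body: each chunk is a hex size line followed by
    that many bytes and a trailing CRLF; a zero size ends the body."""
    while True:
        size_line = fp.readline().strip()
        if not size_line:
            break
        # A chunk extension (";...") may follow the hex size.
        pending_bytes = int(size_line.split(b';', 1)[0], 16)
        if pending_bytes == 0:
            break
        while pending_bytes:
            data = fp.read(min(chunk_size, pending_bytes))
            if not data:
                return
            pending_bytes -= len(data)
            yield data
        fp.readline()  # consume the CRLF that terminates the chunk


if __name__ == '__main__':
    body = io.BytesIO(b'5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n')
    print(b''.join(parse_chunked_body(body)))  # b'hello world'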
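
The second hunk guards iter_lines() against a chunk whose splitlines() result
is empty, as the empty final chunk of a chunked body would be. A minimal
sketch of that pending-line buffering, assuming plain string chunks and a
hypothetical helper name (iter_lines_from_chunks is not part of requests):

# Hypothetical sketch of the buffering the second hunk fixes: split each
# chunk on line boundaries, hold back a trailing partial line, and skip
# chunks that split into no lines at all.
def iter_lines_from_chunks(chunks):
    pending = None
    for chunk in chunks:
        if pending is not None:
            chunk = pending + chunk
        pending = None
        lines = chunk.splitlines(True)
        for line in lines[:-1]:
            yield line.rstrip()
        if lines:
            last = lines[-1]
            if last.endswith('\n'):
                yield last.rstrip()
            else:
                pending = last
    if pending is not None:
        yield pending.rstrip()


print(list(iter_lines_from_chunks(['ab\ncd', 'ef\n', '', 'gh'])))
# ['ab', 'cdef', 'gh']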