import hashlib
from base64 import b64encode
-# from urlparse import urlparse
-from urllib.parse import urlparse
-
+from .compat import urlparse, str, bytes
from .utils import randombytes, parse_dict_header
if is_py2:
- from urllib import quote, unquote
+ from urllib import quote, unquote, urlencode
from urlparse import urlparse, urlunparse, urljoin, urlsplit
from urllib2 import parse_http_list
import cookielib
from .packages.oreos.monkeys import SimpleCookie
+ from StringIO import StringIO
+
+ str = unicode
+ bytes = str
elif is_py3:
from urllib.request import parse_http_list
from http import cookiejar as cookielib
from http.cookies import SimpleCookie
+ from io import StringIO
+
+ str = str
+ bytes = bytes
get_encoding_from_headers, stream_decode_response_unicode,
stream_decompress, guess_filename, requote_path, dict_from_string)
-from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote
+from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, str, bytes, SimpleCookie, is_py3, is_py2
# Import chardet if it is available.
try:
netloc = netloc.encode('idna').decode('utf-8')
- # if isinstance(path, str):
- # path = path.encode('utf-8')
+ if is_py2:
+ if isinstance(path, str):
+ path = path.encode('utf-8')
- # path = requote_path(path)
+ path = requote_path(path)
# print([ scheme, netloc, path, params, query, fragment ])
# print('---------------------')
if not path:
path = '/'
+ # if is_py3:
path = quote(path.encode('utf-8'))
+
url.append(path)
query = p.query
if 'cookie' not in self.headers:
# Simple cookie with our dict.
- # c = oreos.monkeys.SimpleCookie()
- from http.cookies import SimpleCookie
c = SimpleCookie()
for (k, v) in list(self.cookies.items()):
c[k] = v
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
+ def __nonzero__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
@property
def ok(self):
try:
except (UnicodeError, TypeError):
pass
-
-
+ if not content:
+ content = str(content, encoding, errors='replace')
return content
import logging
import socket
-from httplib import (HTTPConnection, HTTPSConnection, HTTPException,
- HTTP_PORT, HTTPS_PORT)
-
-from Queue import Queue, Empty, Full
from socket import error as SocketError, timeout as SocketTimeout
try:
import zlib
from .compat import parse_http_list as _parse_list_header
-from .compat import quote, unquote, cookielib, SimpleCookie
+from .compat import quote, unquote, cookielib, SimpleCookie, is_py2
def dict_from_string(s):
def randombytes(n):
    """Return a bytestring of exactly *n* pseudo-random bytes.

    Not cryptographically secure: values come from the ``random`` module,
    so this is suitable for nonce-style identifiers only.
    """
    if is_py2:
        # py2: chr() already yields one byte of the native (byte) str type.
        L = [chr(random.randrange(0, 256)) for i in range(n)]
    else:
        # py3 fix: the previous `chr(x).encode('utf-8')` emits TWO bytes
        # for any x >= 128, so the result could be up to 2*n bytes long.
        # Building each byte directly guarantees exactly n bytes.
        L = [bytes([random.randrange(0, 256)]) for i in range(n)]
    return b"".join(L)
-
-
import io
+import json
import time
import os
import sys
import unittest
import requests
+from requests.compat import str, bytes, StringIO
# import envoy
from requests import HTTPError
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
-try:
- import omnijson as json
-except ImportError:
- import json
+
if (sys.platform == 'win32') and ('HTTPBIN_URL' not in os.environ):
os.environ['HTTPBIN_URL'] = 'http://httpbin.org/'
rbody = json.loads(r.text)
# Body wasn't valid url encoded data, so the server returns None as
# "form" and the raw body as "data".
- self.assertEqual(rbody.get('form'), {})
+
+ assert rbody.get('form') in (None, {})
self.assertEqual(rbody.get('data'), 'fooaowpeuf')
rbody = json.loads(r.text)
- self.assertEqual(rbody.get('form'), {})
+ assert rbody.get('form') in (None, {})
self.assertEqual(rbody.get('data'), 'foobar')
# Make a request and monkey-patch its contents
r = requests.get(httpbin('get'))
- r.raw = io.StringIO(quote)
+ r.raw = StringIO(quote)
# Make sure iter_lines doesn't chop the trailing bit
lines = '\n'.join(r.iter_lines())