platform/framework/web/crosswalk.git: src/chrome/common/extensions/docs/server2/appengine_url_fetcher.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import base64
import logging
import posixpath
import time

from appengine_wrappers import urlfetch
from environment import GetAppVersion
from future import Future


_MAX_RETRIES = 5
_RETRY_DELAY_SECONDS = 30


def _MakeHeaders(username, password, access_token):
  """Builds the request headers, adding HTTP Basic or OAuth credentials.

  If both are supplied, |access_token| takes precedence, since it overwrites
  the Authorization header set for |username| and |password|.
  """
  headers = {
    'User-Agent': 'Chromium docserver %s' % GetAppVersion(),
    'Cache-Control': 'max-age=0',
  }
  if username is not None and password is not None:
    headers['Authorization'] = 'Basic %s' % base64.b64encode(
        '%s:%s' % (username, password))
  if access_token is not None:
    headers['Authorization'] = 'OAuth %s' % access_token
  return headers


class AppEngineUrlFetcher(object):
  """A wrapper around the App Engine urlfetch module that allows for easy
  async fetches.
  """
  def __init__(self, base_path=None):
    assert base_path is None or not base_path.endswith('/'), base_path
    self._base_path = base_path
    self._retries_left = _MAX_RETRIES

  def Fetch(self, url, username=None, password=None, access_token=None):
    """Fetches a file synchronously."""
    return urlfetch.fetch(self._FromBasePath(url),
                          deadline=20,
                          headers=_MakeHeaders(username,
                                               password,
                                               access_token))

  def FetchAsync(self, url, username=None, password=None, access_token=None):
    """Fetches a file asynchronously, and returns a Future with the result.
    """
    def process_result(result):
      # HTTP 429 (Too Many Requests) means the request was throttled; back
      # off for _RETRY_DELAY_SECONDS and retry, giving up after _MAX_RETRIES
      # attempts.
      if result.status_code == 429:
        if self._retries_left == 0:
          logging.error('Still throttled. Giving up.')
          return result
        self._retries_left -= 1
        logging.info('Throttled. Trying again in %s seconds.' %
                     _RETRY_DELAY_SECONDS)
        time.sleep(_RETRY_DELAY_SECONDS)
        return self.FetchAsync(url, username, password, access_token).Get()
      return result

    rpc = urlfetch.create_rpc(deadline=20)
    urlfetch.make_fetch_call(rpc,
                             self._FromBasePath(url),
                             headers=_MakeHeaders(username,
                                                  password,
                                                  access_token))
    return Future(callback=lambda: process_result(rpc.get_result()))

  def _FromBasePath(self, url):
    """Joins |url| onto this fetcher's base path, if one was given."""
    assert not url.startswith('/'), url
    if self._base_path is not None:
      url = posixpath.join(self._base_path, url) if url else self._base_path
    return url
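

# A minimal usage sketch, assuming an App Engine environment in which
# appengine_wrappers.urlfetch is available; the base path, relative URLs, and
# access token below are hypothetical placeholders.
if __name__ == '__main__':
  fetcher = AppEngineUrlFetcher('https://example.com/docs')

  # Synchronous fetch: blocks until the urlfetch call completes.
  result = fetcher.Fetch('some/resource.json')
  logging.info('Sync fetch returned status %s', result.status_code)

  # Asynchronous fetch: the RPC is started immediately, but the result (and
  # any 429 retries) is only materialized when Get() is called on the Future.
  future = fetcher.FetchAsync('another/resource.json',
                              access_token='hypothetical-token')
  result = future.Get()
  logging.info('Async fetch returned status %s', result.status_code)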