#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
from copy import deepcopy
from cStringIO import StringIO
from functools import partial
from hashlib import sha1
from random import random
import unittest
from zipfile import ZipFile

from caching_file_system import CachingFileSystem
from fake_url_fetcher import FakeURLFSFetcher, MockURLFetcher
from file_system import FileNotFoundError, StatInfo
from local_file_system import LocalFileSystem
from new_github_file_system import GithubFileSystem
from object_store_creator import ObjectStoreCreator
from test_file_system import TestFileSystem


class _TestBundle(object):
  '''Bundles test file data with a GithubFileSystem and test utilities.
  Create a GithubFileSystem and the MockURLFetcher it uses via
  |CreateGfsAndFetcher()|, randomly mutate the test contents via |Mutate()|,
  and access the underlying zip data via |files|.
  '''

  def __init__(self):
    self.files = {
      'zipfile/': '',
      'zipfile/hello.txt': 'world',
      'zipfile/readme': 'test zip',
      'zipfile/dir/file1': 'contents',
      'zipfile/dir/file2': 'more contents'
    }
    self._test_files = {
      'test_owner': {
        'changing-repo': {
          'commits': {
            'HEAD': self._MakeShaJson(self._GenerateHash())
          },
          'zipball': self._ZipFromFiles(self.files)
        }
      }
    }

  def CreateGfsAndFetcher(self):
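    '''Creates a GithubFileSystem from the test data and returns it together
    with the single MockURLFetcher it constructs.
    '''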
    fetchers = []
    def create_mock_url_fetcher(base_path):
      assert not fetchers
      fetchers.append(MockURLFetcher(
          FakeURLFSFetcher(TestFileSystem(self._test_files), base_path)))
      return fetchers[-1]

    # Constructing |gfs| will create a fetcher.
    gfs = GithubFileSystem.ForTest(
        'changing-repo/', create_mock_url_fetcher, path='')
    assert len(fetchers) == 1
    return gfs, fetchers[0]

  def Mutate(self):
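    '''Randomly mutates the test file contents, then updates the fake repo's
    zipball and HEAD commit hash to match. Returns the new commit hash and
    the new file contents.
    '''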
    fake_version = self._GenerateHash()
    fake_data = self._GenerateHash()
    self.files['zipfile/hello.txt'] = fake_data
    self.files['zipfile/new-file'] = fake_data
    self.files['zipfile/dir/file1'] = fake_data
    # XXX(kalman): These don't work anymore because TestFileSystem no longer
    # just uses the object it was given, but instead mutates it on
    # construction.  For now the tests that rely on this (i.e. Mutate) are
    # disabled, and in fact NewGithubFileSystem isn't really used anymore, so
    # rather than fixing this we may just want to delete it all.
    self._test_files['test_owner']['changing-repo']['zipball'] = (
        self._ZipFromFiles(self.files))
    self._test_files['test_owner']['changing-repo']['commits']['HEAD'] = (
        self._MakeShaJson(fake_version))
    return fake_version, fake_data

  def _GenerateHash(self):
    '''Generates an arbitrary SHA1 hash.
    '''
    return sha1(str(random())).hexdigest()

  def _MakeShaJson(self, hash_value):
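    '''Returns the canned HEAD commit JSON from the test data with its 'sha'
    field replaced by |hash_value|.
    '''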
    commit_json = json.loads(deepcopy(LocalFileSystem('').ReadSingle(
        'test_data/github_file_system/test_owner/repo/commits/HEAD').Get()))
    commit_json['sha'] = hash_value
    return json.dumps(commit_json)

  def _ZipFromFiles(self, file_dict):
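    '''Builds an in-memory zip archive from a dict mapping file names to
    contents and returns the raw zip data as a string.
    '''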
    string = StringIO()
    zipfile = ZipFile(string, 'w')
    for filename, contents in file_dict.iteritems():
      zipfile.writestr(filename, contents)
    zipfile.close()
    return string.getvalue()


class TestGithubFileSystem(unittest.TestCase):
  def setUp(self):
    self._gfs = GithubFileSystem.ForTest(
        'repo/', partial(FakeURLFSFetcher, LocalFileSystem('')))
    # Start and finish the repository load.
    self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())

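  # Note: the DISABLED_ prefix keeps unittest from collecting these tests;
  # only methods whose names start with 'test' are run.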
  def DISABLED_testReadDirectory(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        sorted(['requirements.txt', '.gitignore', 'README.md', 'src/']),
        sorted(self._gfs.ReadSingle('').Get()))
    self.assertEqual(
        sorted(['__init__.notpy', 'hello.notpy']),
        sorted(self._gfs.ReadSingle('src/').Get()))

  def DISABLED_testReadFile(self):
    self._gfs.Refresh().Get()
    expected = (
      '# Compiled Python files\n'
      '*.pyc\n'
    )
    self.assertEqual(expected, self._gfs.ReadSingle('.gitignore').Get())

  def DISABLED_testMultipleReads(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        self._gfs.ReadSingle('requirements.txt').Get(),
        self._gfs.ReadSingle('requirements.txt').Get())

  def DISABLED_testReads(self):
    self._gfs.Refresh().Get()
    expected = {
        'src/': sorted(['hello.notpy', '__init__.notpy']),
        '': sorted(['requirements.txt', '.gitignore', 'README.md', 'src/'])
    }

    read = self._gfs.Read(['', 'src/']).Get()
    self.assertEqual(expected['src/'], sorted(read['src/']))
    self.assertEqual(expected[''], sorted(read['']))

  def DISABLED_testStat(self):
    # This is the hash value from the zip on disk.
    real_hash = 'c36fc23688a9ec9e264d3182905dc0151bfff7d7'

    self._gfs.Refresh().Get()
    dir_stat = StatInfo(real_hash, {
      'hello.notpy': StatInfo(real_hash),
      '__init__.notpy': StatInfo(real_hash)
    })

    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('README.md'))
    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('src/hello.notpy'))
    self.assertEqual(dir_stat, self._gfs.Stat('src/'))

  def DISABLED_testBadReads(self):
    self._gfs.Refresh().Get()
    self.assertRaises(FileNotFoundError, self._gfs.Stat, 'DONT_README.md')
    self.assertRaises(FileNotFoundError,
                      self._gfs.ReadSingle('DONT_README.md').Get)

  def DISABLED_testCachingFileSystem(self):
    self._cgfs.Refresh().Get()
    initial_cgfs_read_one = self._cgfs.ReadSingle('src/hello.notpy').Get()

    self.assertEqual(initial_cgfs_read_one,
                     self._gfs.ReadSingle('src/hello.notpy').Get())
    self.assertEqual(initial_cgfs_read_one,
                     self._cgfs.ReadSingle('src/hello.notpy').Get())

    initial_cgfs_read_two = self._cgfs.Read(
        ['README.md', 'requirements.txt']).Get()

    self.assertEqual(
        initial_cgfs_read_two,
        self._gfs.Read(['README.md', 'requirements.txt']).Get())
    self.assertEqual(
        initial_cgfs_read_two,
        self._cgfs.Read(['README.md', 'requirements.txt']).Get())

  def DISABLED_testWithoutRefresh(self):
    # Without refreshing, it will still read the content from blobstore; it
    # does this via the magic of the FakeURLFSFetcher.
    self.assertEqual(['__init__.notpy', 'hello.notpy'],
                     sorted(self._gfs.ReadSingle('src/').Get()))

  def DISABLED_testRefresh(self):
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()

    # It shouldn't fetch until Refresh does so; then it will do two fetches,
    # one for the stat and another for the read.
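    # MockURLFetcher.CheckAndReset() verifies how many fetches of each kind
    # happened since the last check and then resets its counters; unpacking
    # its result into assertTrue surfaces the failure message, if any.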
    self.assertTrue(*fetcher.CheckAndReset())
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Refresh is just an alias for Read('').
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset())

    initial_dir_read = sorted(gfs.ReadSingle('').Get())
    initial_file_read = gfs.ReadSingle('dir/file1').Get()

    version, data = test_bundle.Mutate()

    # Check that the changes have not affected the file system yet.
    self.assertEqual(initial_dir_read, sorted(gfs.ReadSingle('').Get()))
    self.assertEqual(initial_file_read, gfs.ReadSingle('dir/file1').Get())
    self.assertNotEqual(StatInfo(version), gfs.Stat(''))

    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Check that the changes have affected the file system.
    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    # Regression test: ensure that reading the data after it's been mutated,
    # but before Refresh() has been realised, still returns the correct data.
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    version, data = test_bundle.Mutate()

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))

  def DISABLED_testGetThenRefreshOnStartup(self):
    # Regression test: Test that calling Read() but never resolving its
    # future, then Refresh()ing the data, causes the data to be refreshed.
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    self.assertTrue(*fetcher.CheckAndReset())

    # Get a predictable version.
    version, data = test_bundle.Mutate()

    read_future = gfs.ReadSingle('hello.txt')
    # Fetch for the Stat(), async-fetch for the Read().
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset())

    self.assertEqual(data, read_future.Get())
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())

    # The fetch will already have been resolved, so resolving the Refresh won't
    # affect anything.
    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset())

    # Read data should not have changed.
    self.assertEqual(data, gfs.ReadSingle('hello.txt').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())


if __name__ == '__main__':
  unittest.main()