2 # Copyright 2014 The Chromium OS Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Integration tests for cidb.py module.
8 Running these tests requires and assumes:
9 1) You are running from a machine with whitelisted access to the CIDB
10 database test instance.
11 2) You have a checkout of the crostools repo, which provides credentials
12 to the above test instance.
# pylint: disable=W0212
17 from __future__ import print_function
23 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
24 os.path.abspath(__file__)))))
26 from chromite.cbuildbot import constants
27 from chromite.cbuildbot import metadata_lib
28 from chromite.lib import cidb
29 from chromite.lib import cros_build_lib
30 from chromite.lib import cros_test_lib
31 from chromite.lib import osutils
32 from chromite.lib import parallel
34 SERIES_0_TEST_DATA_PATH = os.path.join(
35 constants.CHROMITE_DIR, 'cidb', 'test_data', 'series_0')
37 SERIES_1_TEST_DATA_PATH = os.path.join(
38 constants.CHROMITE_DIR, 'cidb', 'test_data', 'series_1')
40 TEST_DB_CRED_ROOT = os.path.join(constants.SOURCE_ROOT,
44 TEST_DB_CRED_READONLY = os.path.join(constants.SOURCE_ROOT,
48 TEST_DB_CRED_BOT = os.path.join(constants.SOURCE_ROOT,
53 class CIDBIntegrationTest(cros_test_lib.TestCase):
54 """Base class for cidb tests that connect to a test MySQL instance."""
56 def _PrepareFreshDatabase(self, max_schema_version=None):
57 """Create an empty database with migrations applied.
60 max_schema_version: The highest schema version migration to apply,
61 defaults to None in which case all migrations will be applied.
64 A CIDBConnection instance, connected to a an empty database as the
67 # Note: We do not use the cidb.CIDBConnectionFactory
68 # in this module. That factory method is used only to construct
69 # connections as the bot user, which is how the builders will always
70 # connect to the database. In this module, however, we need to test
71 # database connections as other mysql users.
73 # Connect to database and drop its contents.
74 db = cidb.CIDBConnection(TEST_DB_CRED_ROOT)
77 # Connect to now fresh database and apply migrations.
78 db = cidb.CIDBConnection(TEST_DB_CRED_ROOT)
79 db.ApplySchemaMigrations(max_schema_version)
83 class CIDBMigrationsTest(CIDBIntegrationTest):
84 """Test that all migrations apply correctly."""
86 def testMigrations(self):
87 """Test that all migrations apply in bulk correctly."""
88 self._PrepareFreshDatabase()
91 def testIncrementalMigrations(self):
92 """Test that all migrations apply incrementally correctly."""
93 db = self._PrepareFreshDatabase(0)
94 migrations = db._GetMigrationScripts()
95 max_version = migrations[-1][0]
97 for i in range(1, max_version+1):
98 db.ApplySchemaMigrations(i)
100 def testActions(self):
101 """Test that InsertCLActions accepts 0-, 1-, and multi-item lists."""
102 db = self._PrepareFreshDatabase()
103 build_id = db.InsertBuild('my builder', 'chromiumos', 12, 'my config',
106 a1 = metadata_lib.GetCLActionTuple(
107 metadata_lib.GerritPatchTuple(1, 1, True),
108 constants.CL_ACTION_PICKED_UP)
109 a2 = metadata_lib.GetCLActionTuple(
110 metadata_lib.GerritPatchTuple(1, 1, True),
111 constants.CL_ACTION_PICKED_UP)
112 a3 = metadata_lib.GetCLActionTuple(
113 metadata_lib.GerritPatchTuple(1, 1, True),
114 constants.CL_ACTION_PICKED_UP)
116 db.InsertCLActions(build_id, [])
117 db.InsertCLActions(build_id, [a1])
118 db.InsertCLActions(build_id, [a2, a3])
120 action_count = db._GetEngine().execute('select count(*) from clActionTable'
122 self.assertEqual(action_count, 3)
124 # Test that all known CL action types can be inserted
125 fakepatch = metadata_lib.GerritPatchTuple(1, 1, True)
126 all_actions_list = [metadata_lib.GetCLActionTuple(fakepatch, action)
127 for action in constants.CL_ACTIONS]
128 db.InsertCLActions(build_id, all_actions_list)
class CIDBAPITest(CIDBIntegrationTest):
  """Tests of the CIDB API."""

  def testSchemaVersionTooLow(self):
    """Tests that the minimum_schema decorator works as expected."""
    # At schema version 3 the decorated method must refuse to run.
    database = self._PrepareFreshDatabase(3)
    self.assertRaises2(cidb.UnsupportedMethodException,
                       database.InsertBuildStages, [])

  def testSchemaVersionOK(self):
    """Tests that the minimum_schema decorator works as expected."""
    # Once the schema reaches version 4, the same call should succeed.
    database = self._PrepareFreshDatabase(4)
    database.InsertBuildStages([])
145 def GetTestDataSeries(test_data_path):
146 """Get metadata from json files at |test_data_path|.
149 A list of CBuildbotMetadata objects, sorted by their start time.
151 filenames = glob.glob(os.path.join(test_data_path, '*.json'))
153 for fname in filenames:
155 metadata_lib.CBuildbotMetadata.FromJSONString(osutils.ReadFile(fname)))
157 # Convert start time values, which are stored in RFC 2822 string format,
158 # to seconds since epoch.
159 timestamp_from_dict = lambda x: cros_build_lib.ParseUserDateTimeFormat(
160 x.GetDict()['time']['start'])
162 metadatas.sort(key=timestamp_from_dict)
166 class DataSeries0Test(CIDBIntegrationTest):
167 """Simulate a set of 630 master/slave CQ builds."""
169 def testCQWithSchema11(self):
170 """Run the CQ test with schema version 13."""
171 # Run the CQ test at schema version 13
172 self._PrepareFreshDatabase(13)
175 def _runCQTest(self):
176 """Simulate a set of 630 master/slave CQ builds.
178 Note: This test takes about 2.5 minutes to populate its 630 builds
179 and their corresponding cl actions into the test database.
181 metadatas = GetTestDataSeries(SERIES_0_TEST_DATA_PATH)
182 self.assertEqual(len(metadatas), 630, 'Did not load expected amount of '
185 bot_db = cidb.CIDBConnection(TEST_DB_CRED_BOT)
187 # Simulate the test builds, using a database connection as the
189 self.simulate_builds(bot_db, metadatas)
191 # Perform some sanity check queries against the database, connected
192 # as the readonly user.
193 readonly_db = cidb.CIDBConnection(TEST_DB_CRED_READONLY)
195 self._start_and_finish_time_checks(readonly_db)
197 build_types = readonly_db._GetEngine().execute(
198 'select build_type from buildTable').fetchall()
199 self.assertTrue(all(x == ('paladin',) for x in build_types))
201 self._cl_action_checks(readonly_db)
203 build_config_count = readonly_db._GetEngine().execute(
204 'select COUNT(distinct build_config) from buildTable').fetchall()[0][0]
205 self.assertEqual(build_config_count, 30)
207 # Test the _Select method, and verify that the first inserted
208 # build is a master-paladin build.
209 first_row = readonly_db._Select('buildTable', 1, ['id', 'build_config'])
210 self.assertEqual(first_row['build_config'], 'master-paladin')
212 # First master build has 29 slaves. Build with id 2 is a slave
213 # build with no slaves of its own.
214 self.assertEqual(len(readonly_db.GetSlaveStatuses(1)), 29)
215 self.assertEqual(len(readonly_db.GetSlaveStatuses(2)), 0)
217 # Make sure we can get build status by build id.
218 self.assertEqual(readonly_db.GetBuildStatus(2).get('id'), 2)
220 self._start_and_finish_time_checks(readonly_db)
221 self._cl_action_checks(readonly_db)
222 self._last_updated_time_checks(readonly_db)
224 def _last_updated_time_checks(self, db):
225 """Sanity checks on the last_updated column."""
226 # We should have a diversity of last_updated times. Since the timestamp
227 # resolution is only 1 second, and we have lots of parallelism in the test,
228 # we won't have a distring last_updated time per row. But we will have at
229 # least 100 distinct last_updated times.
230 distinct_last_updated = db._GetEngine().execute(
231 'select count(distinct last_updated) from buildTable').fetchall()[0][0]
232 self.assertTrue(distinct_last_updated > 80)
234 ids_by_last_updated = db._GetEngine().execute(
235 'select id from buildTable order by last_updated').fetchall()
237 ids_by_last_updated = [id_tuple[0] for id_tuple in ids_by_last_updated]
239 # Build #1 should have been last updated before build # 200.
240 self.assertLess(ids_by_last_updated.index(1),
241 ids_by_last_updated.index(200))
243 # However, build #1 (which was a master build) should have been last updated
244 # AFTER build #2 which was its slave.
245 self.assertGreater(ids_by_last_updated.index(1),
246 ids_by_last_updated.index(2))
248 def _cl_action_checks(self, db):
249 """Sanity checks that correct cl actions were recorded."""
250 submitted_cl_count = db._GetEngine().execute(
251 'select count(*) from clActionTable where action="submitted"'
253 rejected_cl_count = db._GetEngine().execute(
254 'select count(*) from clActionTable where action="kicked_out"'
256 total_actions = db._GetEngine().execute(
257 'select count(*) from clActionTable').fetchall()[0][0]
258 self.assertEqual(submitted_cl_count, 56)
259 self.assertEqual(rejected_cl_count, 8)
260 self.assertEqual(total_actions, 1877)
262 actions_for_change = db.GetActionsForChange(
263 metadata_lib.GerritChangeTuple(205535, False))
265 self.assertEqual(len(actions_for_change), 60)
266 last_action = actions_for_change[-1]
267 last_action.pop('timestamp')
268 last_action.pop('id')
269 self.assertEqual(last_action, {'action': 'submitted',
270 'build_config': 'master-paladin',
272 'change_number': 205535L,
273 'change_source': 'external',
276 def _start_and_finish_time_checks(self, db):
277 """Sanity checks that correct data was recorded, and can be retrieved."""
278 max_start_time = db._GetEngine().execute(
279 'select max(start_time) from buildTable').fetchall()[0][0]
280 min_start_time = db._GetEngine().execute(
281 'select min(start_time) from buildTable').fetchall()[0][0]
282 max_fin_time = db._GetEngine().execute(
283 'select max(finish_time) from buildTable').fetchall()[0][0]
284 min_fin_time = db._GetEngine().execute(
285 'select min(finish_time) from buildTable').fetchall()[0][0]
286 self.assertGreater(max_start_time, min_start_time)
287 self.assertGreater(max_fin_time, min_fin_time)
289 # For all builds, finish_time should equal last_updated.
290 mismatching_times = db._GetEngine().execute(
291 'select count(*) from buildTable where finish_time != last_updated'
293 self.assertEqual(mismatching_times, 0)
296 def simulate_builds(self, db, metadatas):
297 """Simulate a serires of Commit Queue master and slave builds.
299 This method use the metadata objects in |metadatas| to simulate those
300 builds insertions and updates to the cidb. All metadatas encountered
301 after a particular master build will be assumed to be slaves of that build,
302 until a new master build is encountered. Slave builds for a particular
303 master will be simulated in parallel.
305 The first element in |metadatas| must be a CQ master build.
308 db: A CIDBConnection instance.
309 metadatas: A list of CBuildbotMetadata instances, sorted by start time.
311 m_iter = iter(metadatas)
314 return m.GetDict()['bot-config'] == 'master-paladin'
316 next_master = m_iter.next()
321 assert is_master(master)
322 master_build_id = _SimulateBuildStart(db, master)
324 def simulate_slave(slave_metadata):
325 build_id = _SimulateBuildStart(db, slave_metadata,
327 _SimulateCQBuildFinish(db, slave_metadata, build_id)
328 logging.debug('Simulated slave build %s on pid %s', build_id,
337 slave_metadatas.append(slave)
339 with parallel.BackgroundTaskRunner(simulate_slave, processes=15) as queue:
340 for slave in slave_metadatas:
343 _SimulateCQBuildFinish(db, master, master_build_id)
344 logging.debug('Simulated master build %s', master_build_id)
347 class DataSeries1Test(CIDBIntegrationTest):
348 """Simulate a single set of canary builds."""
351 """Simulate a single set of canary builds with database schema v7."""
352 metadatas = GetTestDataSeries(SERIES_1_TEST_DATA_PATH)
353 self.assertEqual(len(metadatas), 18, 'Did not load expected amount of '
356 # Migrate db to specified version. As new schema versions are added,
357 # migrations to later version can be applied after the test builds are
358 # simulated, to test that db contents are correctly migrated.
359 self._PrepareFreshDatabase(11)
361 bot_db = cidb.CIDBConnection(TEST_DB_CRED_BOT)
364 return m.GetValue('bot-config') == 'master-release'
366 master_index = metadatas.index(next(m for m in metadatas if is_master(m)))
367 master_metadata = metadatas.pop(master_index)
368 self.assertEqual(master_metadata.GetValue('bot-config'), 'master-release')
370 master_id = self._simulate_canary(bot_db, master_metadata)
373 self._simulate_canary(bot_db, m, master_id)
375 # Verify that expected data was inserted
376 num_boards = bot_db._GetEngine().execute(
377 'select count(*) from boardPerBuildTable'
379 self.assertEqual(num_boards, 40)
381 main_firmware_versions = bot_db._GetEngine().execute(
382 'select count(distinct main_firmware_version) from boardPerBuildTable'
384 self.assertEqual(main_firmware_versions, 29)
386 # For all builds, finish_time should equal last_updated.
387 mismatching_times = bot_db._GetEngine().execute(
388 'select count(*) from buildTable where finish_time != last_updated'
390 self.assertEqual(mismatching_times, 0)
392 def _simulate_canary(self, db, metadata, master_build_id=None):
393 """Helper method to simulate an individual canary build.
396 db: cidb instance to use for simulation
397 metadata: CBuildbotMetadata instance of build to simulate.
398 master_build_id: Optional id of master build.
401 build_id of build that was simulated.
403 build_id = _SimulateBuildStart(db, metadata, master_build_id)
404 metadata_dict = metadata.GetDict()
406 # Insert child configs and boards
407 for child_config_dict in metadata_dict['child-configs']:
408 db.InsertChildConfigPerBuild(build_id, child_config_dict['name'])
410 for board in metadata_dict['board-metadata'].keys():
411 db.InsertBoardPerBuild(build_id, board)
413 for board, bm in metadata_dict['board-metadata'].items():
414 db.UpdateBoardPerBuildMetadata(build_id, board, bm)
416 db.UpdateMetadata(build_id, metadata)
418 status = metadata_dict['status']['status']
419 status = _TranslateStatus(status)
420 db.FinishBuild(build_id, status)
425 def _TranslateStatus(status):
426 # TODO(akeshet): The status strings used in BuildStatus are not the same as
427 # those recorded in CBuildbotMetadata. Use a general purpose adapter.
428 if status == 'passed':
431 if status == 'failed':
437 def _SimulateBuildStart(db, metadata, master_build_id=None):
438 """Returns (build_id, metadata_id) tuple."""
439 metadata_dict = metadata.GetDict()
440 # TODO(akeshet): We are pretending that all these builds were on the internal
441 # waterfall at the moment, for testing purposes. This is because we don't
442 # actually save in the metadata.json any way to know which waterfall the
444 waterfall = 'chromeos'
446 build_id = db.InsertBuild(metadata_dict['builder-name'],
448 metadata_dict['build-number'],
449 metadata_dict['bot-config'],
450 metadata_dict['bot-hostname'],
456 def _SimulateCQBuildFinish(db, metadata, build_id):
458 metadata_dict = metadata.GetDict()
460 # Insert the first build stage using InsertBuildStage, then batch-insert
461 # the rest with InsertBuildStages. This allows us to test InsertBuildStage
462 # without taking too much performance loss in the test.
463 stage_results = metadata_dict['results']
464 if len(stage_results) > 0:
466 db.InsertBuildStage(build_id, r['name'], r['board'],
467 _TranslateStatus(r['status']), r['log'],
468 cros_build_lib.ParseDurationToSeconds(r['duration']),
470 if len(stage_results) > 1:
471 stages = [{'build_id': build_id,
474 'status': _TranslateStatus(r['status']),
477 cros_build_lib.ParseDurationToSeconds(r['duration']),
478 'summary': r['summary']}
479 for r in stage_results[1:]]
480 db.InsertBuildStages(stages)
482 db.InsertCLActions(build_id, metadata_dict['cl_actions'])
484 db.UpdateMetadata(build_id, metadata)
486 status = metadata_dict['status']['status']
488 status = _TranslateStatus(status)
490 db.FinishBuild(build_id, status)
493 # TODO(akeshet): Allow command line args to specify alternate CIDB instance
495 if __name__ == '__main__':
496 logging.root.setLevel(logging.DEBUG)
497 logging.root.addHandler(logging.StreamHandler())