From: Artem Bityutskiy
Date: Wed, 28 Nov 2012 08:30:07 +0000 (+0200)
Subject: tests: helpers: use less random data
X-Git-Tag: v1.0~26
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=49394ca6ee723ccfe90372341318a172bafe09da;p=tools%2Fbmap-tools.git

tests: helpers: use less random data

We do not need truly random data in the files we generate - it only
slows the tests down a lot. Let's fill the files with semi-random data
instead: just pick a random byte and fill large regions with that byte.
This also makes compression faster, and it is good enough for our
purposes. Besides, this allows tests to be reproduced by setting a known
seed, which is not possible with os.urandom.

Change-Id: I0459ee44d271b8e19581e84cc33b7167e4ada166
Signed-off-by: Artem Bityutskiy
---

diff --git a/tests/helpers.py b/tests/helpers.py
index 99015bb..45e03c3 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -5,7 +5,6 @@ tests.
 """
 # * Too many statements (R0915)
 # pylint: disable=R0915
-import os
 import tempfile
 import random
 import itertools
@@ -13,7 +12,7 @@ from bmaptools import BmapHelpers
 
 def create_random_sparse_file(file_obj, size):
     """ Create a sparse file with randomly distributed holes. The mapped areas
-    are filled with random data. Returns a tuple containing 2 lists:
+    are filled with semi-random data. Returns a tuple containing 2 lists:
       1. a list of mapped block ranges, same as 'Fiemap.get_mapped_ranges()'
       2. a list of unmapped block ranges (holes), same as
          'Fiemap.get_unmapped_ranges()' """
@@ -32,7 +31,7 @@ def create_random_sparse_file(file_obj, size):
 
         if map_the_block:
             file_obj.seek(block * block_size)
-            file_obj.write(bytearray(os.urandom(block_size)))
+            file_obj.write(chr(random.getrandbits(8)) * block_size)
         else:
             file_obj.truncate((block + 1) * block_size)
 
@@ -60,7 +59,7 @@ def create_random_sparse_file(file_obj, size):
     return (mapped, holes)
 
 def _create_random_file(file_obj, size):
-    """ Fill the 'file_obj' file object with random data up to the size
+    """ Fill the 'file_obj' file object with semi-random data up to the size
     'size'. """
 
     chunk_size = 1024 * 1024
@@ -70,7 +69,7 @@ def _create_random_file(file_obj, size):
 
         if written + chunk_size > size:
             chunk_size = size - written
 
-        file_obj.write(bytearray(os.urandom(chunk_size)))
+        file_obj.write(chr(random.getrandbits(8)) * chunk_size)
         written += chunk_size
 
     file_obj.flush()
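
As a minimal sketch (not part of the patch above) of the reproducibility
point in the commit message: because the helpers now draw all randomness
from Python's 'random' module instead of os.urandom(), seeding before
calling them should reproduce the same file layout and contents on every
run. The import path, seed value, and file size below are illustrative
assumptions, not taken from the repository's test suite:

    import random
    import tempfile

    from tests import helpers  # assumed import path for tests/helpers.py

    # Assumption: with a fixed seed, both the hole distribution and the
    # semi-random fill bytes come out identical on every run.
    random.seed(1332)  # arbitrary fixed seed, chosen for illustration
    file_obj = tempfile.TemporaryFile()
    mapped, holes = helpers.create_random_sparse_file(file_obj, 1024 * 1024)
    # Re-seeding with the same value and calling the helper again would
    # reproduce the same 'mapped'/'holes' lists and file contents, which
    # is what makes a failing test case replayable.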