from argparse import ArgumentParser
import asteval
import collections
+from contextlib import ExitStack
import copy
import difflib
import doctest
'SZ_4G': 0x100000000
}
+RE_REMOVE_DEFCONFIG = re.compile(r'(.*)_defconfig')
+
### helper functions ###
+def remove_defconfig(defc):
+ """Drop the _defconfig suffix on a string
+
+ Args:
+ defc (str): String to convert
+
+ Returns:
+ str: string with the '_defconfig' suffix removed
+ """
+ return RE_REMOVE_DEFCONFIG.match(defc)[1]
+
def check_top_directory():
"""Exit if we are not at the top of source directory."""
- for f in ('README', 'Licenses'):
- if not os.path.exists(f):
+ for fname in 'README', 'Licenses':
+ if not os.path.exists(fname):
sys.exit('Please run at the top of source directory.')
def check_clean_directory():
"""Exit if the source tree is not clean."""
- for f in ('.config', 'include/config'):
- if os.path.exists(f):
+ for fname in '.config', 'include/config':
+ if os.path.exists(fname):
sys.exit("source tree is not clean, please run 'make mrproper'")
def get_make_cmd():
necessarily "make". (for example, "gmake" on FreeBSD).
Returns the most appropriate command name on your system.
"""
- process = subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE)
- ret = process.communicate()
- if process.returncode:
- sys.exit('GNU Make not found')
+ with subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE) as proc:
+ ret = proc.communicate()
+ if proc.returncode:
+ sys.exit('GNU Make not found')
return ret[0].rstrip()
def get_matched_defconfig(line):
"""Get the defconfig files that match a pattern
Args:
- line: Path or filename to match, e.g. 'configs/snow_defconfig' or
+ line (str): Path or filename to match, e.g. 'configs/snow_defconfig' or
'k2*_defconfig'. If no directory is provided, 'configs/' is
prepended
Returns:
- a list of matching defconfig files
+ list of str: a list of matching defconfig files
"""
dirname = os.path.dirname(line)
if dirname:
"""Get all the defconfig files that match the patterns in a file.
Args:
- defconfigs_file: File containing a list of defconfigs to process, or
- '-' to read the list from stdin
+ defconfigs_file (str): File containing a list of defconfigs to process,
+ or '-' to read the list from stdin
Returns:
- A list of paths to defconfig files, with no duplicates
+ list of str: A list of paths to defconfig files, with no duplicates
"""
defconfigs = []
- if defconfigs_file == '-':
- fd = sys.stdin
- defconfigs_file = 'stdin'
- else:
- fd = open(defconfigs_file)
- for i, line in enumerate(fd):
- line = line.strip()
- if not line:
- continue # skip blank lines silently
- if ' ' in line:
- line = line.split(' ')[0] # handle 'git log' input
- matched = get_matched_defconfig(line)
- if not matched:
- print("warning: %s:%d: no defconfig matched '%s'" % \
- (defconfigs_file, i + 1, line), file=sys.stderr)
-
- defconfigs += matched
+ with ExitStack() as stack:
+ if defconfigs_file == '-':
+ inf = sys.stdin
+ defconfigs_file = 'stdin'
+ else:
+ inf = stack.enter_context(open(defconfigs_file, encoding='utf-8'))
+ for i, line in enumerate(inf):
+ line = line.strip()
+ if not line:
+ continue # skip blank lines silently
+ if ' ' in line:
+ line = line.split(' ')[0] # handle 'git log' input
+ matched = get_matched_defconfig(line)
+ if not matched:
+ print(f"warning: {defconfigs_file}:{i + 1}: no defconfig matched '{line}'",
+ file=sys.stderr)
+
+ defconfigs += matched
# use set() to drop multiple matching
- return [ defconfig[len('configs') + 1:] for defconfig in set(defconfigs) ]
+ return [defconfig[len('configs') + 1:] for defconfig in set(defconfigs)]
def get_all_defconfigs():
- """Get all the defconfig files under the configs/ directory."""
+ """Get all the defconfig files under the configs/ directory.
+
+ Returns:
+ list of str: List of paths to defconfig files
+ """
defconfigs = []
- for (dirpath, dirnames, filenames) in os.walk('configs'):
+ for (dirpath, _, filenames) in os.walk('configs'):
dirpath = dirpath[len('configs') + 1:]
for filename in fnmatch.filter(filenames, '*_defconfig'):
defconfigs.append(os.path.join(dirpath, filename))
# Otherwise, additional whitespace or line-feed might be printed.
return '\n'.join([ '\033[' + color + 'm' + s + '\033[0m' if s else ''
for s in string.split('\n') ])
- else:
- return string
+ return string
-def show_diff(a, b, file_path, color_enabled):
+def show_diff(alines, blines, file_path, color_enabled):
"""Show unidified diff.
- Arguments:
- a: A list of lines (before)
- b: A list of lines (after)
- file_path: Path to the file
- color_enabled: Display the diff in color
+ Args:
+ alines (list of str): A list of lines (before)
+ blines (list of str): A list of lines (after)
+ file_path (str): Path to the file
+ color_enabled (bool): Display the diff in color
"""
-
- diff = difflib.unified_diff(a, b,
+ diff = difflib.unified_diff(alines, blines,
fromfile=os.path.join('a', file_path),
tofile=os.path.join('b', file_path))
for line in diff:
- if line[0] == '-' and line[1] != '-':
- print(color_text(color_enabled, COLOR_RED, line), end=' ')
- elif line[0] == '+' and line[1] != '+':
- print(color_text(color_enabled, COLOR_GREEN, line), end=' ')
+ if line.startswith('-') and not line.startswith('--'):
+ print(color_text(color_enabled, COLOR_RED, line))
+ elif line.startswith('+') and not line.startswith('++'):
+ print(color_text(color_enabled, COLOR_GREEN, line))
else:
- print(line, end=' ')
+ print(line)
-def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre,
- extend_post):
+def extend_matched_lines(lines, matched, pre_patterns, post_patterns,
+ extend_pre, extend_post):
"""Extend matched lines if desired patterns are found before/after already
matched lines.
- Arguments:
- lines: A list of lines handled.
- matched: A list of line numbers that have been already matched.
- (will be updated by this function)
- pre_patterns: A list of regular expression that should be matched as
- preamble.
- post_patterns: A list of regular expression that should be matched as
- postamble.
- extend_pre: Add the line number of matched preamble to the matched list.
- extend_post: Add the line number of matched postamble to the matched list.
+ Args:
+ lines (list of str): list of lines handled.
+ matched (list of int): list of line numbers that have been already
+ matched (will be updated by this function)
+ pre_patterns (list of re.Pattern): list of regular expression that should
+ be matched as preamble
+ post_patterns (list of re.Pattern): list of regular expression that should
+ be matched as postamble
+ extend_pre (bool): Add the line number of matched preamble to the matched
+ list
+ extend_post (bool): Add the line number of matched postamble to the
+ matched list
"""
extended_matched = []
if j >= len(lines):
break
- for p in pre_patterns:
- if p.search(lines[i - 1]):
+ for pat in pre_patterns:
+ if pat.search(lines[i - 1]):
break
else:
# not matched
continue
- for p in post_patterns:
- if p.search(lines[j]):
+ for pat in post_patterns:
+ if pat.search(lines[j]):
break
else:
# not matched
matched.sort()
def confirm(args, prompt):
+ """Ask the user to confirm something
+
+ Args:
+ args (Namespace): program arguments
+ prompt (str): prompt to show the user
+
+ Returns:
+ bool: True to confirm, False to cancel/stop
+ """
if not args.yes:
while True:
- choice = input('{} [y/n]: '.format(prompt))
+ choice = input(f'{prompt} [y/n]: ')
choice = choice.lower()
print(choice)
- if choice == 'y' or choice == 'n':
+ if choice in ('y', 'n'):
break
if choice == 'n':
else:
out.write(data)
+def read_file(fname, as_lines=True, skip_unicode=False):
+ """Read a file and return the contents
+
+ Args:
+ fname (str): Filename to read from
+ as_lines (bool): True to return file contents as a list of lines
+ skip_unicode (bool): True to report unicode errors and continue
+
+ Returns:
+ iter of str: List of lines from the file with newline removed; str if
+ as_lines is False with newlines intact; or None if a unicode error
+ occurred
+
+ Raises:
+ UnicodeDecodeError: Unicode error occurred when reading
+ """
+ with open(fname, encoding='utf-8') as inf:
+ try:
+ if as_lines:
+ return [line.rstrip('\n') for line in inf.readlines()]
+ else:
+ return inf.read()
+ except UnicodeDecodeError as e:
+ if not skip_unicode:
+ raise
+ print("Failed on file '%s': %s" % (fname, e))
+ return None
+
def cleanup_empty_blocks(header_path, args):
"""Clean up empty conditional blocks
- Arguments:
- header_path: path to the cleaned file.
- args: program arguments
+ Args:
+ header_path (str): path to the cleaned file.
+ args (Namespace): program arguments
"""
pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M)
- with open(header_path) as f:
- try:
- data = f.read()
- except UnicodeDecodeError as e:
- print("Failed on file %s': %s" % (header_path, e))
- return
+ data = read_file(header_path, as_lines=False, skip_unicode=True)
+ if data is None:
+ return
new_data = pattern.sub('\n', data)
if args.dry_run:
return
- write_file(header_path, new_data)
+ if new_data != data:
+ write_file(header_path, new_data)
def cleanup_one_header(header_path, patterns, args):
"""Clean regex-matched lines away from a file.
- Arguments:
+ Args:
header_path: path to the cleaned file.
patterns: list of regex patterns. Any lines matching to these
patterns are deleted.
- args: program arguments
+ args (Namespace): program arguments
"""
- with open(header_path) as f:
- try:
- lines = f.readlines()
- except UnicodeDecodeError as e:
- print("Failed on file %s': %s" % (header_path, e))
- return
+ lines = read_file(header_path, skip_unicode=True)
+ if lines is None:
+ return
matched = []
for i, line in enumerate(lines):
- if i - 1 in matched and lines[i - 1][-2:] == '\\\n':
+ if i - 1 in matched and lines[i - 1].endswith('\\'):
matched.append(i)
continue
for pattern in patterns:
return
# remove empty #ifdef ... #endif, successive blank lines
- pattern_if = re.compile(r'#\s*if(def|ndef)?\W') # #if, #ifdef, #ifndef
- pattern_elif = re.compile(r'#\s*el(if|se)\W') # #elif, #else
- pattern_endif = re.compile(r'#\s*endif\W') # #endif
+ pattern_if = re.compile(r'#\s*if(def|ndef)?\b') # #if, #ifdef, #ifndef
+ pattern_elif = re.compile(r'#\s*el(if|se)\b') # #elif, #else
+ pattern_endif = re.compile(r'#\s*endif\b') # #endif
pattern_blank = re.compile(r'^\s*$') # empty line
while True:
def cleanup_headers(configs, args):
"""Delete config defines from board headers.
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- args: program arguments
+ args (Namespace): program arguments
"""
if not confirm(args, 'Clean up headers?'):
return
patterns = []
for config in configs:
- patterns.append(re.compile(r'#\s*define\s+%s\W' % config))
- patterns.append(re.compile(r'#\s*undef\s+%s\W' % config))
+ patterns.append(re.compile(r'#\s*define\s+%s\b' % config))
+ patterns.append(re.compile(r'#\s*undef\s+%s\b' % config))
for dir in 'include', 'arch', 'board':
for (dirpath, dirnames, filenames) in os.walk(dir):
def cleanup_one_extra_option(defconfig_path, configs, args):
"""Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in one defconfig file.
- Arguments:
+ Args:
defconfig_path: path to the cleaned defconfig file.
configs: A list of CONFIGs to remove.
- args: program arguments
+ args (Namespace): program arguments
"""
start = 'CONFIG_SYS_EXTRA_OPTIONS="'
- end = '"\n'
+ end = '"'
- with open(defconfig_path) as f:
- lines = f.readlines()
+ lines = read_file(defconfig_path)
for i, line in enumerate(lines):
if line.startswith(start) and line.endswith(end):
def cleanup_extra_options(configs, args):
"""Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in defconfig files.
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- args: program arguments
+ args (Namespace): program arguments
"""
if not confirm(args, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'):
return
def cleanup_whitelist(configs, args):
"""Delete config whitelist entries
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- args: program arguments
+ args (Namespace): program arguments
"""
if not confirm(args, 'Clean up whitelist entries?'):
return
- with open(os.path.join('scripts', 'config_whitelist.txt')) as f:
- lines = f.readlines()
+ lines = read_file(os.path.join('scripts', 'config_whitelist.txt'))
lines = [x for x in lines if x.strip() not in configs]
def cleanup_readme(configs, args):
"""Delete config description in README
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- args: program arguments
+ args (Namespace): program arguments
"""
if not confirm(args, 'Clean up README?'):
return
for config in configs:
patterns.append(re.compile(r'^\s+%s' % config))
- with open('README') as f:
- lines = f.readlines()
+ lines = read_file('README')
found = False
newlines = []
def __init__(self, total):
"""Create a new progress indicator.
- Arguments:
+ Args:
total: A number of defconfig files to process.
"""
self.current = 0
def __init__(self, configs, args, build_dir):
"""Create a new parser.
- Arguments:
+ Args:
configs: A list of CONFIGs to move.
- args: program arguments
+ args (Namespace): program arguments
build_dir: Build directory.
"""
self.configs = configs
"""
arch = ''
cpu = ''
- for line in open(self.dotconfig):
+ for line in read_file(self.dotconfig):
m = self.re_arch.match(line)
if m:
arch = m.group(1)
defconfig, .config, and include/autoconf.mk in order to decide
which action should be taken for this defconfig.
- Arguments:
+ Args:
config: CONFIG name to parse.
dotconfig_lines: lines from the .config file.
autoconf_lines: lines from the include/autoconf.mk file.
searching the target options.
Move the config option(s) to the .config as needed.
- Arguments:
+ Args:
defconfig: defconfig name.
Returns:
else:
autoconf_path = self.autoconf
- with open(self.dotconfig) as f:
- dotconfig_lines = f.readlines()
+ dotconfig_lines = read_file(self.dotconfig)
- with open(autoconf_path) as f:
- autoconf_lines = f.readlines()
+ autoconf_lines = read_file(autoconf_path)
for config in self.configs:
result = self.parse_one_config(config, dotconfig_lines,
log += color_text(self.args.color, log_color, actlog) + '\n'
- with open(self.dotconfig, 'a') as f:
+ with open(self.dotconfig, 'a', encoding='utf-8') as out:
for (action, value) in results:
if action == ACTION_MOVE:
- f.write(value + '\n')
+ out.write(value + '\n')
updated = True
self.results = results
log = ''
- with open(self.defconfig) as f:
- defconfig_lines = f.readlines()
+ defconfig_lines = read_file(self.defconfig)
for (action, value) in self.results:
if action != ACTION_MOVE:
continue
- if not value + '\n' in defconfig_lines:
+ if value not in defconfig_lines:
log += color_text(self.args.color, COLOR_YELLOW,
"'%s' was removed by savedefconfig.\n" %
value)
make_cmd, reference_src_dir, db_queue):
"""Create a new process slot.
- Arguments:
+ Args:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
args: Program arguments
given defconfig and add it to the slot. Just returns False if
the slot is occupied (i.e. the current subprocess is still running).
- Arguments:
+ Args:
defconfig: defconfig name.
Returns:
def do_build_db(self):
"""Add the board to the database"""
configs = {}
- with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
- for line in fd.readlines():
- if line.startswith('CONFIG'):
- config, value = line.split('=', 1)
- configs[config] = value.rstrip()
+ for line in read_file(os.path.join(self.build_dir, AUTO_CONF_PATH)):
+ if line.startswith('CONFIG'):
+ config, value = line.split('=', 1)
+ configs[config] = value.rstrip()
self.db_queue.put([self.defconfig, configs])
self.finish(True)
def finish(self, success):
"""Display log along with progress and go to the idle state.
- Arguments:
+ Args:
success: Should be True when the defconfig was processed
successfully, or False when it fails.
"""
reference_src_dir, db_queue):
"""Create a new slots controller.
- Arguments:
+ Args:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
args: Program arguments
def add(self, defconfig):
"""Add a new subprocess if a vacant slot is found.
- Arguments:
+ Args:
defconfig: defconfig name to be put into.
Returns:
def __init__(self, commit):
"""Create a reference source directory based on a specified commit.
- Arguments:
+ Args:
commit: commit to git-clone
"""
self.src_dir = tempfile.mkdtemp()
def move_config(toolchains, configs, args, db_queue):
"""Move config options to defconfig files.
- Arguments:
+ Args:
configs: A list of CONFIGs to move.
args: Program arguments
"""
progress = Progress(len(defconfigs))
slots = Slots(toolchains, configs, args, progress, reference_src_dir,
- db_queue)
+ db_queue)
# Main loop to process defconfig files:
# Add a new subprocess into a vacant slot.
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
file_line = ' at %s:%d' % (fname, linenum)
- with open(fname) as fd:
- data = fd.read().splitlines()
+ data = read_file(fname)
if data[linenum - 1] != 'config %s' % imply_config:
return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
return fname, linenum, 'adding%s' % file_line
Message indicating the result
"""
file_line = ' at %s:%d' % (fname, linenum)
- data = open(fname).read().splitlines()
+ data = read_file(fname)
linenum -= 1
for offset, line in enumerate(data[linenum:]):
'non-arch-board': [
IMPLY_NON_ARCH_BOARD,
'Allow Kconfig options outside arch/ and /board/ to imply'],
-};
+}
def read_database():
all_defconfigs = set()
defconfig_db = collections.defaultdict(set)
- with open(CONFIG_DATABASE) as fd:
- for line in fd.readlines():
- line = line.rstrip()
- if not line: # Separator between defconfigs
- config_db[defconfig] = configs
- all_defconfigs.add(defconfig)
- configs = {}
- elif line[0] == ' ': # CONFIG line
- config, value = line.strip().split('=', 1)
- configs[config] = value
- defconfig_db[config].add(defconfig)
- all_configs.add(config)
- else: # New defconfig
- defconfig = line
+ for line in read_file(CONFIG_DATABASE):
+ line = line.rstrip()
+ if not line: # Separator between defconfigs
+ config_db[defconfig] = configs
+ all_defconfigs.add(defconfig)
+ configs = {}
+ elif line[0] == ' ': # CONFIG line
+ config, value = line.strip().split('=', 1)
+ configs[config] = value
+ defconfig_db[config].add(defconfig)
+ all_configs.add(config)
+ else: # New defconfig
+ defconfig = line
return all_configs, all_defconfigs, config_db, defconfig_db
for imply_config in rest_configs:
if 'ERRATUM' in imply_config:
continue
- if not (imply_flags & IMPLY_CMD):
+ if not imply_flags & IMPLY_CMD:
if 'CONFIG_CMD' in imply_config:
continue
- if not (imply_flags & IMPLY_TARGET):
+ if not imply_flags & IMPLY_TARGET:
if 'CONFIG_TARGET' in imply_config:
continue
in_arch_board = not sym or (fname.startswith('arch') or
fname.startswith('board'))
if (not in_arch_board and
- not (imply_flags & IMPLY_NON_ARCH_BOARD)):
+ not imply_flags & IMPLY_NON_ARCH_BOARD):
continue
if add_imply and (add_imply == 'all' or
for linenum in sorted(linenums, reverse=True):
add_imply_rule(config[CONFIG_LEN:], fname, linenum)
+def defconfig_matches(configs, re_match):
+ """Check if any CONFIG option matches a regex
+
+ The match must be complete, i.e. from the start to end of the CONFIG option.
+
+ Args:
+ configs (dict): Dict of CONFIG options:
+ key: CONFIG option
+ value: Value of option
+ re_match (re.Pattern): Match to check
+
+ Returns:
+ bool: True if any CONFIG matches the regex
+ """
+ for cfg in configs:
+ m_cfg = re_match.match(cfg)
+ if m_cfg and m_cfg.span()[1] == len(cfg):
+ return True
+ return False
def do_find_config(config_list):
"""Find boards with a given combination of CONFIGs
Params:
- config_list: List of CONFIG options to check (each a string consisting
+ config_list: List of CONFIG options to check (each a regex consisting
of a config option, with or without a CONFIG_ prefix. If an option
is preceded by a tilde (~) then it must be false, otherwise it must
be true)
all_configs, all_defconfigs, config_db, defconfig_db = read_database()
# Get the whitelist
- with open('scripts/config_whitelist.txt') as inf:
- adhoc_configs = set(inf.read().splitlines())
+ adhoc_configs = set(read_file('scripts/config_whitelist.txt'))
# Start with all defconfigs
out = all_defconfigs
# running for the next stage
in_list = out
out = set()
+ re_match = re.compile(cfg)
for defc in in_list:
- has_cfg = cfg in config_db[defc]
+ has_cfg = defconfig_matches(config_db[defc], re_match)
if has_cfg == want:
out.add(defc)
if adhoc:
print(f"Error: Not in Kconfig: %s" % ' '.join(adhoc))
else:
print(f'{len(out)} matches')
- print(' '.join(out))
+ print(' '.join([remove_defconfig(item) for item in out]))
def prefix_config(cfg):
subprocess.call(['git', 'commit', '-s', '-m', msg])
if args.build_db:
- with open(CONFIG_DATABASE, 'w') as fd:
+ with open(CONFIG_DATABASE, 'w', encoding='utf-8') as fd:
for defconfig, configs in config_db.items():
fd.write('%s\n' % defconfig)
for config in sorted(configs.keys()):