-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Author: Masahiro Yamada <yamada.masahiro@socionext.com>
"""
Move config options from headers to defconfig files.
-Since Kconfig was introduced to U-Boot, we have worked on moving
-config options from headers to Kconfig (defconfig).
-
-This tool intends to help this tremendous work.
-
-
-Usage
------
-
-First, you must edit the Kconfig to add the menu entries for the configs
-you are moving.
-
-And then run this tool giving CONFIG names you want to move.
-For example, if you want to move CONFIG_CMD_USB and CONFIG_SYS_TEXT_BASE,
-simply type as follows:
-
- $ tools/moveconfig.py CONFIG_CMD_USB CONFIG_SYS_TEXT_BASE
-
-The tool walks through all the defconfig files and move the given CONFIGs.
-
-The log is also displayed on the terminal.
-
-The log is printed for each defconfig as follows:
-
-<defconfig_name>
- <action1>
- <action2>
- <action3>
- ...
-
-<defconfig_name> is the name of the defconfig.
-
-<action*> shows what the tool did for that defconfig.
-It looks like one of the following:
-
- - Move 'CONFIG_... '
- This config option was moved to the defconfig
-
- - CONFIG_... is not defined in Kconfig. Do nothing.
- The entry for this CONFIG was not found in Kconfig. The option is not
- defined in the config header, either. So, this case can be just skipped.
-
- - CONFIG_... is not defined in Kconfig (suspicious). Do nothing.
- This option is defined in the config header, but its entry was not found
- in Kconfig.
- There are two common cases:
- - You forgot to create an entry for the CONFIG before running
- this tool, or made a typo in a CONFIG passed to this tool.
- - The entry was hidden due to unmet 'depends on'.
- The tool does not know if the result is reasonable, so please check it
- manually.
-
- - 'CONFIG_...' is the same as the define in Kconfig. Do nothing.
- The define in the config header matched the one in Kconfig.
- We do not need to touch it.
-
- - Compiler is missing. Do nothing.
- The compiler specified for this architecture was not found
- in your PATH environment.
- (If -e option is passed, the tool exits immediately.)
-
- - Failed to process.
- An error occurred during processing this defconfig. Skipped.
- (If -e option is passed, the tool exits immediately on error.)
-
-Finally, you will be asked, Clean up headers? [y/n]:
-
-If you say 'y' here, the unnecessary config defines are removed
-from the config headers (include/configs/*.h).
-It just uses the regex method, so you should not rely on it.
-Just in case, please do 'git diff' to see what happened.
-
-
-How does it work?
------------------
-
-This tool runs configuration and builds include/autoconf.mk for every
-defconfig. The config options defined in Kconfig appear in the .config
-file (unless they are hidden because of unmet dependency.)
-On the other hand, the config options defined by board headers are seen
-in include/autoconf.mk. The tool looks for the specified options in both
-of them to decide the appropriate action for the options. If the given
-config option is found in the .config, but its value does not match the
-one from the board header, the config option in the .config is replaced
-with the define in the board header. Then, the .config is synced by
-"make savedefconfig" and the defconfig is updated with it.
-
-For faster processing, this tool handles multi-threading. It creates
-separate build directories where the out-of-tree build is run. The
-temporary build directories are automatically created and deleted as
-needed. The number of threads are chosen based on the number of the CPU
-cores of your system although you can change it via -j (--jobs) option.
-
-
-Toolchains
-----------
-
-Appropriate toolchain are necessary to generate include/autoconf.mk
-for all the architectures supported by U-Boot. Most of them are available
-at the kernel.org site, some are not provided by kernel.org. This tool uses
-the same tools as buildman, so see that tool for setup (e.g. --fetch-arch).
-
-
-Tips and trips
---------------
-
-To sync only X86 defconfigs:
-
- ./tools/moveconfig.py -s -d <(grep -l X86 configs/*)
-
-or:
-
- grep -l X86 configs/* | ./tools/moveconfig.py -s -d -
-
-To process CONFIG_CMD_FPGAD only for a subset of configs based on path match:
-
- ls configs/{hrcon*,iocon*,strider*} | \
- ./tools/moveconfig.py -Cy CONFIG_CMD_FPGAD -d -
-
-
-Finding implied CONFIGs
------------------------
-
-Some CONFIG options can be implied by others and this can help to reduce
-the size of the defconfig files. For example, CONFIG_X86 implies
-CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
-all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
-each of the x86 defconfig files.
-
-This tool can help find such configs. To use it, first build a database:
-
- ./tools/moveconfig.py -b
-
-Then try to query it:
-
- ./tools/moveconfig.py -i CONFIG_CMD_IRQ
- CONFIG_CMD_IRQ found in 311/2384 defconfigs
- 44 : CONFIG_SYS_FSL_ERRATUM_IFC_A002769
- 41 : CONFIG_SYS_FSL_ERRATUM_A007075
- 31 : CONFIG_SYS_FSL_DDR_VER_44
- 28 : CONFIG_ARCH_P1010
- 28 : CONFIG_SYS_FSL_ERRATUM_P1010_A003549
- 28 : CONFIG_SYS_FSL_ERRATUM_SEC_A003571
- 28 : CONFIG_SYS_FSL_ERRATUM_IFC_A003399
- 25 : CONFIG_SYS_FSL_ERRATUM_A008044
- 22 : CONFIG_ARCH_P1020
- 21 : CONFIG_SYS_FSL_DDR_VER_46
- 20 : CONFIG_MAX_PIRQ_LINKS
- 20 : CONFIG_HPET_ADDRESS
- 20 : CONFIG_X86
- 20 : CONFIG_PCIE_ECAM_SIZE
- 20 : CONFIG_IRQ_SLOT_COUNT
- 20 : CONFIG_I8259_PIC
- 20 : CONFIG_CPU_ADDR_BITS
- 20 : CONFIG_RAMBASE
- 20 : CONFIG_SYS_FSL_ERRATUM_A005871
- 20 : CONFIG_PCIE_ECAM_BASE
- 20 : CONFIG_X86_TSC_TIMER
- 20 : CONFIG_I8254_TIMER
- 20 : CONFIG_CMD_GETTIME
- 19 : CONFIG_SYS_FSL_ERRATUM_A005812
- 18 : CONFIG_X86_RUN_32BIT
- 17 : CONFIG_CMD_CHIP_CONFIG
- ...
-
-This shows a list of config options which might imply CONFIG_CMD_EEPROM along
-with how many defconfigs they cover. From this you can see that CONFIG_X86
-implies CONFIG_CMD_EEPROM. Therefore, instead of adding CONFIG_CMD_EEPROM to
-the defconfig of every x86 board, you could add a single imply line to the
-Kconfig file:
-
- config X86
- bool "x86 architecture"
- ...
- imply CMD_EEPROM
-
-That will cover 20 defconfigs. Many of the options listed are not suitable as
-they are not related. E.g. it would be odd for CONFIG_CMD_GETTIME to imply
-CMD_EEPROM.
-
-Using this search you can reduce the size of moveconfig patches.
-
-You can automatically add 'imply' statements in the Kconfig with the -a
-option:
-
- ./tools/moveconfig.py -s -i CONFIG_SCSI \
- -a CONFIG_ARCH_LS1021A,CONFIG_ARCH_LS1043A
-
-This will add 'imply SCSI' to the two CONFIG options mentioned, assuming that
-the database indicates that they do actually imply CONFIG_SCSI and do not
-already have an 'imply SCSI'.
-
-The output shows where the imply is added:
-
- 18 : CONFIG_ARCH_LS1021A arch/arm/cpu/armv7/ls102xa/Kconfig:1
- 13 : CONFIG_ARCH_LS1043A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:11
- 12 : CONFIG_ARCH_LS1046A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:31
-
-The first number is the number of boards which can avoid having a special
-CONFIG_SCSI option in their defconfig file if this 'imply' is added.
-The location at the right is the Kconfig file and line number where the config
-appears. For example, adding 'imply CONFIG_SCSI' to the 'config ARCH_LS1021A'
-in arch/arm/cpu/armv7/ls102xa/Kconfig at line 1 will help 18 boards to reduce
-the size of their defconfig files.
-
-If you want to add an 'imply' to every imply config in the list, you can use
-
- ./tools/moveconfig.py -s -i CONFIG_SCSI -a all
-
-To control which ones are displayed, use -I <list> where list is a list of
-options (use '-I help' to see possible options and their meaning).
-
-To skip showing you options that already have an 'imply' attached, use -A.
-
-When you have finished adding 'imply' options you can regenerate the
-defconfig files for affected boards with something like:
-
- git show --stat | ./tools/moveconfig.py -s -d -
-
-This will regenerate only those defconfigs changed in the current commit.
-If you start with (say) 100 defconfigs being changed in the commit, and add
-a few 'imply' options as above, then regenerate, hopefully you can reduce the
-number of defconfigs changed in the commit.
-
-
-Available options
------------------
-
- -c, --color
- Surround each portion of the log with escape sequences to display it
- in color on the terminal.
-
- -C, --commit
- Create a git commit with the changes when the operation is complete. A
- standard commit message is used which may need to be edited.
-
- -d, --defconfigs
- Specify a file containing a list of defconfigs to move. The defconfig
- files can be given with shell-style wildcards. Use '-' to read from stdin.
-
- -n, --dry-run
- Perform a trial run that does not make any changes. It is useful to
- see what is going to happen before one actually runs it.
-
- -e, --exit-on-error
- Exit immediately if Make exits with a non-zero status while processing
- a defconfig file.
-
- -s, --force-sync
- Do "make savedefconfig" forcibly for all the defconfig files.
- If not specified, "make savedefconfig" only occurs for cases
- where at least one CONFIG was moved.
-
- -S, --spl
- Look for moved config options in spl/include/autoconf.mk instead of
- include/autoconf.mk. This is useful for moving options for SPL build
- because SPL related options (mostly prefixed with CONFIG_SPL_) are
- sometimes blocked by CONFIG_SPL_BUILD ifdef conditionals.
-
- -H, --headers-only
- Only cleanup the headers; skip the defconfig processing
-
- -j, --jobs
- Specify the number of threads to run simultaneously. If not specified,
- the number of threads is the same as the number of CPU cores.
-
- -r, --git-ref
- Specify the git ref to clone for building the autoconf.mk. If unspecified
- use the CWD. This is useful for when changes to the Kconfig affect the
- default values and you want to capture the state of the defconfig from
- before that change was in effect. If in doubt, specify a ref pre-Kconfig
- changes (use HEAD if Kconfig changes are not committed). Worst case it will
- take a bit longer to run, but will always do the right thing.
-
- -v, --verbose
- Show any build errors as boards are built
-
- -y, --yes
- Instead of prompting, automatically go ahead with all operations. This
- includes cleaning up headers, CONFIG_SYS_EXTRA_OPTIONS, the config whitelist
- and the README.
-
-To see the complete list of supported options, run
-
- $ tools/moveconfig.py -h
-
+See doc/develop/moveconfig.rst for documentation.
"""
+from argparse import ArgumentParser
+import asteval
import collections
+from contextlib import ExitStack
import copy
import difflib
+import doctest
import filecmp
import fnmatch
import glob
import multiprocessing
-import optparse
import os
-import Queue
+import queue
import re
import shutil
import subprocess
import tempfile
import threading
import time
+import unittest
-sys.path.append(os.path.join(os.path.dirname(__file__), 'buildman'))
-sys.path.append(os.path.join(os.path.dirname(__file__), 'patman'))
-import bsettings
-import kconfiglib
-import toolchain
+from buildman import bsettings
+from buildman import kconfiglib
+from buildman import toolchain
SHOW_GNU_MAKE = 'scripts/show-gnu-make'
SLEEP_TIME=0.03
CONFIG_LEN = len('CONFIG_')
-### helper functions ###
-def get_devnull():
- """Get the file object of '/dev/null' device."""
- try:
- devnull = subprocess.DEVNULL # py3k
- except AttributeError:
- devnull = open(os.devnull, 'wb')
- return devnull
+SIZES = {
+ 'SZ_1': 0x00000001, 'SZ_2': 0x00000002,
+ 'SZ_4': 0x00000004, 'SZ_8': 0x00000008,
+ 'SZ_16': 0x00000010, 'SZ_32': 0x00000020,
+ 'SZ_64': 0x00000040, 'SZ_128': 0x00000080,
+ 'SZ_256': 0x00000100, 'SZ_512': 0x00000200,
+ 'SZ_1K': 0x00000400, 'SZ_2K': 0x00000800,
+ 'SZ_4K': 0x00001000, 'SZ_8K': 0x00002000,
+ 'SZ_16K': 0x00004000, 'SZ_32K': 0x00008000,
+ 'SZ_64K': 0x00010000, 'SZ_128K': 0x00020000,
+ 'SZ_256K': 0x00040000, 'SZ_512K': 0x00080000,
+ 'SZ_1M': 0x00100000, 'SZ_2M': 0x00200000,
+ 'SZ_4M': 0x00400000, 'SZ_8M': 0x00800000,
+ 'SZ_16M': 0x01000000, 'SZ_32M': 0x02000000,
+ 'SZ_64M': 0x04000000, 'SZ_128M': 0x08000000,
+ 'SZ_256M': 0x10000000, 'SZ_512M': 0x20000000,
+ 'SZ_1G': 0x40000000, 'SZ_2G': 0x80000000,
+ 'SZ_4G': 0x100000000
+}
+
+RE_REMOVE_DEFCONFIG = re.compile(r'(.*)_defconfig')
+### helper functions ###
def check_top_directory():
"""Exit if we are not at the top of source directory."""
- for f in ('README', 'Licenses'):
- if not os.path.exists(f):
+ for fname in 'README', 'Licenses':
+ if not os.path.exists(fname):
sys.exit('Please run at the top of source directory.')
def check_clean_directory():
"""Exit if the source tree is not clean."""
- for f in ('.config', 'include/config'):
- if os.path.exists(f):
+ for fname in '.config', 'include/config':
+ if os.path.exists(fname):
sys.exit("source tree is not clean, please run 'make mrproper'")
def get_make_cmd():
necessarily "make". (for example, "gmake" on FreeBSD).
Returns the most appropriate command name on your system.
"""
- process = subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE)
- ret = process.communicate()
- if process.returncode:
- sys.exit('GNU Make not found')
+ with subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE) as proc:
+ ret = proc.communicate()
+ if proc.returncode:
+ sys.exit('GNU Make not found')
return ret[0].rstrip()
def get_matched_defconfig(line):
"""Get the defconfig files that match a pattern
Args:
- line: Path or filename to match, e.g. 'configs/snow_defconfig' or
+ line (str): Path or filename to match, e.g. 'configs/snow_defconfig' or
'k2*_defconfig'. If no directory is provided, 'configs/' is
prepended
Returns:
- a list of matching defconfig files
+ list of str: a list of matching defconfig files
"""
dirname = os.path.dirname(line)
if dirname:
"""Get all the defconfig files that match the patterns in a file.
Args:
- defconfigs_file: File containing a list of defconfigs to process, or
- '-' to read the list from stdin
+ defconfigs_file (str): File containing a list of defconfigs to process,
+ or '-' to read the list from stdin
Returns:
- A list of paths to defconfig files, with no duplicates
+ list of str: A list of paths to defconfig files, with no duplicates
"""
defconfigs = []
- if defconfigs_file == '-':
- fd = sys.stdin
- defconfigs_file = 'stdin'
- else:
- fd = open(defconfigs_file)
- for i, line in enumerate(fd):
- line = line.strip()
- if not line:
- continue # skip blank lines silently
- if ' ' in line:
- line = line.split(' ')[0] # handle 'git log' input
- matched = get_matched_defconfig(line)
- if not matched:
- print >> sys.stderr, "warning: %s:%d: no defconfig matched '%s'" % \
- (defconfigs_file, i + 1, line)
-
- defconfigs += matched
+ with ExitStack() as stack:
+ if defconfigs_file == '-':
+ inf = sys.stdin
+ defconfigs_file = 'stdin'
+ else:
+ inf = stack.enter_context(open(defconfigs_file, encoding='utf-8'))
+ for i, line in enumerate(inf):
+ line = line.strip()
+ if not line:
+ continue # skip blank lines silently
+ if ' ' in line:
+ line = line.split(' ')[0] # handle 'git log' input
+ matched = get_matched_defconfig(line)
+ if not matched:
+ print(f"warning: {defconfigs_file}:{i + 1}: no defconfig matched '{line}'",
+ file=sys.stderr)
+
+ defconfigs += matched
# use set() to drop multiple matching
- return [ defconfig[len('configs') + 1:] for defconfig in set(defconfigs) ]
+ return [defconfig[len('configs') + 1:] for defconfig in set(defconfigs)]
def get_all_defconfigs():
- """Get all the defconfig files under the configs/ directory."""
+ """Get all the defconfig files under the configs/ directory.
+
+ Returns:
+ list of str: List of paths to defconfig files
+ """
defconfigs = []
- for (dirpath, dirnames, filenames) in os.walk('configs'):
+ for (dirpath, _, filenames) in os.walk('configs'):
dirpath = dirpath[len('configs') + 1:]
for filename in fnmatch.filter(filenames, '*_defconfig'):
defconfigs.append(os.path.join(dirpath, filename))
# Otherwise, additional whitespace or line-feed might be printed.
return '\n'.join([ '\033[' + color + 'm' + s + '\033[0m' if s else ''
for s in string.split('\n') ])
- else:
- return string
+ return string
-def show_diff(a, b, file_path, color_enabled):
+def show_diff(alines, blines, file_path, color_enabled):
"""Show unidified diff.
- Arguments:
- a: A list of lines (before)
- b: A list of lines (after)
- file_path: Path to the file
- color_enabled: Display the diff in color
+ Args:
+ alines (list of str): A list of lines (before)
+ blines (list of str): A list of lines (after)
+ file_path (str): Path to the file
+ color_enabled (bool): Display the diff in color
"""
-
- diff = difflib.unified_diff(a, b,
+ diff = difflib.unified_diff(alines, blines,
fromfile=os.path.join('a', file_path),
tofile=os.path.join('b', file_path))
for line in diff:
- if line[0] == '-' and line[1] != '-':
- print color_text(color_enabled, COLOR_RED, line),
- elif line[0] == '+' and line[1] != '+':
- print color_text(color_enabled, COLOR_GREEN, line),
+ if line.startswith('-') and not line.startswith('--'):
+ print(color_text(color_enabled, COLOR_RED, line))
+ elif line.startswith('+') and not line.startswith('++'):
+ print(color_text(color_enabled, COLOR_GREEN, line))
else:
- print line,
+ print(line)
-def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre,
- extend_post):
+def extend_matched_lines(lines, matched, pre_patterns, post_patterns,
+ extend_pre, extend_post):
"""Extend matched lines if desired patterns are found before/after already
matched lines.
- Arguments:
- lines: A list of lines handled.
- matched: A list of line numbers that have been already matched.
- (will be updated by this function)
- pre_patterns: A list of regular expression that should be matched as
- preamble.
- post_patterns: A list of regular expression that should be matched as
- postamble.
- extend_pre: Add the line number of matched preamble to the matched list.
- extend_post: Add the line number of matched postamble to the matched list.
+ Args:
+ lines (list of str): list of lines handled.
+ matched (list of int): list of line numbers that have been already
+ matched (will be updated by this function)
+ pre_patterns (list of re.Pattern): list of regular expression that should
+ be matched as preamble
+ post_patterns (list of re.Pattern): list of regular expression that should
+ be matched as postamble
+ extend_pre (bool): Add the line number of matched preamble to the matched
+ list
+ extend_post (bool): Add the line number of matched postamble to the
+ matched list
"""
extended_matched = []
if j >= len(lines):
break
- for p in pre_patterns:
- if p.search(lines[i - 1]):
+ for pat in pre_patterns:
+ if pat.search(lines[i - 1]):
break
else:
# not matched
continue
- for p in post_patterns:
- if p.search(lines[j]):
+ for pat in post_patterns:
+ if pat.search(lines[j]):
break
else:
# not matched
matched += extended_matched
matched.sort()
-def confirm(options, prompt):
- if not options.yes:
+def confirm(args, prompt):
+ """Ask the user to confirm something
+
+ Args:
+        args (Namespace): program arguments
+
+ Returns:
+ bool: True to confirm, False to cancel/stop
+ """
+ if not args.yes:
while True:
- choice = raw_input('{} [y/n]: '.format(prompt))
+ choice = input(f'{prompt} [y/n]: ')
choice = choice.lower()
- print choice
- if choice == 'y' or choice == 'n':
+ print(choice)
+ if choice in ('y', 'n'):
break
if choice == 'n':
return True
-def cleanup_empty_blocks(header_path, options):
+def write_file(fname, data):
+ """Write data to a file
+
+ Args:
+ fname (str): Filename to write to
+ data (list of str): Lines to write (with or without trailing newline);
+ or str to write
+ """
+ with open(fname, 'w', encoding='utf-8') as out:
+ if isinstance(data, list):
+ for line in data:
+ print(line.rstrip('\n'), file=out)
+ else:
+ out.write(data)
+
+def read_file(fname, as_lines=True, skip_unicode=False):
+ """Read a file and return the contents
+
+ Args:
+ fname (str): Filename to read from
+ as_lines: Return file contents as a list of lines
+ skip_unicode (bool): True to report unicode errors and continue
+
+ Returns:
+        iter of str: List of lines from the file with newline removed; str if
+ as_lines is False with newlines intact; or None if a unicode error
+ occurred
+
+ Raises:
+ UnicodeDecodeError: Unicode error occurred when reading
+ """
+ with open(fname, encoding='utf-8') as inf:
+ try:
+ if as_lines:
+ return [line.rstrip('\n') for line in inf.readlines()]
+ else:
+ return inf.read()
+ except UnicodeDecodeError as e:
+ if not skip_unicode:
+ raise
+            print("Failed on file '%s': %s" % (fname, e))
+ return None
+
+def cleanup_empty_blocks(header_path, args):
"""Clean up empty conditional blocks
- Arguments:
- header_path: path to the cleaned file.
- options: option flags.
+ Args:
+ header_path (str): path to the cleaned file.
+ args (Namespace): program arguments
"""
pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M)
- with open(header_path) as f:
- data = f.read()
+ data = read_file(header_path, as_lines=False, skip_unicode=True)
+ if data is None:
+ return
new_data = pattern.sub('\n', data)
show_diff(data.splitlines(True), new_data.splitlines(True), header_path,
- options.color)
+ args.color)
- if options.dry_run:
+ if args.dry_run:
return
- with open(header_path, 'w') as f:
- f.write(new_data)
+ if new_data != data:
+ write_file(header_path, new_data)
-def cleanup_one_header(header_path, patterns, options):
+def cleanup_one_header(header_path, patterns, args):
"""Clean regex-matched lines away from a file.
- Arguments:
+ Args:
header_path: path to the cleaned file.
patterns: list of regex patterns. Any lines matching to these
patterns are deleted.
- options: option flags.
+ args (Namespace): program arguments
"""
- with open(header_path) as f:
- lines = f.readlines()
+ lines = read_file(header_path, skip_unicode=True)
+ if lines is None:
+ return
matched = []
for i, line in enumerate(lines):
- if i - 1 in matched and lines[i - 1][-2:] == '\\\n':
+ if i - 1 in matched and lines[i - 1].endswith('\\'):
matched.append(i)
continue
for pattern in patterns:
return
# remove empty #ifdef ... #endif, successive blank lines
- pattern_if = re.compile(r'#\s*if(def|ndef)?\W') # #if, #ifdef, #ifndef
- pattern_elif = re.compile(r'#\s*el(if|se)\W') # #elif, #else
- pattern_endif = re.compile(r'#\s*endif\W') # #endif
+ pattern_if = re.compile(r'#\s*if(def|ndef)?\b') # #if, #ifdef, #ifndef
+ pattern_elif = re.compile(r'#\s*el(if|se)\b') # #elif, #else
+ pattern_endif = re.compile(r'#\s*endif\b') # #endif
pattern_blank = re.compile(r'^\s*$') # empty line
while True:
for i in reversed(matched):
tolines.pop(i)
- show_diff(lines, tolines, header_path, options.color)
+ show_diff(lines, tolines, header_path, args.color)
- if options.dry_run:
+ if args.dry_run:
return
- with open(header_path, 'w') as f:
- for line in tolines:
- f.write(line)
+ write_file(header_path, tolines)
-def cleanup_headers(configs, options):
+def cleanup_headers(configs, args):
"""Delete config defines from board headers.
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- options: option flags.
+ args (Namespace): program arguments
"""
- if not confirm(options, 'Clean up headers?'):
+ if not confirm(args, 'Clean up headers?'):
return
patterns = []
for config in configs:
- patterns.append(re.compile(r'#\s*define\s+%s\W' % config))
- patterns.append(re.compile(r'#\s*undef\s+%s\W' % config))
+ patterns.append(re.compile(r'#\s*define\s+%s\b' % config))
+ patterns.append(re.compile(r'#\s*undef\s+%s\b' % config))
for dir in 'include', 'arch', 'board':
for (dirpath, dirnames, filenames) in os.walk(dir):
if dirpath == os.path.join('include', 'generated'):
continue
for filename in filenames:
- if not fnmatch.fnmatch(filename, '*~'):
+ if not filename.endswith(('~', '.dts', '.dtsi', '.bin',
+ '.elf','.aml','.dat')):
header_path = os.path.join(dirpath, filename)
- cleanup_one_header(header_path, patterns, options)
- cleanup_empty_blocks(header_path, options)
+ # This file contains UTF-16 data and no CONFIG symbols
+ if header_path == 'include/video_font_data.h':
+ continue
+ cleanup_one_header(header_path, patterns, args)
+ cleanup_empty_blocks(header_path, args)
-def cleanup_one_extra_option(defconfig_path, configs, options):
+def cleanup_one_extra_option(defconfig_path, configs, args):
"""Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in one defconfig file.
- Arguments:
+ Args:
defconfig_path: path to the cleaned defconfig file.
configs: A list of CONFIGs to remove.
- options: option flags.
+ args (Namespace): program arguments
"""
start = 'CONFIG_SYS_EXTRA_OPTIONS="'
- end = '"\n'
+ end = '"'
- with open(defconfig_path) as f:
- lines = f.readlines()
+ lines = read_file(defconfig_path)
for i, line in enumerate(lines):
if line.startswith(start) and line.endswith(end):
else:
tolines.pop(i)
- show_diff(lines, tolines, defconfig_path, options.color)
+ show_diff(lines, tolines, defconfig_path, args.color)
- if options.dry_run:
+ if args.dry_run:
return
- with open(defconfig_path, 'w') as f:
- for line in tolines:
- f.write(line)
+ write_file(defconfig_path, tolines)
-def cleanup_extra_options(configs, options):
+def cleanup_extra_options(configs, args):
"""Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in defconfig files.
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- options: option flags.
+ args (Namespace): program arguments
"""
- if not confirm(options, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'):
+ if not confirm(args, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'):
return
configs = [ config[len('CONFIG_'):] for config in configs ]
for defconfig in defconfigs:
cleanup_one_extra_option(os.path.join('configs', defconfig), configs,
- options)
+ args)
-def cleanup_whitelist(configs, options):
+def cleanup_whitelist(configs, args):
"""Delete config whitelist entries
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- options: option flags.
+ args (Namespace): program arguments
"""
- if not confirm(options, 'Clean up whitelist entries?'):
+ if not confirm(args, 'Clean up whitelist entries?'):
return
- with open(os.path.join('scripts', 'config_whitelist.txt')) as f:
- lines = f.readlines()
+ lines = read_file(os.path.join('scripts', 'config_whitelist.txt'))
lines = [x for x in lines if x.strip() not in configs]
- with open(os.path.join('scripts', 'config_whitelist.txt'), 'w') as f:
- f.write(''.join(lines))
+ write_file(os.path.join('scripts', 'config_whitelist.txt'), lines)
def find_matching(patterns, line):
for pat in patterns:
return True
return False
-def cleanup_readme(configs, options):
+def cleanup_readme(configs, args):
"""Delete config description in README
- Arguments:
+ Args:
configs: A list of CONFIGs to remove.
- options: option flags.
+ args (Namespace): program arguments
"""
- if not confirm(options, 'Clean up README?'):
+ if not confirm(args, 'Clean up README?'):
return
patterns = []
for config in configs:
patterns.append(re.compile(r'^\s+%s' % config))
- with open('README') as f:
- lines = f.readlines()
+ lines = read_file('README')
found = False
newlines = []
if not found:
newlines.append(line)
- with open('README', 'w') as f:
- f.write(''.join(newlines))
+ write_file('README', newlines)
+
+def try_expand(line):
+ """If value looks like an expression, try expanding it
+ Otherwise just return the existing value
+ """
+ if line.find('=') == -1:
+ return line
+
+ try:
+ aeval = asteval.Interpreter( usersyms=SIZES, minimal=True )
+ cfg, val = re.split("=", line)
+ val= val.strip('\"')
+ if re.search(r'[*+-/]|<<|SZ_+|\(([^\)]+)\)', val):
+ newval = hex(aeval(val))
+ print('\tExpanded expression %s to %s' % (val, newval))
+ return cfg+'='+newval
+ except:
+ print('\tFailed to expand expression in %s' % line)
+
+ return line
### classes ###
def __init__(self, total):
"""Create a new progress indicator.
- Arguments:
+ Args:
total: A number of defconfig files to process.
"""
self.current = 0
def show(self):
"""Display the progress."""
- print ' %d defconfigs out of %d\r' % (self.current, self.total),
+ print(' %d defconfigs out of %d\r' % (self.current, self.total), end=' ')
sys.stdout.flush()
os.environ['srctree'] = os.getcwd()
os.environ['UBOOTVERSION'] = 'dummy'
os.environ['KCONFIG_OBJDIR'] = ''
- self.conf = kconfiglib.Config()
+ self.conf = kconfiglib.Kconfig()
class KconfigParser:
re_arch = re.compile(r'CONFIG_SYS_ARCH="(.*)"')
re_cpu = re.compile(r'CONFIG_SYS_CPU="(.*)"')
- def __init__(self, configs, options, build_dir):
+ def __init__(self, configs, args, build_dir):
"""Create a new parser.
- Arguments:
+ Args:
configs: A list of CONFIGs to move.
- options: option flags.
+ args (Namespace): program arguments
build_dir: Build directory.
"""
self.configs = configs
- self.options = options
+ self.args = args
self.dotconfig = os.path.join(build_dir, '.config')
self.autoconf = os.path.join(build_dir, 'include', 'autoconf.mk')
self.spl_autoconf = os.path.join(build_dir, 'spl', 'include',
"""
arch = ''
cpu = ''
- for line in open(self.dotconfig):
+ for line in read_file(self.dotconfig):
m = self.re_arch.match(line)
if m:
arch = m.group(1)
defconfig, .config, and include/autoconf.mk in order to decide
which action should be taken for this defconfig.
- Arguments:
+ Args:
config: CONFIG name to parse.
dotconfig_lines: lines from the .config file.
autoconf_lines: lines from the include/autoconf.mk file.
else:
new_val = not_set
+ new_val = try_expand(new_val)
+
for line in dotconfig_lines:
line = line.rstrip()
if line.startswith(config + '=') or line == not_set:
searching the target options.
Move the config option(s) to the .config as needed.
- Arguments:
+ Args:
defconfig: defconfig name.
Returns:
suspicious = False
rm_files = [self.config_autoconf, self.autoconf]
- if self.options.spl:
+ if self.args.spl:
if os.path.exists(self.spl_autoconf):
autoconf_path = self.spl_autoconf
rm_files.append(self.spl_autoconf)
for f in rm_files:
os.remove(f)
return (updated, suspicious,
- color_text(self.options.color, COLOR_BROWN,
+ color_text(self.args.color, COLOR_BROWN,
"SPL is not enabled. Skipped.") + '\n')
else:
autoconf_path = self.autoconf
- with open(self.dotconfig) as f:
- dotconfig_lines = f.readlines()
+ dotconfig_lines = read_file(self.dotconfig)
- with open(autoconf_path) as f:
- autoconf_lines = f.readlines()
+ autoconf_lines = read_file(autoconf_path)
for config in self.configs:
result = self.parse_one_config(config, dotconfig_lines,
actlog = "Move '%s'" % value
log_color = COLOR_LIGHT_GREEN
elif action == ACTION_NO_ENTRY:
- actlog = "%s is not defined in Kconfig. Do nothing." % value
+ actlog = '%s is not defined in Kconfig. Do nothing.' % value
log_color = COLOR_LIGHT_BLUE
elif action == ACTION_NO_ENTRY_WARN:
- actlog = "%s is not defined in Kconfig (suspicious). Do nothing." % value
+ actlog = '%s is not defined in Kconfig (suspicious). Do nothing.' % value
log_color = COLOR_YELLOW
suspicious = True
elif action == ACTION_NO_CHANGE:
actlog = "'%s' is the same as the define in Kconfig. Do nothing." \
% value
log_color = COLOR_LIGHT_PURPLE
- elif action == ACTION_SPL_NOT_EXIST:
- actlog = "SPL is not enabled for this defconfig. Skip."
- log_color = COLOR_PURPLE
else:
- sys.exit("Internal Error. This should not happen.")
+ sys.exit('Internal Error. This should not happen.')
- log += color_text(self.options.color, log_color, actlog) + '\n'
+ log += color_text(self.args.color, log_color, actlog) + '\n'
- with open(self.dotconfig, 'a') as f:
+ with open(self.dotconfig, 'a', encoding='utf-8') as out:
for (action, value) in results:
if action == ACTION_MOVE:
- f.write(value + '\n')
+ out.write(value + '\n')
updated = True
self.results = results
log = ''
- with open(self.defconfig) as f:
- defconfig_lines = f.readlines()
+ defconfig_lines = read_file(self.defconfig)
for (action, value) in self.results:
if action != ACTION_MOVE:
continue
- if not value + '\n' in defconfig_lines:
- log += color_text(self.options.color, COLOR_YELLOW,
+ if not value in defconfig_lines:
+ log += color_text(self.args.color, COLOR_YELLOW,
"'%s' was removed by savedefconfig.\n" %
value)
for faster processing.
"""
- def __init__(self, toolchains, configs, options, progress, devnull,
+ def __init__(self, toolchains, configs, args, progress, devnull,
make_cmd, reference_src_dir, db_queue):
"""Create a new process slot.
- Arguments:
+ Args:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
- options: option flags.
+ args: Program arguments
progress: A progress indicator.
devnull: A file object of '/dev/null'.
make_cmd: command name of GNU Make.
db_queue: output queue to write config info for the database
"""
self.toolchains = toolchains
- self.options = options
+ self.args = args
self.progress = progress
self.build_dir = tempfile.mkdtemp()
self.devnull = devnull
self.make_cmd = (make_cmd, 'O=' + self.build_dir)
self.reference_src_dir = reference_src_dir
self.db_queue = db_queue
- self.parser = KconfigParser(configs, options, self.build_dir)
+ self.parser = KconfigParser(configs, args, self.build_dir)
self.state = STATE_IDLE
self.failed_boards = set()
self.suspicious_boards = set()
given defconfig and add it to the slot. Just returns False if
the slot is occupied (i.e. the current subprocess is still running).
- Arguments:
+ Args:
defconfig: defconfig name.
Returns:
if self.current_src_dir:
self.current_src_dir = None
self.do_defconfig()
- elif self.options.build_db:
+ elif self.args.build_db:
self.do_build_db()
else:
self.do_savedefconfig()
elif self.state == STATE_SAVEDEFCONFIG:
self.update_defconfig()
else:
- sys.exit("Internal Error. This should not happen.")
+ sys.exit('Internal Error. This should not happen.')
return True if self.state == STATE_IDLE else False
def handle_error(self):
"""Handle error cases."""
- self.log += color_text(self.options.color, COLOR_LIGHT_RED,
- "Failed to process.\n")
- if self.options.verbose:
- self.log += color_text(self.options.color, COLOR_LIGHT_CYAN,
- self.ps.stderr.read())
+ self.log += color_text(self.args.color, COLOR_LIGHT_RED,
+ 'Failed to process.\n')
+ if self.args.verbose:
+ self.log += color_text(self.args.color, COLOR_LIGHT_CYAN,
+ self.ps.stderr.read().decode())
self.finish(False)
def do_defconfig(self):
try:
toolchain = self.toolchains.Select(arch)
except ValueError:
- self.log += color_text(self.options.color, COLOR_YELLOW,
+ self.log += color_text(self.args.color, COLOR_YELLOW,
"Tool chain for '%s' is missing. Do nothing.\n" % arch)
self.finish(False)
return
- env = toolchain.MakeEnvironment(False)
+ env = toolchain.MakeEnvironment(False)
cmd = list(self.make_cmd)
cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
def do_build_db(self):
"""Add the board to the database"""
configs = {}
- with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
- for line in fd.readlines():
- if line.startswith('CONFIG'):
- config, value = line.split('=', 1)
- configs[config] = value.rstrip()
+ for line in read_file(os.path.join(self.build_dir, AUTO_CONF_PATH)):
+ if line.startswith('CONFIG'):
+ config, value = line.split('=', 1)
+ configs[config] = value.rstrip()
self.db_queue.put([self.defconfig, configs])
self.finish(True)
self.suspicious_boards.add(self.defconfig)
self.log += log
- if not self.options.force_sync and not updated:
+ if not self.args.force_sync and not updated:
self.finish(True)
return
if updated:
- self.log += color_text(self.options.color, COLOR_LIGHT_GREEN,
- "Syncing by savedefconfig...\n")
+ self.log += color_text(self.args.color, COLOR_LIGHT_GREEN,
+ 'Syncing by savedefconfig...\n')
else:
- self.log += "Syncing by savedefconfig (forced by option)...\n"
+ self.log += 'Syncing by savedefconfig (forced by option)...\n'
cmd = list(self.make_cmd)
cmd.append('savedefconfig')
updated = not filecmp.cmp(orig_defconfig, new_defconfig)
if updated:
- self.log += color_text(self.options.color, COLOR_LIGHT_BLUE,
- "defconfig was updated.\n")
+ self.log += color_text(self.args.color, COLOR_LIGHT_BLUE,
+ 'defconfig was updated.\n')
- if not self.options.dry_run and updated:
+ if not self.args.dry_run and updated:
shutil.move(new_defconfig, orig_defconfig)
self.finish(True)
def finish(self, success):
"""Display log along with progress and go to the idle state.
- Arguments:
+ Args:
success: Should be True when the defconfig was processed
successfully, or False when it fails.
"""
log += '\n'.join([ ' ' + s for s in self.log.split('\n') ])
# Some threads are running in parallel.
# Print log atomically to not mix up logs from different threads.
- print >> (sys.stdout if success else sys.stderr), log
+ print(log, file=(sys.stdout if success else sys.stderr))
if not success:
- if self.options.exit_on_error:
- sys.exit("Exit on error.")
+ if self.args.exit_on_error:
+ sys.exit('Exit on error.')
# If --exit-on-error flag is not set, skip this board and continue.
# Record the failed board.
self.failed_boards.add(self.defconfig)
"""Controller of the array of subprocess slots."""
- def __init__(self, toolchains, configs, options, progress,
+ def __init__(self, toolchains, configs, args, progress,
reference_src_dir, db_queue):
"""Create a new slots controller.
- Arguments:
+ Args:
toolchains: Toolchains object containing toolchains.
configs: A list of CONFIGs to move.
- options: option flags.
+ args: Program arguments
progress: A progress indicator.
reference_src_dir: Determine the true starting config state from this
source tree.
db_queue: output queue to write config info for the database
"""
- self.options = options
+ self.args = args
self.slots = []
- devnull = get_devnull()
+ devnull = subprocess.DEVNULL
make_cmd = get_make_cmd()
- for i in range(options.jobs):
- self.slots.append(Slot(toolchains, configs, options, progress,
+ for _ in range(args.jobs):
+ self.slots.append(Slot(toolchains, configs, args, progress,
devnull, make_cmd, reference_src_dir,
db_queue))
def add(self, defconfig):
"""Add a new subprocess if a vacant slot is found.
- Arguments:
+ Args:
defconfig: defconfig name to be put into.
Returns:
if boards:
boards = '\n'.join(boards) + '\n'
- msg = "The following boards were not processed due to error:\n"
+ msg = 'The following boards were not processed due to error:\n'
msg += boards
- msg += "(the list has been saved in %s)\n" % output_file
- print >> sys.stderr, color_text(self.options.color, COLOR_LIGHT_RED,
- msg)
+ msg += '(the list has been saved in %s)\n' % output_file
+ print(color_text(self.args.color, COLOR_LIGHT_RED,
+ msg), file=sys.stderr)
- with open(output_file, 'w') as f:
- f.write(boards)
+ write_file(output_file, boards)
def show_suspicious_boards(self):
"""Display all boards (defconfigs) with possible misconversion."""
if boards:
boards = '\n'.join(boards) + '\n'
- msg = "The following boards might have been converted incorrectly.\n"
- msg += "It is highly recommended to check them manually:\n"
+ msg = 'The following boards might have been converted incorrectly.\n'
+ msg += 'It is highly recommended to check them manually:\n'
msg += boards
- msg += "(the list has been saved in %s)\n" % output_file
- print >> sys.stderr, color_text(self.options.color, COLOR_YELLOW,
- msg)
+ msg += '(the list has been saved in %s)\n' % output_file
+ print(color_text(self.args.color, COLOR_YELLOW,
+ msg), file=sys.stderr)
- with open(output_file, 'w') as f:
- f.write(boards)
+ write_file(output_file, boards)
class ReferenceSource:
def __init__(self, commit):
"""Create a reference source directory based on a specified commit.
- Arguments:
+ Args:
commit: commit to git-clone
"""
self.src_dir = tempfile.mkdtemp()
- print "Cloning git repo to a separate work directory..."
+ print('Cloning git repo to a separate work directory...')
subprocess.check_output(['git', 'clone', os.getcwd(), '.'],
cwd=self.src_dir)
- print "Checkout '%s' to build the original autoconf.mk." % \
- subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip()
+ print("Checkout '%s' to build the original autoconf.mk." % \
+ subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip())
subprocess.check_output(['git', 'checkout', commit],
stderr=subprocess.STDOUT, cwd=self.src_dir)
return self.src_dir
-def move_config(toolchains, configs, options, db_queue):
+def move_config(toolchains, configs, args, db_queue):
"""Move config options to defconfig files.
- Arguments:
+ Args:
configs: A list of CONFIGs to move.
- options: option flags
+ args: Program arguments
"""
if len(configs) == 0:
- if options.force_sync:
- print 'No CONFIG is specified. You are probably syncing defconfigs.',
- elif options.build_db:
- print 'Building %s database' % CONFIG_DATABASE
+ if args.force_sync:
+ print('No CONFIG is specified. You are probably syncing defconfigs.', end=' ')
+ elif args.build_db:
+ print('Building %s database' % CONFIG_DATABASE)
else:
- print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.',
+ print('Neither CONFIG nor --force-sync is specified. Nothing will happen.', end=' ')
else:
- print 'Move ' + ', '.join(configs),
- print '(jobs: %d)\n' % options.jobs
+ print('Move ' + ', '.join(configs), end=' ')
+ print('(jobs: %d)\n' % args.jobs)
- if options.git_ref:
- reference_src = ReferenceSource(options.git_ref)
+ if args.git_ref:
+ reference_src = ReferenceSource(args.git_ref)
reference_src_dir = reference_src.get_dir()
else:
reference_src_dir = None
- if options.defconfigs:
- defconfigs = get_matched_defconfigs(options.defconfigs)
+ if args.defconfigs:
+ defconfigs = get_matched_defconfigs(args.defconfigs)
else:
defconfigs = get_all_defconfigs()
progress = Progress(len(defconfigs))
- slots = Slots(toolchains, configs, options, progress, reference_src_dir,
- db_queue)
+ slots = Slots(toolchains, configs, args, progress, reference_src_dir,
+ db_queue)
# Main loop to process defconfig files:
# Add a new subprocess into a vacant slot.
while not slots.empty():
time.sleep(SLEEP_TIME)
- print ''
+ print('')
slots.show_failed_boards()
slots.show_suspicious_boards()
"""Check whether a config has a 'select' or 'imply' keyword
Args:
- kconf: Kconfig.Config object
+ kconf: Kconfiglib.Kconfig object
config: Name of config to check (without CONFIG_ prefix)
imply_config: Implying config (without CONFIG_ prefix) which may or
may not have an 'imply' for 'config')
Returns:
Symbol object for 'config' if found, else None
"""
- sym = kconf.get_symbol(imply_config)
+ sym = kconf.syms.get(imply_config)
if sym:
- for sel in sym.get_selected_symbols() | sym.get_implied_symbols():
- if sel.get_name() == config:
+ for sel, _cond in (sym.selects + sym.implies):
+ if sel.name == config:
return sym
return None
to add an 'imply' for 'config' to that part of the Kconfig.
Args:
- kconf: Kconfig.Config object
+ kconf: Kconfiglib.Kconfig object
config: Name of config to check (without CONFIG_ prefix)
imply_config: Implying config (without CONFIG_ prefix) which may or
may not have an 'imply' for 'config')
line number within the Kconfig file, or 0 if none
message indicating the result
"""
- sym = kconf.get_symbol(imply_config)
+ sym = kconf.syms.get(imply_config)
if not sym:
return 'cannot find sym'
- locs = sym.get_def_locations()
- if len(locs) != 1:
- return '%d locations' % len(locs)
- fname, linenum = locs[0]
+ nodes = sym.nodes
+ if len(nodes) != 1:
+ return '%d locations' % len(nodes)
+ node = nodes[0]
+ fname, linenum = node.filename, node.linenr
cwd = os.getcwd()
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
file_line = ' at %s:%d' % (fname, linenum)
- with open(fname) as fd:
- data = fd.read().splitlines()
+ data = read_file(fname)
if data[linenum - 1] != 'config %s' % imply_config:
return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
return fname, linenum, 'adding%s' % file_line
Message indicating the result
"""
file_line = ' at %s:%d' % (fname, linenum)
- data = open(fname).read().splitlines()
+ data = read_file(fname)
linenum -= 1
for offset, line in enumerate(data[linenum:]):
if line.strip().startswith('help') or not line:
data.insert(linenum + offset, '\timply %s' % config)
- with open(fname, 'w') as fd:
- fd.write('\n'.join(data) + '\n')
+ write_file(fname, data)
return 'added%s' % file_line
return 'could not insert%s'
'non-arch-board': [
IMPLY_NON_ARCH_BOARD,
'Allow Kconfig options outside arch/ and /board/ to imply'],
-};
+}
+
+
+def read_database():
+ """Read in the config database
+
+ Returns:
+ tuple:
+ set of all config options seen (each a str)
+ set of all defconfigs seen (each a str)
+ dict of configs for each defconfig:
+ key: defconfig name, e.g. "MPC8548CDS_legacy_defconfig"
+ value: dict:
+ key: CONFIG option
+ value: Value of option
+ dict of defconfigs for each config:
+ key: CONFIG option
+ value: set of boards using that option
+
+ """
+ configs = {}
+
+ # key is defconfig name, value is dict of (CONFIG_xxx, value)
+ config_db = {}
+
+ # Set of all config options we have seen
+ all_configs = set()
+
+ # Set of all defconfigs we have seen
+ all_defconfigs = set()
+
+ defconfig_db = collections.defaultdict(set)
+ for line in read_file(CONFIG_DATABASE):
+ line = line.rstrip()
+ if not line: # Separator between defconfigs
+ config_db[defconfig] = configs
+ all_defconfigs.add(defconfig)
+ configs = {}
+ elif line[0] == ' ': # CONFIG line
+ config, value = line.strip().split('=', 1)
+ configs[config] = value
+ defconfig_db[config].add(defconfig)
+ all_configs.add(config)
+ else: # New defconfig
+ defconfig = line
+
+ return all_configs, all_defconfigs, config_db, defconfig_db
+
def do_imply_config(config_list, add_imply, imply_flags, skip_added,
check_kconfig=True, find_superset=False):
"""
kconf = KconfigScanner().conf if check_kconfig else None
if add_imply and add_imply != 'all':
- add_imply = add_imply.split()
-
- # key is defconfig name, value is dict of (CONFIG_xxx, value)
- config_db = {}
-
- # Holds a dict containing the set of defconfigs that contain each config
- # key is config, value is set of defconfigs using that config
- defconfig_db = collections.defaultdict(set)
+ add_imply = add_imply.split(',')
- # Set of all config options we have seen
- all_configs = set()
-
- # Set of all defconfigs we have seen
- all_defconfigs = set()
+ all_configs, all_defconfigs, config_db, defconfig_db = read_database()
- # Read in the database
- configs = {}
- with open(CONFIG_DATABASE) as fd:
- for line in fd.readlines():
- line = line.rstrip()
- if not line: # Separator between defconfigs
- config_db[defconfig] = configs
- all_defconfigs.add(defconfig)
- configs = {}
- elif line[0] == ' ': # CONFIG line
- config, value = line.strip().split('=', 1)
- configs[config] = value
- defconfig_db[config].add(defconfig)
- all_configs.add(config)
- else: # New defconfig
- defconfig = line
-
- # Work through each target config option in tern, independently
+ # Work through each target config option in turn, independently
for config in config_list:
defconfigs = defconfig_db.get(config)
if not defconfigs:
- print '%s not found in any defconfig' % config
+ print('%s not found in any defconfig' % config)
continue
# Get the set of defconfigs without this one (since a config cannot
# imply itself)
non_defconfigs = all_defconfigs - defconfigs
num_defconfigs = len(defconfigs)
- print '%s found in %d/%d defconfigs' % (config, num_defconfigs,
- len(all_configs))
+ print('%s found in %d/%d defconfigs' % (config, num_defconfigs,
+ len(all_configs)))
# This will hold the results: key=config, value=defconfigs containing it
imply_configs = {}
for imply_config in rest_configs:
if 'ERRATUM' in imply_config:
continue
- if not (imply_flags & IMPLY_CMD):
+ if not imply_flags & IMPLY_CMD:
if 'CONFIG_CMD' in imply_config:
continue
- if not (imply_flags & IMPLY_TARGET):
+ if not imply_flags & IMPLY_TARGET:
if 'CONFIG_TARGET' in imply_config:
continue
if common_defconfigs:
skip = False
if find_superset:
- for prev in imply_configs.keys():
+ for prev in list(imply_configs.keys()):
prev_count = len(imply_configs[prev])
count = len(common_defconfigs)
if (prev_count > count and
iconfig[CONFIG_LEN:])
kconfig_info = ''
if sym:
- locs = sym.get_def_locations()
- if len(locs) == 1:
- fname, linenum = locs[0]
+ nodes = sym.nodes
+ if len(nodes) == 1:
+ fname, linenum = nodes[0].filename, nodes[0].linenr
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
kconfig_info = '%s:%d' % (fname, linenum)
if skip_added:
show = False
else:
- sym = kconf.get_symbol(iconfig[CONFIG_LEN:])
+ sym = kconf.syms.get(iconfig[CONFIG_LEN:])
fname = ''
if sym:
- locs = sym.get_def_locations()
- if len(locs) == 1:
- fname, linenum = locs[0]
+ nodes = sym.nodes
+ if len(nodes) == 1:
+ fname, linenum = nodes[0].filename, nodes[0].linenr
if cwd and fname.startswith(cwd):
fname = fname[len(cwd) + 1:]
in_arch_board = not sym or (fname.startswith('arch') or
fname.startswith('board'))
if (not in_arch_board and
- not (imply_flags & IMPLY_NON_ARCH_BOARD)):
+ not imply_flags & IMPLY_NON_ARCH_BOARD):
continue
if add_imply and (add_imply == 'all' or
add_list[fname].append(linenum)
if show and kconfig_info != 'skip':
- print '%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
- kconfig_info, missing_str)
+ print('%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
+ kconfig_info, missing_str))
# Having collected a list of things to add, now we add them. We process
# each file from the largest line number to the smallest so that
# earlier additions do not affect our line numbers. E.g. if we added an
# imply at line 20 it would change the position of each line after
# that.
- for fname, linenums in add_list.iteritems():
+ for fname, linenums in add_list.items():
for linenum in sorted(linenums, reverse=True):
add_imply_rule(config[CONFIG_LEN:], fname, linenum)
+def defconfig_matches(configs, re_match):
+ """Check if any CONFIG option matches a regex
+
+ The match must be complete, i.e. from the start to end of the CONFIG option.
+
+ Args:
+ configs (dict): Dict of CONFIG options:
+ key: CONFIG option
+ value: Value of option
+ re_match (re.Pattern): Match to check
+
+ Returns:
+ bool: True if any CONFIG matches the regex
+ """
+ for cfg in configs:
+ if re_match.fullmatch(cfg):
+ return True
+ return False
+
+def do_find_config(config_list):
+ """Find boards with a given combination of CONFIGs
+
+ Args:
+ config_list: List of CONFIG options to check (each a regex consisting
+ of a config option, with or without a CONFIG_ prefix. If an option
+ is preceded by a tilde (~) then it must be false, otherwise it must
+ be true)
+ """
+ all_configs, all_defconfigs, config_db, defconfig_db = read_database()
+
+ # Get the whitelist
+ adhoc_configs = set(read_file('scripts/config_whitelist.txt'))
+
+ # Start with all defconfigs
+ out = all_defconfigs
+
+ # Work through each config in turn
+ adhoc = []
+ for item in config_list:
+ # Get the real config name and whether we want this config or not
+ cfg = item
+ want = True
+ if cfg[0] == '~':
+ want = False
+ cfg = cfg[1:]
+
+ if cfg in adhoc_configs:
+ adhoc.append(cfg)
+ continue
+
+ # Search everything that is still in the running. If it has a config
+ # that we want, or doesn't have one that we don't, add it into the
+ # running for the next stage
+ in_list = out
+ out = set()
+ re_match = re.compile(cfg)
+ for defc in in_list:
+ has_cfg = defconfig_matches(config_db[defc], re_match)
+ if has_cfg == want:
+ out.add(defc)
+ if adhoc:
+ print(f"Error: Not in Kconfig: {' '.join(adhoc)}")
+ else:
+ print(f'{len(out)} matches')
+ print(' '.join(item.split('_defconfig')[0] for item in out))
+
+
+def prefix_config(cfg):
+ """Prefix a config with CONFIG_ if needed
+
+ This handles ~ operator, which indicates that the CONFIG should be disabled
+
+ >>> prefix_config('FRED')
+ 'CONFIG_FRED'
+ >>> prefix_config('CONFIG_FRED')
+ 'CONFIG_FRED'
+ >>> prefix_config('~FRED')
+ '~CONFIG_FRED'
+ >>> prefix_config('~CONFIG_FRED')
+ '~CONFIG_FRED'
+ >>> prefix_config('A123')
+ 'CONFIG_A123'
+ """
+ op = ''
+ if cfg[0] == '~':
+ op = cfg[0]
+ cfg = cfg[1:]
+ if not cfg.startswith('CONFIG_'):
+ cfg = 'CONFIG_' + cfg
+ return op + cfg
+
def main():
try:
except NotImplementedError:
cpu_count = 1
- parser = optparse.OptionParser()
- # Add options here
- parser.add_option('-a', '--add-imply', type='string', default='',
+ epilog = '''Move config options from headers to defconfig files. See
+doc/develop/moveconfig.rst for documentation.'''
+
+ parser = ArgumentParser(epilog=epilog)
+ # Add arguments here
+ parser.add_argument('-a', '--add-imply', type=str, default='',
help='comma-separated list of CONFIG options to add '
"an 'imply' statement to for the CONFIG in -i")
- parser.add_option('-A', '--skip-added', action='store_true', default=False,
+ parser.add_argument('-A', '--skip-added', action='store_true', default=False,
help="don't show options which are already marked as "
'implying others')
- parser.add_option('-b', '--build-db', action='store_true', default=False,
+ parser.add_argument('-b', '--build-db', action='store_true', default=False,
help='build a CONFIG database')
- parser.add_option('-c', '--color', action='store_true', default=False,
+ parser.add_argument('-c', '--color', action='store_true', default=False,
help='display the log in color')
- parser.add_option('-C', '--commit', action='store_true', default=False,
+ parser.add_argument('-C', '--commit', action='store_true', default=False,
help='Create a git commit for the operation')
- parser.add_option('-d', '--defconfigs', type='string',
+ parser.add_argument('-d', '--defconfigs', type=str,
help='a file containing a list of defconfigs to move, '
"one per line (for example 'snow_defconfig') "
"or '-' to read from stdin")
- parser.add_option('-i', '--imply', action='store_true', default=False,
- help='find options which imply others')
- parser.add_option('-I', '--imply-flags', type='string', default='',
- help="control the -i option ('help' for help")
- parser.add_option('-n', '--dry-run', action='store_true', default=False,
- help='perform a trial run (show log with no changes)')
- parser.add_option('-e', '--exit-on-error', action='store_true',
+ parser.add_argument('-e', '--exit-on-error', action='store_true',
default=False,
help='exit immediately on any error')
- parser.add_option('-s', '--force-sync', action='store_true', default=False,
- help='force sync by savedefconfig')
- parser.add_option('-S', '--spl', action='store_true', default=False,
- help='parse config options defined for SPL build')
- parser.add_option('-H', '--headers-only', dest='cleanup_headers_only',
+ parser.add_argument('-f', '--find', action='store_true', default=False,
+ help='Find boards with a given config combination')
+ parser.add_argument('-H', '--headers-only', dest='cleanup_headers_only',
action='store_true', default=False,
help='only cleanup the headers')
- parser.add_option('-j', '--jobs', type='int', default=cpu_count,
+ parser.add_argument('-i', '--imply', action='store_true', default=False,
+ help='find options which imply others')
+ parser.add_argument('-I', '--imply-flags', type=str, default='',
+ help="control the -i option ('help' for help)")
+ parser.add_argument('-j', '--jobs', type=int, default=cpu_count,
help='the number of jobs to run simultaneously')
- parser.add_option('-r', '--git-ref', type='string',
+ parser.add_argument('-n', '--dry-run', action='store_true', default=False,
+ help='perform a trial run (show log with no changes)')
+ parser.add_argument('-r', '--git-ref', type=str,
help='the git ref to clone for building the autoconf.mk')
- parser.add_option('-y', '--yes', action='store_true', default=False,
+ parser.add_argument('-s', '--force-sync', action='store_true', default=False,
+ help='force sync by savedefconfig')
+ parser.add_argument('-S', '--spl', action='store_true', default=False,
+ help='parse config options defined for SPL build')
+ parser.add_argument('-t', '--test', action='store_true', default=False,
+ help='run unit tests')
+ parser.add_argument('-y', '--yes', action='store_true', default=False,
help="respond 'yes' to any prompts")
- parser.add_option('-v', '--verbose', action='store_true', default=False,
+ parser.add_argument('-v', '--verbose', action='store_true', default=False,
help='show any build errors as boards are built')
- parser.usage += ' CONFIG ...'
+ parser.add_argument('configs', nargs='*')
- (options, configs) = parser.parse_args()
+ args = parser.parse_args()
+ configs = args.configs
- if len(configs) == 0 and not any((options.force_sync, options.build_db,
- options.imply)):
+ if args.test:
+ sys.argv = [sys.argv[0]]
+ fail, count = doctest.testmod()
+ if fail:
+ return 1
+ unittest.main()
+
+ if not any((len(configs), args.force_sync, args.build_db, args.imply,
+ args.find)):
parser.print_usage()
sys.exit(1)
# prefix the option name with CONFIG_ if missing
- configs = [ config if config.startswith('CONFIG_') else 'CONFIG_' + config
- for config in configs ]
+ configs = [prefix_config(cfg) for cfg in configs]
check_top_directory()
- if options.imply:
+ if args.imply:
imply_flags = 0
- if options.imply_flags == 'all':
+ if args.imply_flags == 'all':
imply_flags = -1
- elif options.imply_flags:
- for flag in options.imply_flags.split(','):
+ elif args.imply_flags:
+ for flag in args.imply_flags.split(','):
bad = flag not in IMPLY_FLAGS
if bad:
- print "Invalid flag '%s'" % flag
+ print("Invalid flag '%s'" % flag)
if flag == 'help' or bad:
- print "Imply flags: (separate with ',')"
- for name, info in IMPLY_FLAGS.iteritems():
- print ' %-15s: %s' % (name, info[1])
+ print("Imply flags: (separate with ',')")
+ for name, info in IMPLY_FLAGS.items():
+ print(' %-15s: %s' % (name, info[1]))
parser.print_usage()
sys.exit(1)
imply_flags |= IMPLY_FLAGS[flag][0]
- do_imply_config(configs, options.add_imply, imply_flags,
- options.skip_added)
+ do_imply_config(configs, args.add_imply, imply_flags, args.skip_added)
+ return
+
+ if args.find:
+ do_find_config(configs)
return
config_db = {}
- db_queue = Queue.Queue()
+ db_queue = queue.Queue()
t = DatabaseThread(config_db, db_queue)
t.setDaemon(True)
t.start()
- if not options.cleanup_headers_only:
+ if not args.cleanup_headers_only:
check_clean_directory()
- bsettings.Setup('')
+ bsettings.Setup('')
toolchains = toolchain.Toolchains()
toolchains.GetSettings()
toolchains.Scan(verbose=False)
- move_config(toolchains, configs, options, db_queue)
+ move_config(toolchains, configs, args, db_queue)
db_queue.join()
if configs:
- cleanup_headers(configs, options)
- cleanup_extra_options(configs, options)
- cleanup_whitelist(configs, options)
- cleanup_readme(configs, options)
+ cleanup_headers(configs, args)
+ cleanup_extra_options(configs, args)
+ cleanup_whitelist(configs, args)
+ cleanup_readme(configs, args)
- if options.commit:
+ if args.commit:
subprocess.call(['git', 'add', '-u'])
if configs:
msg = 'Convert %s %sto Kconfig' % (configs[0],
msg += '\n\nRsync all defconfig files using moveconfig.py'
subprocess.call(['git', 'commit', '-s', '-m', msg])
- if options.build_db:
- with open(CONFIG_DATABASE, 'w') as fd:
- for defconfig, configs in config_db.iteritems():
+ if args.build_db:
+ with open(CONFIG_DATABASE, 'w', encoding='utf-8') as fd:
+ for defconfig, configs in config_db.items():
fd.write('%s\n' % defconfig)
for config in sorted(configs.keys()):
fd.write(' %s=%s\n' % (config, configs[config]))
fd.write('\n')
if __name__ == '__main__':
- main()
+ sys.exit(main())