From 25f978cb1c55d2ff52db2aeaa1eacd84eec115db Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 1 Jun 2017 19:38:58 -0600 Subject: moveconfig: Support providing a path to the defconfig files It is convenient to provide the full patch to the defconfig files in some situations, e.g. when the file was generated by a shell command (e.g. 'ls configs/zynq*'). Add support for this, and move the globbing code into a function with its own documentation. Signed-off-by: Simon Glass --- tools/moveconfig.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 7aa96120a1d..390aac6f74e 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -278,6 +278,24 @@ def get_make_cmd(): sys.exit('GNU Make not found') return ret[0].rstrip() +def get_matched_defconfig(line): + """Get the defconfig files that match a pattern + + Args: + line: Path or filename to match, e.g. 'configs/snow_defconfig' or + 'k2*_defconfig'. 
If no directory is provided, 'configs/' is + prepended + + Returns: + a list of matching defconfig files + """ + dirname = os.path.dirname(line) + if dirname: + pattern = line + else: + pattern = os.path.join('configs', line) + return glob.glob(pattern) + glob.glob(pattern + '_defconfig') + def get_matched_defconfigs(defconfigs_file): """Get all the defconfig files that match the patterns in a file.""" defconfigs = [] @@ -285,8 +303,7 @@ def get_matched_defconfigs(defconfigs_file): line = line.strip() if not line: continue # skip blank lines silently - pattern = os.path.join('configs', line) - matched = glob.glob(pattern) + glob.glob(pattern + '_defconfig') + matched = get_matched_defconfig(line) if not matched: print >> sys.stderr, "warning: %s:%d: no defconfig matched '%s'" % \ (defconfigs_file, i + 1, line) -- cgit v1.2.3 From ee4e61bda4b5187d2e098accbc166b9f09c94814 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 1 Jun 2017 19:38:59 -0600 Subject: moveconfig: Allow reading the defconfig list from stdin Support passes in a defconfig filename of '-' to read the list from stdin instead of from a file. Signed-off-by: Simon Glass --- tools/moveconfig.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 390aac6f74e..c581fb6a0df 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -297,9 +297,22 @@ def get_matched_defconfig(line): return glob.glob(pattern) + glob.glob(pattern + '_defconfig') def get_matched_defconfigs(defconfigs_file): - """Get all the defconfig files that match the patterns in a file.""" + """Get all the defconfig files that match the patterns in a file. 
+ + Args: + defconfigs_file: File containing a list of defconfigs to process, or + '-' to read the list from stdin + + Returns: + A list of paths to defconfig files, with no duplicates + """ defconfigs = [] - for i, line in enumerate(open(defconfigs_file)): + if defconfigs_file == '-': + fd = sys.stdin + defconfigs_file = 'stdin' + else: + fd = open(defconfigs_file) + for i, line in enumerate(fd): line = line.strip() if not line: continue # skip blank lines silently @@ -1328,7 +1341,9 @@ def main(): parser.add_option('-C', '--commit', action='store_true', default=False, help='Create a git commit for the operation') parser.add_option('-d', '--defconfigs', type='string', - help='a file containing a list of defconfigs to move') + help='a file containing a list of defconfigs to move, ' + "one per line (for example 'snow_defconfig') " + "or '-' to read from stdin") parser.add_option('-n', '--dry-run', action='store_true', default=False, help='perform a trial run (show log with no changes)') parser.add_option('-e', '--exit-on-error', action='store_true', -- cgit v1.2.3 From ddf91c64233e2a62cc7d70b3a22035a4161e418e Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 1 Jun 2017 19:39:00 -0600 Subject: moveconfig: Tidy up the documentation and add hints The newest clean-up features are not mentioned in the docs. Fix this and add a few hints for particular workflows that are hopefully helpful. Signed-off-by: Simon Glass --- tools/moveconfig.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index c581fb6a0df..a6d473df12a 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -115,6 +115,23 @@ use your own. Instead of modifying the list directly, you can give them via environments. 
Tips and tricks
Signed-off-by: Simon Glass --- tools/moveconfig.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index a6d473df12a..eb7ada28382 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -262,6 +262,9 @@ COLOR_LIGHT_PURPLE = '1;35' COLOR_LIGHT_CYAN = '1;36' COLOR_WHITE = '1;37' +AUTO_CONF_PATH = 'include/config/auto.conf' + + ### helper functions ### def get_devnull(): """Get the file object of '/dev/null' device.""" @@ -751,8 +754,7 @@ class KconfigParser: self.autoconf = os.path.join(build_dir, 'include', 'autoconf.mk') self.spl_autoconf = os.path.join(build_dir, 'spl', 'include', 'autoconf.mk') - self.config_autoconf = os.path.join(build_dir, 'include', 'config', - 'auto.conf') + self.config_autoconf = os.path.join(build_dir, AUTO_CONF_PATH) self.defconfig = os.path.join(build_dir, 'defconfig') def get_cross_compile(self): @@ -1070,7 +1072,7 @@ class Slot: self.state = STATE_DEFCONFIG def do_autoconf(self): - """Run 'make include/config/auto.conf'.""" + """Run 'make AUTO_CONF_PATH'.""" self.cross_compile = self.parser.get_cross_compile() if self.cross_compile is None: @@ -1083,7 +1085,7 @@ class Slot: if self.cross_compile: cmd.append('CROSS_COMPILE=%s' % self.cross_compile) cmd.append('KCONFIG_IGNORE_DUPLICATES=1') - cmd.append('include/config/auto.conf') + cmd.append(AUTO_CONF_PATH) self.ps = subprocess.Popen(cmd, stdout=self.devnull, stderr=subprocess.PIPE, cwd=self.current_src_dir) -- cgit v1.2.3 From d73fcb12e27277d5b3e80399b4b3ec41abcd80d2 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 1 Jun 2017 19:39:02 -0600 Subject: moveconfig: Support building a simple config database Add a -b option which scans all the defconfigs and builds a database of all the CONFIG options used by each. This is useful for querying later. At present this only works with the separate -b option, which does not move any configs. 
It would be possible to adjust the script to build the database automatically when moving configs, but this might not be useful as the database does not change that often. Signed-off-by: Simon Glass --- tools/moveconfig.py | 81 ++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 74 insertions(+), 7 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index eb7ada28382..00101ba43d8 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -203,11 +203,13 @@ import glob import multiprocessing import optparse import os +import Queue import re import shutil import subprocess import sys import tempfile +import threading import time SHOW_GNU_MAKE = 'scripts/show-gnu-make' @@ -263,6 +265,7 @@ COLOR_LIGHT_CYAN = '1;36' COLOR_WHITE = '1;37' AUTO_CONF_PATH = 'include/config/auto.conf' +CONFIG_DATABASE = 'moveconfig.db' ### helper functions ### @@ -940,6 +943,34 @@ class KconfigParser: return log + +class DatabaseThread(threading.Thread): + """This thread processes results from Slot threads. + + It collects the data in the master config directary. There is only one + result thread, and this helps to serialise the build output. + """ + def __init__(self, config_db, db_queue): + """Set up a new result thread + + Args: + builder: Builder which will be sent each result + """ + threading.Thread.__init__(self) + self.config_db = config_db + self.db_queue= db_queue + + def run(self): + """Called to start up the result thread. + + We collect the next result job and pass it on to the build. + """ + while True: + defconfig, configs = self.db_queue.get() + self.config_db[defconfig] = configs + self.db_queue.task_done() + + class Slot: """A slot to store a subprocess. @@ -949,7 +980,8 @@ class Slot: for faster processing. 
""" - def __init__(self, configs, options, progress, devnull, make_cmd, reference_src_dir): + def __init__(self, configs, options, progress, devnull, make_cmd, + reference_src_dir, db_queue): """Create a new process slot. Arguments: @@ -960,6 +992,7 @@ class Slot: make_cmd: command name of GNU Make. reference_src_dir: Determine the true starting config state from this source tree. + db_queue: output queue to write config info for the database """ self.options = options self.progress = progress @@ -967,6 +1000,7 @@ class Slot: self.devnull = devnull self.make_cmd = (make_cmd, 'O=' + self.build_dir) self.reference_src_dir = reference_src_dir + self.db_queue = db_queue self.parser = KconfigParser(configs, options, self.build_dir) self.state = STATE_IDLE self.failed_boards = set() @@ -1042,6 +1076,8 @@ class Slot: if self.current_src_dir: self.current_src_dir = None self.do_defconfig() + elif self.options.build_db: + self.do_build_db() else: self.do_savedefconfig() elif self.state == STATE_SAVEDEFCONFIG: @@ -1091,6 +1127,17 @@ class Slot: cwd=self.current_src_dir) self.state = STATE_AUTOCONF + def do_build_db(self): + """Add the board to the database""" + configs = {} + with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd: + for line in fd.readlines(): + if line.startswith('CONFIG'): + config, value = line.split('=', 1) + configs[config] = value.rstrip() + self.db_queue.put([self.defconfig, configs]) + self.finish(True) + def do_savedefconfig(self): """Update the .config and run 'make savedefconfig'.""" @@ -1173,7 +1220,7 @@ class Slots: """Controller of the array of subprocess slots.""" - def __init__(self, configs, options, progress, reference_src_dir): + def __init__(self, configs, options, progress, reference_src_dir, db_queue): """Create a new slots controller. Arguments: @@ -1182,6 +1229,7 @@ class Slots: progress: A progress indicator. reference_src_dir: Determine the true starting config state from this source tree. 
+ db_queue: output queue to write config info for the database """ self.options = options self.slots = [] @@ -1189,7 +1237,7 @@ class Slots: make_cmd = get_make_cmd() for i in range(options.jobs): self.slots.append(Slot(configs, options, progress, devnull, - make_cmd, reference_src_dir)) + make_cmd, reference_src_dir, db_queue)) def add(self, defconfig): """Add a new subprocess if a vacant slot is found. @@ -1301,7 +1349,7 @@ class ReferenceSource: return self.src_dir -def move_config(configs, options): +def move_config(configs, options, db_queue): """Move config options to defconfig files. Arguments: @@ -1311,6 +1359,8 @@ def move_config(configs, options): if len(configs) == 0: if options.force_sync: print 'No CONFIG is specified. You are probably syncing defconfigs.', + elif options.build_db: + print 'Building %s database' % CONFIG_DATABASE else: print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.', else: @@ -1329,7 +1379,7 @@ def move_config(configs, options): defconfigs = get_all_defconfigs() progress = Progress(len(defconfigs)) - slots = Slots(configs, options, progress, reference_src_dir) + slots = Slots(configs, options, progress, reference_src_dir, db_queue) # Main loop to process defconfig files: # Add a new subprocess into a vacant slot. 
@@ -1356,6 +1406,8 @@ def main(): parser = optparse.OptionParser() # Add options here + parser.add_option('-b', '--build-db', action='store_true', default=False, + help='build a CONFIG database') parser.add_option('-c', '--color', action='store_true', default=False, help='display the log in color') parser.add_option('-C', '--commit', action='store_true', default=False, @@ -1388,7 +1440,7 @@ def main(): (options, configs) = parser.parse_args() - if len(configs) == 0 and not options.force_sync: + if len(configs) == 0 and not any((options.force_sync, options.build_db)): parser.print_usage() sys.exit(1) @@ -1398,10 +1450,17 @@ def main(): check_top_directory() + config_db = {} + db_queue = Queue.Queue() + t = DatabaseThread(config_db, db_queue) + t.setDaemon(True) + t.start() + if not options.cleanup_headers_only: check_clean_directory() update_cross_compile(options.color) - move_config(configs, options) + move_config(configs, options, db_queue) + db_queue.join() if configs: cleanup_headers(configs, options) @@ -1421,5 +1480,13 @@ def main(): msg += '\n\nRsync all defconfig files using moveconfig.py' subprocess.call(['git', 'commit', '-s', '-m', msg]) + if options.build_db: + with open(CONFIG_DATABASE, 'w') as fd: + for defconfig, configs in config_db.iteritems(): + print >>fd, '%s' % defconfig + for config in sorted(configs.keys()): + print >>fd, ' %s=%s' % (config, configs[config]) + print >>fd + if __name__ == '__main__': main() -- cgit v1.2.3 From 99b66605f05e6de13f43ef1033809b648406f98e Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 1 Jun 2017 19:39:03 -0600 Subject: moveconfig: Support looking for implied CONFIG options Some CONFIG options can be implied by others and this can help to reduce the size of the defconfig files. For example, CONFIG_X86 implies CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to each of the x86 defconfig files. 
Add a -i option which searches for such options. Signed-off-by: Simon Glass Reviewed-by: Heiko Schocher --- tools/moveconfig.py | 215 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 214 insertions(+), 1 deletion(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 00101ba43d8..6fa394a4959 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -132,6 +132,69 @@ To process CONFIG_CMD_FPGAD only for a subset of configs based on path match: ./tools/moveconfig.py -Cy CONFIG_CMD_FPGAD -d - +Finding implied CONFIGs +----------------------- + +Some CONFIG options can be implied by others and this can help to reduce +the size of the defconfig files. For example, CONFIG_X86 implies +CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and +all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to +each of the x86 defconfig files. + +This tool can help find such configs. To use it, first build a database: + + ./tools/moveconfig.py -b + +Then try to query it: + + ./tools/moveconfig.py -i CONFIG_CMD_IRQ + CONFIG_CMD_IRQ found in 311/2384 defconfigs + 44 : CONFIG_SYS_FSL_ERRATUM_IFC_A002769 + 41 : CONFIG_SYS_FSL_ERRATUM_A007075 + 31 : CONFIG_SYS_FSL_DDR_VER_44 + 28 : CONFIG_ARCH_P1010 + 28 : CONFIG_SYS_FSL_ERRATUM_P1010_A003549 + 28 : CONFIG_SYS_FSL_ERRATUM_SEC_A003571 + 28 : CONFIG_SYS_FSL_ERRATUM_IFC_A003399 + 25 : CONFIG_SYS_FSL_ERRATUM_A008044 + 22 : CONFIG_ARCH_P1020 + 21 : CONFIG_SYS_FSL_DDR_VER_46 + 20 : CONFIG_MAX_PIRQ_LINKS + 20 : CONFIG_HPET_ADDRESS + 20 : CONFIG_X86 + 20 : CONFIG_PCIE_ECAM_SIZE + 20 : CONFIG_IRQ_SLOT_COUNT + 20 : CONFIG_I8259_PIC + 20 : CONFIG_CPU_ADDR_BITS + 20 : CONFIG_RAMBASE + 20 : CONFIG_SYS_FSL_ERRATUM_A005871 + 20 : CONFIG_PCIE_ECAM_BASE + 20 : CONFIG_X86_TSC_TIMER + 20 : CONFIG_I8254_TIMER + 20 : CONFIG_CMD_GETTIME + 19 : CONFIG_SYS_FSL_ERRATUM_A005812 + 18 : CONFIG_X86_RUN_32BIT + 17 : CONFIG_CMD_CHIP_CONFIG + ... 
+ +This shows a list of config options which might imply CONFIG_CMD_EEPROM along +with how many defconfigs they cover. From this you can see that CONFIG_X86 +implies CONFIG_CMD_EEPROM. Therefore, instead of adding CONFIG_CMD_EEPROM to +the defconfig of every x86 board, you could add a single imply line to the +Kconfig file: + + config X86 + bool "x86 architecture" + ... + imply CMD_EEPROM + +That will cover 20 defconfigs. Many of the options listed are not suitable as +they are not related. E.g. it would be odd for CONFIG_CMD_GETTIME to imply +CMD_EEPROM. + +Using this search you can reduce the size of moveconfig patches. + + Available options ----------------- @@ -195,6 +258,7 @@ To see the complete list of supported options, run """ +import collections import copy import difflib import filecmp @@ -1398,6 +1462,148 @@ def move_config(configs, options, db_queue): slots.show_failed_boards() slots.show_suspicious_boards() +def imply_config(config_list, find_superset=False): + """Find CONFIG options which imply those in the list + + Some CONFIG options can be implied by others and this can help to reduce + the size of the defconfig files. For example, CONFIG_X86 implies + CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and + all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to + each of the x86 defconfig files. + + This function uses the moveconfig database to find such options. It + displays a list of things that could possibly imply those in the list. + The algorithm ignores any that start with CONFIG_TARGET since these + typically refer to only a few defconfigs (often one). It also does not + display a config with less than 5 defconfigs. + + The algorithm works using sets. 
For each target config in config_list: + - Get the set 'defconfigs' which use that target config + - For each config (from a list of all configs): + - Get the set 'imply_defconfig' of defconfigs which use that config + - + - If imply_defconfigs contains anything not in defconfigs then + this config does not imply the target config + + Params: + config_list: List of CONFIG options to check (each a string) + find_superset: True to look for configs which are a superset of those + already found. So for example if CONFIG_EXYNOS5 implies an option, + but CONFIG_EXYNOS covers a larger set of defconfigs and also + implies that option, this will drop the former in favour of the + latter. In practice this option has not proved very used. + + Note the terminoloy: + config - a CONFIG_XXX options (a string, e.g. 'CONFIG_CMD_EEPROM') + defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig') + """ + # key is defconfig name, value is dict of (CONFIG_xxx, value) + config_db = {} + + # Holds a dict containing the set of defconfigs that contain each config + # key is config, value is set of defconfigs using that config + defconfig_db = collections.defaultdict(set) + + # Set of all config options we have seen + all_configs = set() + + # Set of all defconfigs we have seen + all_defconfigs = set() + + # Read in the database + configs = {} + with open(CONFIG_DATABASE) as fd: + for line in fd.readlines(): + line = line.rstrip() + if not line: # Separator between defconfigs + config_db[defconfig] = configs + all_defconfigs.add(defconfig) + configs = {} + elif line[0] == ' ': # CONFIG line + config, value = line.strip().split('=', 1) + configs[config] = value + defconfig_db[config].add(defconfig) + all_configs.add(config) + else: # New defconfig + defconfig = line + + # Work through each target config option in tern, independently + for config in config_list: + defconfigs = defconfig_db.get(config) + if not defconfigs: + print '%s not found in any defconfig' % config + 
continue + + # Get the set of defconfigs without this one (since a config cannot + # imply itself) + non_defconfigs = all_defconfigs - defconfigs + num_defconfigs = len(defconfigs) + print '%s found in %d/%d defconfigs' % (config, num_defconfigs, + len(all_configs)) + + # This will hold the results: key=config, value=defconfigs containing it + imply_configs = {} + rest_configs = all_configs - set([config]) + + # Look at every possible config, except the target one + for imply_config in rest_configs: + if 'CONFIG_TARGET' in imply_config: + continue + + # Find set of defconfigs that have this config + imply_defconfig = defconfig_db[imply_config] + + # Get the intersection of this with defconfigs containing the + # target config + common_defconfigs = imply_defconfig & defconfigs + + # Get the set of defconfigs containing this config which DO NOT + # also contain the taret config. If this set is non-empty it means + # that this config affects other defconfigs as well as (possibly) + # the ones affected by the target config. This means it implies + # things we don't want to imply. + not_common_defconfigs = imply_defconfig & non_defconfigs + if not_common_defconfigs: + continue + + # If there are common defconfigs, imply_config may be useful + if common_defconfigs: + skip = False + if find_superset: + for prev in imply_configs.keys(): + prev_count = len(imply_configs[prev]) + count = len(common_defconfigs) + if (prev_count > count and + (imply_configs[prev] & common_defconfigs == + common_defconfigs)): + # skip imply_config because prev is a superset + skip = True + break + elif count > prev_count: + # delete prev because imply_config is a superset + del imply_configs[prev] + if not skip: + imply_configs[imply_config] = common_defconfigs + + # Now we have a dict imply_configs of configs which imply each config + # The value of each dict item is the set of defconfigs containing that + # config. 
Rank them so that we print the configs that imply the largest + # number of defconfigs first. + ranked_configs = sorted(imply_configs, + key=lambda k: len(imply_configs[k]), reverse=True) + for config in ranked_configs: + num_common = len(imply_configs[config]) + + # Don't bother if there are less than 5 defconfigs affected. + if num_common < 5: + continue + missing = defconfigs - imply_configs[config] + missing_str = ', '.join(missing) if missing else 'all' + missing_str = '' + print ' %d : %-30s%s' % (num_common, config.ljust(30), + missing_str) + + def main(): try: cpu_count = multiprocessing.cpu_count() @@ -1416,6 +1622,8 @@ def main(): help='a file containing a list of defconfigs to move, ' "one per line (for example 'snow_defconfig') " "or '-' to read from stdin") + parser.add_option('-i', '--imply', action='store_true', default=False, + help='find options which imply others') parser.add_option('-n', '--dry-run', action='store_true', default=False, help='perform a trial run (show log with no changes)') parser.add_option('-e', '--exit-on-error', action='store_true', @@ -1440,7 +1648,8 @@ def main(): (options, configs) = parser.parse_args() - if len(configs) == 0 and not any((options.force_sync, options.build_db)): + if len(configs) == 0 and not any((options.force_sync, options.build_db, + options.imply)): parser.print_usage() sys.exit(1) @@ -1450,6 +1659,10 @@ def main(): check_top_directory() + if options.imply: + imply_config(configs) + return + config_db = {} db_queue = Queue.Queue() t = DatabaseThread(config_db, db_queue) -- cgit v1.2.3 From 2cce586651bbeb0489c6cc6f04489f65bd039b9e Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Tue, 13 Jun 2017 21:10:06 -0600 Subject: dtoc: Support multiple compatible strings in a node Sometimes a node will have multiple compatible strings. Drivers may use one or the other so the best approach seems to be to #define them to be equivalent. Update dtoc to support this. 
Signed-off-by: Simon Glass Tested-by: Kever Yang --- tools/dtoc/dtoc.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 08e35f148c0..c0ab13ad34f 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -88,6 +88,7 @@ class DtbPlatdata: self._phandle_node = {} self._outfile = None self._lines = [] + self._aliases = {} def SetupOutput(self, fname): """Set up the output destination @@ -159,9 +160,10 @@ class DtbPlatdata: C identifier for the first compatible string """ compat = node.props['compatible'].value + aliases = [] if type(compat) == list: - compat = compat[0] - return Conv_name_to_c(compat) + compat, aliases = compat[0], compat[1:] + return Conv_name_to_c(compat), [Conv_name_to_c(a) for a in aliases] def ScanDtb(self): """Scan the device tree to obtain a tree of notes and properties @@ -239,7 +241,7 @@ class DtbPlatdata: """ structs = {} for node in self._valid_nodes: - node_name = self.GetCompatName(node) + node_name, _ = self.GetCompatName(node) fields = {} # Get a list of all the valid properties in this node. 
@@ -263,12 +265,17 @@ class DtbPlatdata: upto = 0 for node in self._valid_nodes: - node_name = self.GetCompatName(node) + node_name, _ = self.GetCompatName(node) struct = structs[node_name] for name, prop in node.props.items(): if name not in PROP_IGNORE_LIST and name[0] != '#': prop.Widen(struct[name]) upto += 1 + + struct_name, aliases = self.GetCompatName(node) + for alias in aliases: + self._aliases[alias] = struct_name + return structs def ScanPhandles(self): @@ -327,13 +334,17 @@ class DtbPlatdata: self.Out(';\n') self.Out('};\n') + for alias, struct_name in self._aliases.iteritems(): + self.Out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias, + STRUCT_PREFIX, struct_name)) + def OutputNode(self, node): """Output the C code for a node Args: node: node to output """ - struct_name = self.GetCompatName(node) + struct_name, _ = self.GetCompatName(node) var_name = Conv_name_to_c(node.name) self.Buf('static struct %s%s %s%s = {\n' % (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) @@ -384,8 +395,11 @@ class DtbPlatdata: """Generate device defintions for the platform data This writes out C platform data initialisation data and - U_BOOT_DEVICE() declarations for each valid node. See the - documentation in README.of-plat for more information. + U_BOOT_DEVICE() declarations for each valid node. Where a node has + multiple compatible strings, a #define is used to make them equivalent. + + See the documentation in doc/driver-model/of-plat.txt for more + information. """ self.Out('#include \n') self.Out('#include \n') -- cgit v1.2.3 From 2ddd85dc34c4b5fdefe363b735d2eea8d9d87c6c Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 15 Jun 2017 21:39:31 -0600 Subject: moveconfig: Allow piping in 'git show --stat' output It is useful to be able to process only a subset of boards to save time. Often that subset is defined by the defconfig files in a git commit. 
This change allows things like: # Build the database ./tools.moveconfig.py -b # Find some implying configs ./tools/moveconfig.py -i CONFIG_X # Add some 'imply' statements to Kconfig files ./tools/moveconfig.py -i CONFIG_X -a CONFIG_A,CONFIG_B # Reprocess the defconfig files to see if we can drop some changes git show --stat | ./tools/moveconfig.py -s -d - # Update the commit, with fewer defconfig changes gii commit -au Where the commit contains defconfig files, this will reprocess them to take account of the imply statements that you added. Signed-off-by: Simon Glass --- tools/moveconfig.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 6fa394a4959..6921135b0f8 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -404,6 +404,8 @@ def get_matched_defconfigs(defconfigs_file): line = line.strip() if not line: continue # skip blank lines silently + if ' ' in line: + line = line.split(' ')[0] # handle 'git log' input matched = get_matched_defconfig(line) if not matched: print >> sys.stderr, "warning: %s:%d: no defconfig matched '%s'" % \ -- cgit v1.2.3 From 9b2a2e87d2404dad62e4c2f350fd07324bfdadc3 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 15 Jun 2017 21:39:32 -0600 Subject: moveconfig: Allow control of which implying configs are shown Sometimes it is useful to display CONFIG_TARGET or CONFIG_CMD configs. Add an option to control this. Also we generally ignore implying configs which affect fewer than 5 boards. But sometimes it is useful to show those those, so add an option that reduces the minimum to two. ERRATUM configs are never useful for implying things, so ignore those. 
Signed-off-by: Simon Glass --- tools/moveconfig.py | 36 ++++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 6921135b0f8..e765acc4a46 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -1464,7 +1464,15 @@ def move_config(configs, options, db_queue): slots.show_failed_boards() slots.show_suspicious_boards() -def imply_config(config_list, find_superset=False): +(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD) = (1, 2, 4) + +IMPLY_FLAGS = { + 'min2': [IMPLY_MIN_2, 'Show options which imply >2 boards (normally >5)'], + 'target': [IMPLY_TARGET, 'Allow CONFIG_TARGET_... options to imply'], + 'cmd': [IMPLY_CMD, 'Allow CONFIG_CMD_... to imply'], +}; + +def do_imply_config(config_list, imply_flags, find_superset=False): """Find CONFIG options which imply those in the list Some CONFIG options can be implied by others and this can help to reduce @@ -1489,6 +1497,8 @@ def imply_config(config_list, find_superset=False): Params: config_list: List of CONFIG options to check (each a string) + imply_flags: Flags which control which implying configs are allowed + (IMPLY_...) find_superset: True to look for configs which are a superset of those already found. 
So for example if CONFIG_EXYNOS5 implies an option, but CONFIG_EXYNOS covers a larger set of defconfigs and also @@ -1549,8 +1559,14 @@ def imply_config(config_list, find_superset=False): # Look at every possible config, except the target one for imply_config in rest_configs: - if 'CONFIG_TARGET' in imply_config: + if 'ERRATUM' in imply_config: continue + if not (imply_flags & IMPLY_CMD): + if 'CONFIG_CMD' in imply_config: + continue + if not (imply_flags & IMPLY_TARGET): + if 'CONFIG_TARGET' in imply_config: + continue # Find set of defconfigs that have this config imply_defconfig = defconfig_db[imply_config] @@ -1597,7 +1613,7 @@ def imply_config(config_list, find_superset=False): num_common = len(imply_configs[config]) # Don't bother if there are less than 5 defconfigs affected. - if num_common < 5: + if num_common < (2 if imply_flags & IMPLY_MIN_2 else 5): continue missing = defconfigs - imply_configs[config] missing_str = ', '.join(missing) if missing else 'all' @@ -1626,6 +1642,8 @@ def main(): "or '-' to read from stdin") parser.add_option('-i', '--imply', action='store_true', default=False, help='find options which imply others') + parser.add_option('-I', '--imply-flags', type='string', default='', + help="control the -i option ('help' for help") parser.add_option('-n', '--dry-run', action='store_true', default=False, help='perform a trial run (show log with no changes)') parser.add_option('-e', '--exit-on-error', action='store_true', @@ -1662,7 +1680,17 @@ def main(): check_top_directory() if options.imply: - imply_config(configs) + imply_flags = 0 + for flag in options.imply_flags.split(): + if flag == 'help' or flag not in IMPLY_FLAGS: + print "Imply flags: (separate with ',')" + for name, info in IMPLY_FLAGS.iteritems(): + print ' %-15s: %s' % (name, info[1]) + parser.print_usage() + sys.exit(1) + imply_flags |= IMPLY_FLAGS[flag][0] + + do_imply_config(configs, imply_flags) return config_db = {} -- cgit v1.2.3 From 
cb008830aa10214c5e924d8fe15deaeca190d71a Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 15 Jun 2017 21:39:33 -0600 Subject: moveconfig: Allow automatic location and adding of 'imply' By using a Kconfig parser we can find the location of each option in the Kconfig tree. Using the information from the database we can then automatically add an 'imply' option into the right place if requested by the user. Add a -a option to support adding 'imply' options. Display the location of any existing 'imply' option so that progress can be examined. Add a -A option to hide any existing 'imply' options so that already-completed additions need not be considered further. Also add documentation for this feature. Signed-off-by: Simon Glass --- tools/moveconfig.py | 224 +++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 215 insertions(+), 9 deletions(-) (limited to 'tools') diff --git a/tools/moveconfig.py b/tools/moveconfig.py index e765acc4a46..eb4927f278e 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -194,6 +194,48 @@ CMD_EEPROM. Using this search you can reduce the size of moveconfig patches. +You can automatically add 'imply' statements in the Kconfig with the -a +option: + + ./tools/moveconfig.py -s -i CONFIG_SCSI \ + -a CONFIG_ARCH_LS1021A,CONFIG_ARCH_LS1043A + +This will add 'imply SCSI' to the two CONFIG options mentioned, assuming that +the database indicates that they do actually imply CONFIG_SCSI and do not +already have an 'imply SCSI'. + +The output shows where the imply is added: + + 18 : CONFIG_ARCH_LS1021A arch/arm/cpu/armv7/ls102xa/Kconfig:1 + 13 : CONFIG_ARCH_LS1043A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:11 + 12 : CONFIG_ARCH_LS1046A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:31 + +The first number is the number of boards which can avoid having a special +CONFIG_SCSI option in their defconfig file if this 'imply' is added. +The location at the right is the Kconfig file and line number where the config +appears. 
For example, adding 'imply CONFIG_SCSI' to the 'config ARCH_LS1021A' +in arch/arm/cpu/armv7/ls102xa/Kconfig at line 1 will help 18 boards to reduce +the size of their defconfig files. + +If you want to add an 'imply' to every imply config in the list, you can use + + ./tools/moveconfig.py -s -i CONFIG_SCSI -a all + +To control which ones are displayed, use -I where list is a list of +options (use '-I help' to see possible options and their meaning). + +To skip showing you options that already have an 'imply' attached, use -A. + +When you have finished adding 'imply' options you can regenerate the +defconfig files for affected boards with something like: + + git show --stat | ./tools/moveconfig.py -s -d - + +This will regenerate only those defconfigs changed in the current commit. +If you start with (say) 100 defconfigs being changed in the commit, and add +a few 'imply' options as above, then regenerate, hopefully you can reduce the +number of defconfigs changed in the commit. + Available options ----------------- @@ -276,6 +318,9 @@ import tempfile import threading import time +sys.path.append(os.path.join(os.path.dirname(__file__), 'buildman')) +import kconfiglib + SHOW_GNU_MAKE = 'scripts/show-gnu-make' SLEEP_TIME=0.03 @@ -331,6 +376,7 @@ COLOR_WHITE = '1;37' AUTO_CONF_PATH = 'include/config/auto.conf' CONFIG_DATABASE = 'moveconfig.db' +CONFIG_LEN = len('CONFIG_') ### helper functions ### def get_devnull(): @@ -802,6 +848,19 @@ class Progress: print ' %d defconfigs out of %d\r' % (self.current, self.total), sys.stdout.flush() + +class KconfigScanner: + """Kconfig scanner.""" + + def __init__(self): + """Scan all the Kconfig files and create a Config object.""" + # Define environment variables referenced from Kconfig + os.environ['srctree'] = os.getcwd() + os.environ['UBOOTVERSION'] = 'dummy' + os.environ['KCONFIG_OBJDIR'] = '' + self.conf = kconfiglib.Config() + + class KconfigParser: """A parser of .config and include/autoconf.mk.""" @@ -1464,15 +1523,98 @@ def 
move_config(configs, options, db_queue): slots.show_failed_boards() slots.show_suspicious_boards() -(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD) = (1, 2, 4) +def find_kconfig_rules(kconf, config, imply_config): + """Check whether a config has a 'select' or 'imply' keyword + + Args: + kconf: Kconfig.Config object + config: Name of config to check (without CONFIG_ prefix) + imply_config: Implying config (without CONFIG_ prefix) which may or + may not have an 'imply' for 'config') + + Returns: + Symbol object for 'config' if found, else None + """ + sym = kconf.get_symbol(imply_config) + if sym: + for sel in sym.get_selected_symbols(): + if sel.get_name() == config: + return sym + return None + +def check_imply_rule(kconf, config, imply_config): + """Check if we can add an 'imply' option + + This finds imply_config in the Kconfig and looks to see if it is possible + to add an 'imply' for 'config' to that part of the Kconfig. + + Args: + kconf: Kconfig.Config object + config: Name of config to check (without CONFIG_ prefix) + imply_config: Implying config (without CONFIG_ prefix) which may or + may not have an 'imply' for 'config') + + Returns: + tuple: + filename of Kconfig file containing imply_config, or None if none + line number within the Kconfig file, or 0 if none + message indicating the result + """ + sym = kconf.get_symbol(imply_config) + if not sym: + return 'cannot find sym' + locs = sym.get_def_locations() + if len(locs) != 1: + return '%d locations' % len(locs) + fname, linenum = locs[0] + cwd = os.getcwd() + if cwd and fname.startswith(cwd): + fname = fname[len(cwd) + 1:] + file_line = ' at %s:%d' % (fname, linenum) + with open(fname) as fd: + data = fd.read().splitlines() + if data[linenum - 1] != 'config %s' % imply_config: + return None, 0, 'bad sym format %s%s' % (data[linenum], file_line) + return fname, linenum, 'adding%s' % file_line + +def add_imply_rule(config, fname, linenum): + """Add a new 'imply' option to a Kconfig + + Args: + config: config 
option to add an imply for (without CONFIG_ prefix) + fname: Kconfig filename to update + linenum: Line number to place the 'imply' before + + Returns: + Message indicating the result + """ + file_line = ' at %s:%d' % (fname, linenum) + data = open(fname).read().splitlines() + linenum -= 1 + + for offset, line in enumerate(data[linenum:]): + if line.strip().startswith('help') or not line: + data.insert(linenum + offset, '\timply %s' % config) + with open(fname, 'w') as fd: + fd.write('\n'.join(data) + '\n') + return 'added%s' % file_line + + return 'could not insert%s' + +(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD, IMPLY_NON_ARCH_BOARD) = ( + 1, 2, 4, 8) IMPLY_FLAGS = { 'min2': [IMPLY_MIN_2, 'Show options which imply >2 boards (normally >5)'], 'target': [IMPLY_TARGET, 'Allow CONFIG_TARGET_... options to imply'], 'cmd': [IMPLY_CMD, 'Allow CONFIG_CMD_... to imply'], + 'non-arch-board': [ + IMPLY_NON_ARCH_BOARD, + 'Allow Kconfig options outside arch/ and /board/ to imply'], }; -def do_imply_config(config_list, imply_flags, find_superset=False): +def do_imply_config(config_list, add_imply, imply_flags, skip_added, + check_kconfig=True, find_superset=False): """Find CONFIG options which imply those in the list Some CONFIG options can be implied by others and this can help to reduce @@ -1497,8 +1639,12 @@ def do_imply_config(config_list, imply_flags, find_superset=False): Params: config_list: List of CONFIG options to check (each a string) + add_imply: Automatically add an 'imply' for each config. imply_flags: Flags which control which implying configs are allowed (IMPLY_...) + skip_added: Don't show options which already have an imply added. + check_kconfig: Check if implied symbols already have an 'imply' or + 'select' for the target config, and show this information if so. find_superset: True to look for configs which are a superset of those already found. 
So for example if CONFIG_EXYNOS5 implies an option, but CONFIG_EXYNOS covers a larger set of defconfigs and also @@ -1509,6 +1655,10 @@ def do_imply_config(config_list, imply_flags, find_superset=False): config - a CONFIG_XXX options (a string, e.g. 'CONFIG_CMD_EEPROM') defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig') """ + kconf = KconfigScanner().conf if check_kconfig else None + if add_imply and add_imply != 'all': + add_imply = add_imply.split() + # key is defconfig name, value is dict of (CONFIG_xxx, value) config_db = {} @@ -1607,19 +1757,68 @@ def do_imply_config(config_list, imply_flags, find_superset=False): # The value of each dict item is the set of defconfigs containing that # config. Rank them so that we print the configs that imply the largest # number of defconfigs first. - ranked_configs = sorted(imply_configs, + ranked_iconfigs = sorted(imply_configs, key=lambda k: len(imply_configs[k]), reverse=True) - for config in ranked_configs: - num_common = len(imply_configs[config]) + kconfig_info = '' + cwd = os.getcwd() + add_list = collections.defaultdict(list) + for iconfig in ranked_iconfigs: + num_common = len(imply_configs[iconfig]) # Don't bother if there are less than 5 defconfigs affected. 
if num_common < (2 if imply_flags & IMPLY_MIN_2 else 5): continue - missing = defconfigs - imply_configs[config] + missing = defconfigs - imply_configs[iconfig] missing_str = ', '.join(missing) if missing else 'all' missing_str = '' - print ' %d : %-30s%s' % (num_common, config.ljust(30), - missing_str) + show = True + if kconf: + sym = find_kconfig_rules(kconf, config[CONFIG_LEN:], + iconfig[CONFIG_LEN:]) + kconfig_info = '' + if sym: + locs = sym.get_def_locations() + if len(locs) == 1: + fname, linenum = locs[0] + if cwd and fname.startswith(cwd): + fname = fname[len(cwd) + 1:] + kconfig_info = '%s:%d' % (fname, linenum) + if skip_added: + show = False + else: + sym = kconf.get_symbol(iconfig[CONFIG_LEN:]) + fname = '' + if sym: + locs = sym.get_def_locations() + if len(locs) == 1: + fname, linenum = locs[0] + if cwd and fname.startswith(cwd): + fname = fname[len(cwd) + 1:] + in_arch_board = not sym or (fname.startswith('arch') or + fname.startswith('board')) + if (not in_arch_board and + not (imply_flags & IMPLY_NON_ARCH_BOARD)): + continue + + if add_imply and (add_imply == 'all' or + iconfig in add_imply): + fname, linenum, kconfig_info = (check_imply_rule(kconf, + config[CONFIG_LEN:], iconfig[CONFIG_LEN:])) + if fname: + add_list[fname].append(linenum) + + if show and kconfig_info != 'skip': + print '%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30), + kconfig_info, missing_str) + + # Having collected a list of things to add, now we add them. We process + # each file from the largest line number to the smallest so that + # earlier additions do not affect our line numbers. E.g. if we added an + # imply at line 20 it would change the position of each line after + # that. 
+ for fname, linenums in add_list.iteritems(): + for linenum in sorted(linenums, reverse=True): + add_imply_rule(config[CONFIG_LEN:], fname, linenum) def main(): @@ -1630,6 +1829,12 @@ def main(): parser = optparse.OptionParser() # Add options here + parser.add_option('-a', '--add-imply', type='string', default='', + help='comma-separated list of CONFIG options to add ' + "an 'imply' statement to for the CONFIG in -i") + parser.add_option('-A', '--skip-added', action='store_true', default=False, + help="don't show options which are already marked as " + 'implying others') parser.add_option('-b', '--build-db', action='store_true', default=False, help='build a CONFIG database') parser.add_option('-c', '--color', action='store_true', default=False, @@ -1690,7 +1895,8 @@ def main(): sys.exit(1) imply_flags |= IMPLY_FLAGS[flag][0] - do_imply_config(configs, imply_flags) + do_imply_config(configs, options.add_imply, imply_flags, + options.skip_added) return config_db = {} -- cgit v1.2.3 From 418355cbaa4dfe1a202538c1584f0b7b147f59c0 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:08:56 -0600 Subject: dtoc: Use self._options instead of the global options This class should use the options object passed to it rather than finding the global one. Fix it. 
Signed-off-by: Simon Glass --- tools/dtoc/dtoc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index c0ab13ad34f..056f5157c95 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -177,7 +177,7 @@ class DtbPlatdata: for node in root.subnodes: if 'compatible' in node.props: status = node.props.get('status') - if (not options.include_disabled and not status or + if (not self._options.include_disabled and not status or status.value != 'disabled'): self._valid_nodes.append(node) phandle_prop = node.props.get('phandle') @@ -203,7 +203,7 @@ class DtbPlatdata: for node in self.fdt.GetRoot().subnodes: if 'compatible' in node.props: status = node.props.get('status') - if (not options.include_disabled and not status or + if (not self._options.include_disabled and not status or status.value != 'disabled'): node_list.append(node) phandle_prop = node.props.get('phandle') -- cgit v1.2.3 From 14f5acfc5b0c133cbe5e7f5bffc0519f994abbfa Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:08:57 -0600 Subject: dtoc: Add a comment at the top Add a description of the dtoc tool at the top of the file. Signed-off-by: Simon Glass --- tools/dtoc/dtoc.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) (limited to 'tools') diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 056f5157c95..79779477d9e 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -6,6 +6,26 @@ # SPDX-License-Identifier: GPL-2.0+ # +"""Device tree to C tool + +This tool converts a device tree binary file (.dtb) into two C files. The +indent is to allow a C program to access data from the device tree without +having to link against libfdt. By putting the data from the device tree into +C structures, normal C code can be used. This helps to reduce the size of the +compiled program. 
+ +Dtoc produces two output files: + + dt-structs.h - contains struct definitions + dt-platdata.c - contains data from the device tree using the struct + definitions, as well as U-Boot driver definitions. + +This tool is used in U-Boot to provide device tree data to SPL without +increasing the code size of SPL. This supports the CONFIG_SPL_OF_PLATDATA +options. For more information about the use of this options and tool please +see doc/driver-model/of-plat.txt +""" + import copy from optparse import OptionError, OptionParser import os -- cgit v1.2.3 From 7581c01a159ce04101798a39c04b5fa37ac718d2 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:08:58 -0600 Subject: dtoc: Split out the main class into its own file To simplify running tests we should move this class into its own file. This allows the tests to import it without having to import dtoc.py, which runs the tests. Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 411 +++++++++++++++++++++++++++++++++++++++++++++ tools/dtoc/dtoc.py | 408 +------------------------------------------- 2 files changed, 414 insertions(+), 405 deletions(-) create mode 100644 tools/dtoc/dtb_platdata.py (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py new file mode 100644 index 00000000000..94db274c098 --- /dev/null +++ b/tools/dtoc/dtb_platdata.py @@ -0,0 +1,411 @@ +#!/usr/bin/python +# +# Copyright (C) 2017 Google, Inc +# Written by Simon Glass +# +# SPDX-License-Identifier: GPL-2.0+ +# + +import copy + +import fdt +import fdt_util + +# When we see these properties we ignore them - i.e. 
do not create a structure member +PROP_IGNORE_LIST = [ + '#address-cells', + '#gpio-cells', + '#size-cells', + 'compatible', + 'linux,phandle', + "status", + 'phandle', + 'u-boot,dm-pre-reloc', + 'u-boot,dm-tpl', + 'u-boot,dm-spl', +] + +# C type declarations for the tyues we support +TYPE_NAMES = { + fdt.TYPE_INT: 'fdt32_t', + fdt.TYPE_BYTE: 'unsigned char', + fdt.TYPE_STRING: 'const char *', + fdt.TYPE_BOOL: 'bool', +}; + +STRUCT_PREFIX = 'dtd_' +VAL_PREFIX = 'dtv_' + +def Conv_name_to_c(name): + """Convert a device-tree name to a C identifier + + Args: + name: Name to convert + Return: + String containing the C version of this name + """ + str = name.replace('@', '_at_') + str = str.replace('-', '_') + str = str.replace(',', '_') + str = str.replace('.', '_') + str = str.replace('/', '__') + return str + +def TabTo(num_tabs, str): + if len(str) >= num_tabs * 8: + return str + ' ' + return str + '\t' * (num_tabs - len(str) // 8) + +class DtbPlatdata: + """Provide a means to convert device tree binary data to platform data + + The output of this process is C structures which can be used in space- + constrained encvironments where the ~3KB code overhead of device tree + code is not affordable. + + Properties: + fdt: Fdt object, referencing the device tree + _dtb_fname: Filename of the input device tree binary file + _valid_nodes: A list of Node object with compatible strings + _options: Command-line options + _phandle_node: A dict of nodes indexed by phandle number (1, 2...) 
+ _outfile: The current output file (sys.stdout or a real file) + _lines: Stashed list of output lines for outputting in the future + _phandle_node: A dict of Nodes indexed by phandle (an integer) + """ + def __init__(self, dtb_fname, options): + self._dtb_fname = dtb_fname + self._valid_nodes = None + self._options = options + self._phandle_node = {} + self._outfile = None + self._lines = [] + self._aliases = {} + + def SetupOutput(self, fname): + """Set up the output destination + + Once this is done, future calls to self.Out() will output to this + file. + + Args: + fname: Filename to send output to, or '-' for stdout + """ + if fname == '-': + self._outfile = sys.stdout + else: + self._outfile = open(fname, 'w') + + def Out(self, str): + """Output a string to the output file + + Args: + str: String to output + """ + self._outfile.write(str) + + def Buf(self, str): + """Buffer up a string to send later + + Args: + str: String to add to our 'buffer' list + """ + self._lines.append(str) + + def GetBuf(self): + """Get the contents of the output buffer, and clear it + + Returns: + The output buffer, which is then cleared for future use + """ + lines = self._lines + self._lines = [] + return lines + + def GetValue(self, type, value): + """Get a value as a C expression + + For integers this returns a byte-swapped (little-endian) hex string + For bytes this returns a hex string, e.g. 
0x12 + For strings this returns a literal string enclosed in quotes + For booleans this return 'true' + + Args: + type: Data type (fdt_util) + value: Data value, as a string of bytes + """ + if type == fdt.TYPE_INT: + return '%#x' % fdt_util.fdt32_to_cpu(value) + elif type == fdt.TYPE_BYTE: + return '%#x' % ord(value[0]) + elif type == fdt.TYPE_STRING: + return '"%s"' % value + elif type == fdt.TYPE_BOOL: + return 'true' + + def GetCompatName(self, node): + """Get a node's first compatible string as a C identifier + + Args: + node: Node object to check + Return: + C identifier for the first compatible string + """ + compat = node.props['compatible'].value + aliases = [] + if type(compat) == list: + compat, aliases = compat[0], compat[1:] + return Conv_name_to_c(compat), [Conv_name_to_c(a) for a in aliases] + + def ScanDtb(self): + """Scan the device tree to obtain a tree of notes and properties + + Once this is done, self.fdt.GetRoot() can be called to obtain the + device tree root node, and progress from there. 
+ """ + self.fdt = fdt.FdtScan(self._dtb_fname) + + def ScanNode(self, root): + for node in root.subnodes: + if 'compatible' in node.props: + status = node.props.get('status') + if (not self._options.include_disabled and not status or + status.value != 'disabled'): + self._valid_nodes.append(node) + phandle_prop = node.props.get('phandle') + if phandle_prop: + phandle = phandle_prop.GetPhandle() + self._phandle_node[phandle] = node + + # recurse to handle any subnodes + self.ScanNode(node); + + def ScanTree(self): + """Scan the device tree for useful information + + This fills in the following properties: + _phandle_node: A dict of Nodes indexed by phandle (an integer) + _valid_nodes: A list of nodes we wish to consider include in the + platform data + """ + self._phandle_node = {} + self._valid_nodes = [] + return self.ScanNode(self.fdt.GetRoot()); + + for node in self.fdt.GetRoot().subnodes: + if 'compatible' in node.props: + status = node.props.get('status') + if (not self._options.include_disabled and not status or + status.value != 'disabled'): + node_list.append(node) + phandle_prop = node.props.get('phandle') + if phandle_prop: + phandle = phandle_prop.GetPhandle() + self._phandle_node[phandle] = node + + self._valid_nodes = node_list + + def IsPhandle(self, prop): + """Check if a node contains phandles + + We have no reliable way of detecting whether a node uses a phandle + or not. As an interim measure, use a list of known property names. + + Args: + prop: Prop object to check + Return: + True if the object value contains phandles, else False + """ + if prop.name in ['clocks']: + return True + return False + + def ScanStructs(self): + """Scan the device tree building up the C structures we will use. + + Build a dict keyed by C struct name containing a dict of Prop + object for each struct field (keyed by property name). Where the + same struct appears multiple times, try to use the 'widest' + property, i.e. the one with a type which can express all others. 
+ + Once the widest property is determined, all other properties are + updated to match that width. + """ + structs = {} + for node in self._valid_nodes: + node_name, _ = self.GetCompatName(node) + fields = {} + + # Get a list of all the valid properties in this node. + for name, prop in node.props.items(): + if name not in PROP_IGNORE_LIST and name[0] != '#': + fields[name] = copy.deepcopy(prop) + + # If we've seen this node_name before, update the existing struct. + if node_name in structs: + struct = structs[node_name] + for name, prop in fields.items(): + oldprop = struct.get(name) + if oldprop: + oldprop.Widen(prop) + else: + struct[name] = prop + + # Otherwise store this as a new struct. + else: + structs[node_name] = fields + + upto = 0 + for node in self._valid_nodes: + node_name, _ = self.GetCompatName(node) + struct = structs[node_name] + for name, prop in node.props.items(): + if name not in PROP_IGNORE_LIST and name[0] != '#': + prop.Widen(struct[name]) + upto += 1 + + struct_name, aliases = self.GetCompatName(node) + for alias in aliases: + self._aliases[alias] = struct_name + + return structs + + def ScanPhandles(self): + """Figure out what phandles each node uses + + We need to be careful when outputing nodes that use phandles since + they must come after the declaration of the phandles in the C file. + Otherwise we get a compiler error since the phandle struct is not yet + declared. + + This function adds to each node a list of phandle nodes that the node + depends on. This allows us to output things in the right order. 
+ """ + for node in self._valid_nodes: + node.phandles = set() + for pname, prop in node.props.items(): + if pname in PROP_IGNORE_LIST or pname[0] == '#': + continue + if type(prop.value) == list: + if self.IsPhandle(prop): + # Process the list as pairs of (phandle, id) + it = iter(prop.value) + for phandle_cell, id_cell in zip(it, it): + phandle = fdt_util.fdt32_to_cpu(phandle_cell) + id = fdt_util.fdt32_to_cpu(id_cell) + target_node = self._phandle_node[phandle] + node.phandles.add(target_node) + + + def GenerateStructs(self, structs): + """Generate struct defintions for the platform data + + This writes out the body of a header file consisting of structure + definitions for node in self._valid_nodes. See the documentation in + README.of-plat for more information. + """ + self.Out('#include \n') + self.Out('#include \n') + + # Output the struct definition + for name in sorted(structs): + self.Out('struct %s%s {\n' % (STRUCT_PREFIX, name)); + for pname in sorted(structs[name]): + prop = structs[name][pname] + if self.IsPhandle(prop): + # For phandles, include a reference to the target + self.Out('\t%s%s[%d]' % (TabTo(2, 'struct phandle_2_cell'), + Conv_name_to_c(prop.name), + len(prop.value) / 2)) + else: + ptype = TYPE_NAMES[prop.type] + self.Out('\t%s%s' % (TabTo(2, ptype), + Conv_name_to_c(prop.name))) + if type(prop.value) == list: + self.Out('[%d]' % len(prop.value)) + self.Out(';\n') + self.Out('};\n') + + for alias, struct_name in self._aliases.iteritems(): + self.Out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias, + STRUCT_PREFIX, struct_name)) + + def OutputNode(self, node): + """Output the C code for a node + + Args: + node: node to output + """ + struct_name, _ = self.GetCompatName(node) + var_name = Conv_name_to_c(node.name) + self.Buf('static struct %s%s %s%s = {\n' % + (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) + for pname, prop in node.props.items(): + if pname in PROP_IGNORE_LIST or pname[0] == '#': + continue + ptype = TYPE_NAMES[prop.type] 
+ member_name = Conv_name_to_c(prop.name) + self.Buf('\t%s= ' % TabTo(3, '.' + member_name)) + + # Special handling for lists + if type(prop.value) == list: + self.Buf('{') + vals = [] + # For phandles, output a reference to the platform data + # of the target node. + if self.IsPhandle(prop): + # Process the list as pairs of (phandle, id) + it = iter(prop.value) + for phandle_cell, id_cell in zip(it, it): + phandle = fdt_util.fdt32_to_cpu(phandle_cell) + id = fdt_util.fdt32_to_cpu(id_cell) + target_node = self._phandle_node[phandle] + name = Conv_name_to_c(target_node.name) + vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id)) + else: + for val in prop.value: + vals.append(self.GetValue(prop.type, val)) + self.Buf(', '.join(vals)) + self.Buf('}') + else: + self.Buf(self.GetValue(prop.type, prop.value)) + self.Buf(',\n') + self.Buf('};\n') + + # Add a device declaration + self.Buf('U_BOOT_DEVICE(%s) = {\n' % var_name) + self.Buf('\t.name\t\t= "%s",\n' % struct_name) + self.Buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name)) + self.Buf('\t.platdata_size\t= sizeof(%s%s),\n' % + (VAL_PREFIX, var_name)) + self.Buf('};\n') + self.Buf('\n') + + self.Out(''.join(self.GetBuf())) + + def GenerateTables(self): + """Generate device defintions for the platform data + + This writes out C platform data initialisation data and + U_BOOT_DEVICE() declarations for each valid node. Where a node has + multiple compatible strings, a #define is used to make them equivalent. + + See the documentation in doc/driver-model/of-plat.txt for more + information. 
+ """ + self.Out('#include \n') + self.Out('#include \n') + self.Out('#include \n') + self.Out('\n') + nodes_to_output = list(self._valid_nodes) + + # Keep outputing nodes until there is none left + while nodes_to_output: + node = nodes_to_output[0] + # Output all the node's dependencies first + for req_node in node.phandles: + if req_node in nodes_to_output: + self.OutputNode(req_node) + nodes_to_output.remove(req_node) + self.OutputNode(node) + nodes_to_output.remove(node) diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 79779477d9e..8fc717a92d1 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -26,417 +26,15 @@ options. For more information about the use of this options and tool please see doc/driver-model/of-plat.txt """ -import copy -from optparse import OptionError, OptionParser +from optparse import OptionParser import os -import struct import sys # Bring in the patman libraries our_path = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(our_path, '../patman')) -import fdt -import fdt_util - -# When we see these properties we ignore them - i.e. 
do not create a structure member -PROP_IGNORE_LIST = [ - '#address-cells', - '#gpio-cells', - '#size-cells', - 'compatible', - 'linux,phandle', - "status", - 'phandle', - 'u-boot,dm-pre-reloc', - 'u-boot,dm-tpl', - 'u-boot,dm-spl', -] - -# C type declarations for the tyues we support -TYPE_NAMES = { - fdt.TYPE_INT: 'fdt32_t', - fdt.TYPE_BYTE: 'unsigned char', - fdt.TYPE_STRING: 'const char *', - fdt.TYPE_BOOL: 'bool', -}; - -STRUCT_PREFIX = 'dtd_' -VAL_PREFIX = 'dtv_' - -def Conv_name_to_c(name): - """Convert a device-tree name to a C identifier - - Args: - name: Name to convert - Return: - String containing the C version of this name - """ - str = name.replace('@', '_at_') - str = str.replace('-', '_') - str = str.replace(',', '_') - str = str.replace('.', '_') - str = str.replace('/', '__') - return str - -def TabTo(num_tabs, str): - if len(str) >= num_tabs * 8: - return str + ' ' - return str + '\t' * (num_tabs - len(str) // 8) - -class DtbPlatdata: - """Provide a means to convert device tree binary data to platform data - - The output of this process is C structures which can be used in space- - constrained encvironments where the ~3KB code overhead of device tree - code is not affordable. - - Properties: - fdt: Fdt object, referencing the device tree - _dtb_fname: Filename of the input device tree binary file - _valid_nodes: A list of Node object with compatible strings - _options: Command-line options - _phandle_node: A dict of nodes indexed by phandle number (1, 2...) 
- _outfile: The current output file (sys.stdout or a real file) - _lines: Stashed list of output lines for outputting in the future - _phandle_node: A dict of Nodes indexed by phandle (an integer) - """ - def __init__(self, dtb_fname, options): - self._dtb_fname = dtb_fname - self._valid_nodes = None - self._options = options - self._phandle_node = {} - self._outfile = None - self._lines = [] - self._aliases = {} - - def SetupOutput(self, fname): - """Set up the output destination - - Once this is done, future calls to self.Out() will output to this - file. - - Args: - fname: Filename to send output to, or '-' for stdout - """ - if fname == '-': - self._outfile = sys.stdout - else: - self._outfile = open(fname, 'w') - - def Out(self, str): - """Output a string to the output file - - Args: - str: String to output - """ - self._outfile.write(str) - - def Buf(self, str): - """Buffer up a string to send later - - Args: - str: String to add to our 'buffer' list - """ - self._lines.append(str) - - def GetBuf(self): - """Get the contents of the output buffer, and clear it - - Returns: - The output buffer, which is then cleared for future use - """ - lines = self._lines - self._lines = [] - return lines - - def GetValue(self, type, value): - """Get a value as a C expression - - For integers this returns a byte-swapped (little-endian) hex string - For bytes this returns a hex string, e.g. 
0x12 - For strings this returns a literal string enclosed in quotes - For booleans this return 'true' - - Args: - type: Data type (fdt_util) - value: Data value, as a string of bytes - """ - if type == fdt.TYPE_INT: - return '%#x' % fdt_util.fdt32_to_cpu(value) - elif type == fdt.TYPE_BYTE: - return '%#x' % ord(value[0]) - elif type == fdt.TYPE_STRING: - return '"%s"' % value - elif type == fdt.TYPE_BOOL: - return 'true' - - def GetCompatName(self, node): - """Get a node's first compatible string as a C identifier - - Args: - node: Node object to check - Return: - C identifier for the first compatible string - """ - compat = node.props['compatible'].value - aliases = [] - if type(compat) == list: - compat, aliases = compat[0], compat[1:] - return Conv_name_to_c(compat), [Conv_name_to_c(a) for a in aliases] - - def ScanDtb(self): - """Scan the device tree to obtain a tree of notes and properties - - Once this is done, self.fdt.GetRoot() can be called to obtain the - device tree root node, and progress from there. 
- """ - self.fdt = fdt.FdtScan(self._dtb_fname) - - def ScanNode(self, root): - for node in root.subnodes: - if 'compatible' in node.props: - status = node.props.get('status') - if (not self._options.include_disabled and not status or - status.value != 'disabled'): - self._valid_nodes.append(node) - phandle_prop = node.props.get('phandle') - if phandle_prop: - phandle = phandle_prop.GetPhandle() - self._phandle_node[phandle] = node - - # recurse to handle any subnodes - self.ScanNode(node); - - def ScanTree(self): - """Scan the device tree for useful information - - This fills in the following properties: - _phandle_node: A dict of Nodes indexed by phandle (an integer) - _valid_nodes: A list of nodes we wish to consider include in the - platform data - """ - self._phandle_node = {} - self._valid_nodes = [] - return self.ScanNode(self.fdt.GetRoot()); - - for node in self.fdt.GetRoot().subnodes: - if 'compatible' in node.props: - status = node.props.get('status') - if (not self._options.include_disabled and not status or - status.value != 'disabled'): - node_list.append(node) - phandle_prop = node.props.get('phandle') - if phandle_prop: - phandle = phandle_prop.GetPhandle() - self._phandle_node[phandle] = node - - self._valid_nodes = node_list - - def IsPhandle(self, prop): - """Check if a node contains phandles - - We have no reliable way of detecting whether a node uses a phandle - or not. As an interim measure, use a list of known property names. - - Args: - prop: Prop object to check - Return: - True if the object value contains phandles, else False - """ - if prop.name in ['clocks']: - return True - return False - - def ScanStructs(self): - """Scan the device tree building up the C structures we will use. - - Build a dict keyed by C struct name containing a dict of Prop - object for each struct field (keyed by property name). Where the - same struct appears multiple times, try to use the 'widest' - property, i.e. the one with a type which can express all others. 
- - Once the widest property is determined, all other properties are - updated to match that width. - """ - structs = {} - for node in self._valid_nodes: - node_name, _ = self.GetCompatName(node) - fields = {} - - # Get a list of all the valid properties in this node. - for name, prop in node.props.items(): - if name not in PROP_IGNORE_LIST and name[0] != '#': - fields[name] = copy.deepcopy(prop) - - # If we've seen this node_name before, update the existing struct. - if node_name in structs: - struct = structs[node_name] - for name, prop in fields.items(): - oldprop = struct.get(name) - if oldprop: - oldprop.Widen(prop) - else: - struct[name] = prop - - # Otherwise store this as a new struct. - else: - structs[node_name] = fields - - upto = 0 - for node in self._valid_nodes: - node_name, _ = self.GetCompatName(node) - struct = structs[node_name] - for name, prop in node.props.items(): - if name not in PROP_IGNORE_LIST and name[0] != '#': - prop.Widen(struct[name]) - upto += 1 - - struct_name, aliases = self.GetCompatName(node) - for alias in aliases: - self._aliases[alias] = struct_name - - return structs - - def ScanPhandles(self): - """Figure out what phandles each node uses - - We need to be careful when outputing nodes that use phandles since - they must come after the declaration of the phandles in the C file. - Otherwise we get a compiler error since the phandle struct is not yet - declared. - - This function adds to each node a list of phandle nodes that the node - depends on. This allows us to output things in the right order. 
- """ - for node in self._valid_nodes: - node.phandles = set() - for pname, prop in node.props.items(): - if pname in PROP_IGNORE_LIST or pname[0] == '#': - continue - if type(prop.value) == list: - if self.IsPhandle(prop): - # Process the list as pairs of (phandle, id) - it = iter(prop.value) - for phandle_cell, id_cell in zip(it, it): - phandle = fdt_util.fdt32_to_cpu(phandle_cell) - id = fdt_util.fdt32_to_cpu(id_cell) - target_node = self._phandle_node[phandle] - node.phandles.add(target_node) - - - def GenerateStructs(self, structs): - """Generate struct defintions for the platform data - - This writes out the body of a header file consisting of structure - definitions for node in self._valid_nodes. See the documentation in - README.of-plat for more information. - """ - self.Out('#include \n') - self.Out('#include \n') - - # Output the struct definition - for name in sorted(structs): - self.Out('struct %s%s {\n' % (STRUCT_PREFIX, name)); - for pname in sorted(structs[name]): - prop = structs[name][pname] - if self.IsPhandle(prop): - # For phandles, include a reference to the target - self.Out('\t%s%s[%d]' % (TabTo(2, 'struct phandle_2_cell'), - Conv_name_to_c(prop.name), - len(prop.value) / 2)) - else: - ptype = TYPE_NAMES[prop.type] - self.Out('\t%s%s' % (TabTo(2, ptype), - Conv_name_to_c(prop.name))) - if type(prop.value) == list: - self.Out('[%d]' % len(prop.value)) - self.Out(';\n') - self.Out('};\n') - - for alias, struct_name in self._aliases.iteritems(): - self.Out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias, - STRUCT_PREFIX, struct_name)) - - def OutputNode(self, node): - """Output the C code for a node - - Args: - node: node to output - """ - struct_name, _ = self.GetCompatName(node) - var_name = Conv_name_to_c(node.name) - self.Buf('static struct %s%s %s%s = {\n' % - (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) - for pname, prop in node.props.items(): - if pname in PROP_IGNORE_LIST or pname[0] == '#': - continue - ptype = TYPE_NAMES[prop.type] 
- member_name = Conv_name_to_c(prop.name) - self.Buf('\t%s= ' % TabTo(3, '.' + member_name)) - - # Special handling for lists - if type(prop.value) == list: - self.Buf('{') - vals = [] - # For phandles, output a reference to the platform data - # of the target node. - if self.IsPhandle(prop): - # Process the list as pairs of (phandle, id) - it = iter(prop.value) - for phandle_cell, id_cell in zip(it, it): - phandle = fdt_util.fdt32_to_cpu(phandle_cell) - id = fdt_util.fdt32_to_cpu(id_cell) - target_node = self._phandle_node[phandle] - name = Conv_name_to_c(target_node.name) - vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id)) - else: - for val in prop.value: - vals.append(self.GetValue(prop.type, val)) - self.Buf(', '.join(vals)) - self.Buf('}') - else: - self.Buf(self.GetValue(prop.type, prop.value)) - self.Buf(',\n') - self.Buf('};\n') - - # Add a device declaration - self.Buf('U_BOOT_DEVICE(%s) = {\n' % var_name) - self.Buf('\t.name\t\t= "%s",\n' % struct_name) - self.Buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name)) - self.Buf('\t.platdata_size\t= sizeof(%s%s),\n' % - (VAL_PREFIX, var_name)) - self.Buf('};\n') - self.Buf('\n') - - self.Out(''.join(self.GetBuf())) - - def GenerateTables(self): - """Generate device defintions for the platform data - - This writes out C platform data initialisation data and - U_BOOT_DEVICE() declarations for each valid node. Where a node has - multiple compatible strings, a #define is used to make them equivalent. - - See the documentation in doc/driver-model/of-plat.txt for more - information. 
- """ - self.Out('#include \n') - self.Out('#include \n') - self.Out('#include \n') - self.Out('\n') - nodes_to_output = list(self._valid_nodes) - - # Keep outputing nodes until there is none left - while nodes_to_output: - node = nodes_to_output[0] - # Output all the node's dependencies first - for req_node in node.phandles: - if req_node in nodes_to_output: - self.OutputNode(req_node) - nodes_to_output.remove(req_node) - self.OutputNode(node) - nodes_to_output.remove(node) +import dtb_platdata if __name__ != "__main__": @@ -454,7 +52,7 @@ parser.add_option('-o', '--output', action='store', default='-', if not args: raise ValueError('Please specify a command: struct, platdata') -plat = DtbPlatdata(options.dtb_file, options) +plat = dtb_platdata.DtbPlatdata(options.dtb_file, options) plat.ScanDtb() plat.ScanTree() plat.SetupOutput(options.output) -- cgit v1.2.3 From 2be282ca01dba237504fe2887fbd4539865093c3 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:08:59 -0600 Subject: dtoc: Fix pylint warnings Unfortunately I neglected to run pylint on this tool with its initial submission. Fix the warnings. Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 261 ++++++++++++++++++++++++--------------------- tools/dtoc/dtoc.py | 14 +-- 2 files changed, 144 insertions(+), 131 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index 94db274c098..75adf48deaf 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -6,7 +6,14 @@ # SPDX-License-Identifier: GPL-2.0+ # +"""Device tree to platform data class + +This supports converting device tree data to C structures definitions and +static data. 
+""" + import copy +import sys import fdt import fdt_util @@ -31,12 +38,12 @@ TYPE_NAMES = { fdt.TYPE_BYTE: 'unsigned char', fdt.TYPE_STRING: 'const char *', fdt.TYPE_BOOL: 'bool', -}; +} STRUCT_PREFIX = 'dtd_' VAL_PREFIX = 'dtv_' -def Conv_name_to_c(name): +def conv_name_to_c(name): """Convert a device-tree name to a C identifier Args: @@ -44,19 +51,29 @@ def Conv_name_to_c(name): Return: String containing the C version of this name """ - str = name.replace('@', '_at_') - str = str.replace('-', '_') - str = str.replace(',', '_') - str = str.replace('.', '_') - str = str.replace('/', '__') - return str - -def TabTo(num_tabs, str): - if len(str) >= num_tabs * 8: - return str + ' ' - return str + '\t' * (num_tabs - len(str) // 8) - -class DtbPlatdata: + new = name.replace('@', '_at_') + new = new.replace('-', '_') + new = new.replace(',', '_') + new = new.replace('.', '_') + new = new.replace('/', '__') + return new + +def tab_to(num_tabs, line): + """Append tabs to a line of text to reach a tab stop. + + Args: + num_tabs: Tab stop to obtain (0 = column 0, 1 = column 8, etc.) + line: Line of text to append to + + Returns: + line with the correct number of tabs appeneded. If the line already + extends past that tab stop then a single space is appended. + """ + if len(line) >= num_tabs * 8: + return line + ' ' + return line + '\t' * (num_tabs - len(line) // 8) + +class DtbPlatdata(object): """Provide a means to convert device tree binary data to platform data The output of this process is C structures which can be used in space- @@ -64,28 +81,29 @@ class DtbPlatdata: code is not affordable. Properties: - fdt: Fdt object, referencing the device tree + _fdt: Fdt object, referencing the device tree _dtb_fname: Filename of the input device tree binary file _valid_nodes: A list of Node object with compatible strings _options: Command-line options - _phandle_node: A dict of nodes indexed by phandle number (1, 2...) 
+ _phandle_nodes: A dict of nodes indexed by phandle number (1, 2...) _outfile: The current output file (sys.stdout or a real file) _lines: Stashed list of output lines for outputting in the future - _phandle_node: A dict of Nodes indexed by phandle (an integer) + _phandle_nodes: A dict of Nodes indexed by phandle (an integer) """ def __init__(self, dtb_fname, options): + self._fdt = None self._dtb_fname = dtb_fname self._valid_nodes = None self._options = options - self._phandle_node = {} + self._phandle_nodes = {} self._outfile = None self._lines = [] self._aliases = {} - def SetupOutput(self, fname): + def setup_output(self, fname): """Set up the output destination - Once this is done, future calls to self.Out() will output to this + Once this is done, future calls to self.out() will output to this file. Args: @@ -96,23 +114,23 @@ class DtbPlatdata: else: self._outfile = open(fname, 'w') - def Out(self, str): + def out(self, line): """Output a string to the output file Args: - str: String to output + line: String to output """ - self._outfile.write(str) + self._outfile.write(line) - def Buf(self, str): + def buf(self, line): """Buffer up a string to send later Args: - str: String to add to our 'buffer' list + line: String to add to our 'buffer' list """ - self._lines.append(str) + self._lines.append(line) - def GetBuf(self): + def get_buf(self): """Get the contents of the output buffer, and clear it Returns: @@ -122,7 +140,8 @@ class DtbPlatdata: self._lines = [] return lines - def GetValue(self, type, value): + @staticmethod + def get_value(ftype, value): """Get a value as a C expression For integers this returns a byte-swapped (little-endian) hex string @@ -134,16 +153,17 @@ class DtbPlatdata: type: Data type (fdt_util) value: Data value, as a string of bytes """ - if type == fdt.TYPE_INT: + if ftype == fdt.TYPE_INT: return '%#x' % fdt_util.fdt32_to_cpu(value) - elif type == fdt.TYPE_BYTE: + elif ftype == fdt.TYPE_BYTE: return '%#x' % ord(value[0]) - elif type 
== fdt.TYPE_STRING: + elif ftype == fdt.TYPE_STRING: return '"%s"' % value - elif type == fdt.TYPE_BOOL: + elif ftype == fdt.TYPE_BOOL: return 'true' - def GetCompatName(self, node): + @staticmethod + def get_compat_name(node): """Get a node's first compatible string as a C identifier Args: @@ -153,59 +173,55 @@ class DtbPlatdata: """ compat = node.props['compatible'].value aliases = [] - if type(compat) == list: + if isinstance(compat, list): compat, aliases = compat[0], compat[1:] - return Conv_name_to_c(compat), [Conv_name_to_c(a) for a in aliases] + return conv_name_to_c(compat), [conv_name_to_c(a) for a in aliases] - def ScanDtb(self): + def scan_dtb(self): """Scan the device tree to obtain a tree of notes and properties - Once this is done, self.fdt.GetRoot() can be called to obtain the + Once this is done, self._fdt.GetRoot() can be called to obtain the device tree root node, and progress from there. """ - self.fdt = fdt.FdtScan(self._dtb_fname) + self._fdt = fdt.FdtScan(self._dtb_fname) + + def scan_node(self, root): + """Scan a node and subnodes to build a tree of node and phandle info + + This adds each node to self._valid_nodes and each phandle to + self._phandle_nodes. 
- def ScanNode(self, root): + Args: + root: Root node for scan + """ for node in root.subnodes: if 'compatible' in node.props: status = node.props.get('status') if (not self._options.include_disabled and not status or - status.value != 'disabled'): + status.value != 'disabled'): self._valid_nodes.append(node) phandle_prop = node.props.get('phandle') if phandle_prop: phandle = phandle_prop.GetPhandle() - self._phandle_node[phandle] = node + self._phandle_nodes[phandle] = node # recurse to handle any subnodes - self.ScanNode(node); + self.scan_node(node) - def ScanTree(self): + def scan_tree(self): """Scan the device tree for useful information This fills in the following properties: - _phandle_node: A dict of Nodes indexed by phandle (an integer) + _phandle_nodes: A dict of Nodes indexed by phandle (an integer) _valid_nodes: A list of nodes we wish to consider include in the platform data """ - self._phandle_node = {} + self._phandle_nodes = {} self._valid_nodes = [] - return self.ScanNode(self.fdt.GetRoot()); - - for node in self.fdt.GetRoot().subnodes: - if 'compatible' in node.props: - status = node.props.get('status') - if (not self._options.include_disabled and not status or - status.value != 'disabled'): - node_list.append(node) - phandle_prop = node.props.get('phandle') - if phandle_prop: - phandle = phandle_prop.GetPhandle() - self._phandle_node[phandle] = node - - self._valid_nodes = node_list + return self.scan_node(self._fdt.GetRoot()) - def IsPhandle(self, prop): + @staticmethod + def is_phandle(prop): """Check if a node contains phandles We have no reliable way of detecting whether a node uses a phandle @@ -220,7 +236,7 @@ class DtbPlatdata: return True return False - def ScanStructs(self): + def scan_structs(self): """Scan the device tree building up the C structures we will use. 
Build a dict keyed by C struct name containing a dict of Prop @@ -233,7 +249,7 @@ class DtbPlatdata: """ structs = {} for node in self._valid_nodes: - node_name, _ = self.GetCompatName(node) + node_name, _ = self.get_compat_name(node) fields = {} # Get a list of all the valid properties in this node. @@ -257,20 +273,20 @@ class DtbPlatdata: upto = 0 for node in self._valid_nodes: - node_name, _ = self.GetCompatName(node) + node_name, _ = self.get_compat_name(node) struct = structs[node_name] for name, prop in node.props.items(): if name not in PROP_IGNORE_LIST and name[0] != '#': prop.Widen(struct[name]) upto += 1 - struct_name, aliases = self.GetCompatName(node) + struct_name, aliases = self.get_compat_name(node) for alias in aliases: self._aliases[alias] = struct_name return structs - def ScanPhandles(self): + def scan_phandles(self): """Figure out what phandles each node uses We need to be careful when outputing nodes that use phandles since @@ -286,104 +302,101 @@ class DtbPlatdata: for pname, prop in node.props.items(): if pname in PROP_IGNORE_LIST or pname[0] == '#': continue - if type(prop.value) == list: - if self.IsPhandle(prop): + if isinstance(prop.value, list): + if self.is_phandle(prop): # Process the list as pairs of (phandle, id) - it = iter(prop.value) - for phandle_cell, id_cell in zip(it, it): + value_it = iter(prop.value) + for phandle_cell, _ in zip(value_it, value_it): phandle = fdt_util.fdt32_to_cpu(phandle_cell) - id = fdt_util.fdt32_to_cpu(id_cell) - target_node = self._phandle_node[phandle] + target_node = self._phandle_nodes[phandle] node.phandles.add(target_node) - def GenerateStructs(self, structs): + def generate_structs(self, structs): """Generate struct defintions for the platform data This writes out the body of a header file consisting of structure definitions for node in self._valid_nodes. See the documentation in README.of-plat for more information. 
""" - self.Out('#include \n') - self.Out('#include \n') + self.out('#include \n') + self.out('#include \n') # Output the struct definition for name in sorted(structs): - self.Out('struct %s%s {\n' % (STRUCT_PREFIX, name)); + self.out('struct %s%s {\n' % (STRUCT_PREFIX, name)) for pname in sorted(structs[name]): prop = structs[name][pname] - if self.IsPhandle(prop): + if self.is_phandle(prop): # For phandles, include a reference to the target - self.Out('\t%s%s[%d]' % (TabTo(2, 'struct phandle_2_cell'), - Conv_name_to_c(prop.name), + self.out('\t%s%s[%d]' % (tab_to(2, 'struct phandle_2_cell'), + conv_name_to_c(prop.name), len(prop.value) / 2)) else: ptype = TYPE_NAMES[prop.type] - self.Out('\t%s%s' % (TabTo(2, ptype), - Conv_name_to_c(prop.name))) - if type(prop.value) == list: - self.Out('[%d]' % len(prop.value)) - self.Out(';\n') - self.Out('};\n') + self.out('\t%s%s' % (tab_to(2, ptype), + conv_name_to_c(prop.name))) + if isinstance(prop.value, list): + self.out('[%d]' % len(prop.value)) + self.out(';\n') + self.out('};\n') for alias, struct_name in self._aliases.iteritems(): - self.Out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias, + self.out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias, STRUCT_PREFIX, struct_name)) - def OutputNode(self, node): + def output_node(self, node): """Output the C code for a node Args: node: node to output """ - struct_name, _ = self.GetCompatName(node) - var_name = Conv_name_to_c(node.name) - self.Buf('static struct %s%s %s%s = {\n' % - (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) + struct_name, _ = self.get_compat_name(node) + var_name = conv_name_to_c(node.name) + self.buf('static struct %s%s %s%s = {\n' % + (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) for pname, prop in node.props.items(): if pname in PROP_IGNORE_LIST or pname[0] == '#': continue - ptype = TYPE_NAMES[prop.type] - member_name = Conv_name_to_c(prop.name) - self.Buf('\t%s= ' % TabTo(3, '.' 
+ member_name)) + member_name = conv_name_to_c(prop.name) + self.buf('\t%s= ' % tab_to(3, '.' + member_name)) # Special handling for lists - if type(prop.value) == list: - self.Buf('{') + if isinstance(prop.value, list): + self.buf('{') vals = [] # For phandles, output a reference to the platform data # of the target node. - if self.IsPhandle(prop): + if self.is_phandle(prop): # Process the list as pairs of (phandle, id) - it = iter(prop.value) - for phandle_cell, id_cell in zip(it, it): + value_it = iter(prop.value) + for phandle_cell, id_cell in zip(value_it, value_it): phandle = fdt_util.fdt32_to_cpu(phandle_cell) - id = fdt_util.fdt32_to_cpu(id_cell) - target_node = self._phandle_node[phandle] - name = Conv_name_to_c(target_node.name) - vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id)) + id_num = fdt_util.fdt32_to_cpu(id_cell) + target_node = self._phandle_nodes[phandle] + name = conv_name_to_c(target_node.name) + vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id_num)) else: for val in prop.value: - vals.append(self.GetValue(prop.type, val)) - self.Buf(', '.join(vals)) - self.Buf('}') + vals.append(self.get_value(prop.type, val)) + self.buf(', '.join(vals)) + self.buf('}') else: - self.Buf(self.GetValue(prop.type, prop.value)) - self.Buf(',\n') - self.Buf('};\n') + self.buf(self.get_value(prop.type, prop.value)) + self.buf(',\n') + self.buf('};\n') # Add a device declaration - self.Buf('U_BOOT_DEVICE(%s) = {\n' % var_name) - self.Buf('\t.name\t\t= "%s",\n' % struct_name) - self.Buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name)) - self.Buf('\t.platdata_size\t= sizeof(%s%s),\n' % - (VAL_PREFIX, var_name)) - self.Buf('};\n') - self.Buf('\n') + self.buf('U_BOOT_DEVICE(%s) = {\n' % var_name) + self.buf('\t.name\t\t= "%s",\n' % struct_name) + self.buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name)) + self.buf('\t.platdata_size\t= sizeof(%s%s),\n' % (VAL_PREFIX, var_name)) + self.buf('};\n') + self.buf('\n') - self.Out(''.join(self.GetBuf())) + 
self.out(''.join(self.get_buf())) - def GenerateTables(self): + def generate_tables(self): """Generate device defintions for the platform data This writes out C platform data initialisation data and @@ -393,10 +406,10 @@ class DtbPlatdata: See the documentation in doc/driver-model/of-plat.txt for more information. """ - self.Out('#include \n') - self.Out('#include \n') - self.Out('#include \n') - self.Out('\n') + self.out('#include \n') + self.out('#include \n') + self.out('#include \n') + self.out('\n') nodes_to_output = list(self._valid_nodes) # Keep outputing nodes until there is none left @@ -405,7 +418,7 @@ class DtbPlatdata: # Output all the node's dependencies first for req_node in node.phandles: if req_node in nodes_to_output: - self.OutputNode(req_node) + self.output_node(req_node) nodes_to_output.remove(req_node) - self.OutputNode(node) + self.output_node(node) nodes_to_output.remove(node) diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 8fc717a92d1..abda9191fa4 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -53,16 +53,16 @@ if not args: raise ValueError('Please specify a command: struct, platdata') plat = dtb_platdata.DtbPlatdata(options.dtb_file, options) -plat.ScanDtb() -plat.ScanTree() -plat.SetupOutput(options.output) -structs = plat.ScanStructs() -plat.ScanPhandles() +plat.scan_dtb() +plat.scan_tree() +plat.setup_output(options.output) +structs = plat.scan_structs() +plat.scan_phandles() for cmd in args[0].split(','): if cmd == 'struct': - plat.GenerateStructs(structs) + plat.generate_structs(structs) elif cmd == 'platdata': - plat.GenerateTables() + plat.generate_tables() else: raise ValueError("Unknown command '%s': (use: struct, platdata)" % cmd) -- cgit v1.2.3 From 86290ce40e76ad2e09da096cc7686cdc86756ee1 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:00 -0600 Subject: dtoc: Don't handle properties with / in them This conversion appears to not be needed as it does not occur in practice. Drop it. 
Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 1 - 1 file changed, 1 deletion(-) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index 75adf48deaf..d86651b9aa9 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -55,7 +55,6 @@ def conv_name_to_c(name): new = new.replace('-', '_') new = new.replace(',', '_') new = new.replace('.', '_') - new = new.replace('/', '__') return new def tab_to(num_tabs, line): -- cgit v1.2.3 From e36024b05ff64937be65a74c156e9c83ad315a4c Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:01 -0600 Subject: dtoc: Pass include_disabled explicitly This option is the only one actually used by the dtb_platdata class. Pass it explicitly to avoid needing to pass the whole option object to the constructor. Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 8 ++++---- tools/dtoc/dtoc.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index d86651b9aa9..de4a88b5a9d 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -83,17 +83,17 @@ class DtbPlatdata(object): _fdt: Fdt object, referencing the device tree _dtb_fname: Filename of the input device tree binary file _valid_nodes: A list of Node object with compatible strings - _options: Command-line options + _include_disabled: true to include nodes marked status = "disabled" _phandle_nodes: A dict of nodes indexed by phandle number (1, 2...) 
_outfile: The current output file (sys.stdout or a real file) _lines: Stashed list of output lines for outputting in the future _phandle_nodes: A dict of Nodes indexed by phandle (an integer) """ - def __init__(self, dtb_fname, options): + def __init__(self, dtb_fname, include_disabled): self._fdt = None self._dtb_fname = dtb_fname self._valid_nodes = None - self._options = options + self._include_disabled = include_disabled self._phandle_nodes = {} self._outfile = None self._lines = [] @@ -196,7 +196,7 @@ class DtbPlatdata(object): for node in root.subnodes: if 'compatible' in node.props: status = node.props.get('status') - if (not self._options.include_disabled and not status or + if (not self._include_disabled and not status or status.value != 'disabled'): self._valid_nodes.append(node) phandle_prop = node.props.get('phandle') diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index abda9191fa4..1f17ea47e02 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -52,7 +52,7 @@ parser.add_option('-o', '--output', action='store', default='-', if not args: raise ValueError('Please specify a command: struct, platdata') -plat = dtb_platdata.DtbPlatdata(options.dtb_file, options) +plat = dtb_platdata.DtbPlatdata(options.dtb_file, options.include_disabled) plat.scan_dtb() plat.scan_tree() plat.setup_output(options.output) -- cgit v1.2.3 From 56e0bbe0577f33d5bb2486a60267ba759e2ea643 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:02 -0600 Subject: dtoc: Move static functions out of the class Rather than using static functions within the class, move them out of the class. This will make it slightly easier for tests to call them. 
Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 125 +++++++++++++++++++++++---------------------- 1 file changed, 63 insertions(+), 62 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index de4a88b5a9d..a1f32e164a1 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -72,6 +72,60 @@ def tab_to(num_tabs, line): return line + ' ' return line + '\t' * (num_tabs - len(line) // 8) +def get_value(ftype, value): + """Get a value as a C expression + + For integers this returns a byte-swapped (little-endian) hex string + For bytes this returns a hex string, e.g. 0x12 + For strings this returns a literal string enclosed in quotes + For booleans this return 'true' + + Args: + type: Data type (fdt_util) + value: Data value, as a string of bytes + """ + if ftype == fdt.TYPE_INT: + return '%#x' % fdt_util.fdt32_to_cpu(value) + elif ftype == fdt.TYPE_BYTE: + return '%#x' % ord(value[0]) + elif ftype == fdt.TYPE_STRING: + return '"%s"' % value + elif ftype == fdt.TYPE_BOOL: + return 'true' + +def get_compat_name(node): + """Get a node's first compatible string as a C identifier + + Args: + node: Node object to check + Return: + Tuple: + C identifier for the first compatible string + List of C identifiers for all the other compatible strings + (possibly empty) + """ + compat = node.props['compatible'].value + aliases = [] + if isinstance(compat, list): + compat, aliases = compat[0], compat[1:] + return conv_name_to_c(compat), [conv_name_to_c(a) for a in aliases] + +def is_phandle(prop): + """Check if a node contains phandles + + We have no reliable way of detecting whether a node uses a phandle + or not. As an interim measure, use a list of known property names. 
+ + Args: + prop: Prop object to check + Return: + True if the object value contains phandles, else False + """ + if prop.name in ['clocks']: + return True + return False + + class DtbPlatdata(object): """Provide a means to convert device tree binary data to platform data @@ -139,43 +193,6 @@ class DtbPlatdata(object): self._lines = [] return lines - @staticmethod - def get_value(ftype, value): - """Get a value as a C expression - - For integers this returns a byte-swapped (little-endian) hex string - For bytes this returns a hex string, e.g. 0x12 - For strings this returns a literal string enclosed in quotes - For booleans this return 'true' - - Args: - type: Data type (fdt_util) - value: Data value, as a string of bytes - """ - if ftype == fdt.TYPE_INT: - return '%#x' % fdt_util.fdt32_to_cpu(value) - elif ftype == fdt.TYPE_BYTE: - return '%#x' % ord(value[0]) - elif ftype == fdt.TYPE_STRING: - return '"%s"' % value - elif ftype == fdt.TYPE_BOOL: - return 'true' - - @staticmethod - def get_compat_name(node): - """Get a node's first compatible string as a C identifier - - Args: - node: Node object to check - Return: - C identifier for the first compatible string - """ - compat = node.props['compatible'].value - aliases = [] - if isinstance(compat, list): - compat, aliases = compat[0], compat[1:] - return conv_name_to_c(compat), [conv_name_to_c(a) for a in aliases] - def scan_dtb(self): """Scan the device tree to obtain a tree of notes and properties @@ -219,22 +236,6 @@ class DtbPlatdata(object): self._valid_nodes = [] return self.scan_node(self._fdt.GetRoot()) - @staticmethod - def is_phandle(prop): - """Check if a node contains phandles - - We have no reliable way of detecting whether a node uses a phandle - or not. As an interim measure, use a list of known property names. 
- - Args: - prop: Prop object to check - Return: - True if the object value contains phandles, else False - """ - if prop.name in ['clocks']: - return True - return False - def scan_structs(self): """Scan the device tree building up the C structures we will use. @@ -248,7 +249,7 @@ class DtbPlatdata(object): """ structs = {} for node in self._valid_nodes: - node_name, _ = self.get_compat_name(node) + node_name, _ = get_compat_name(node) fields = {} # Get a list of all the valid properties in this node. @@ -272,14 +273,14 @@ class DtbPlatdata(object): upto = 0 for node in self._valid_nodes: - node_name, _ = self.get_compat_name(node) + node_name, _ = get_compat_name(node) struct = structs[node_name] for name, prop in node.props.items(): if name not in PROP_IGNORE_LIST and name[0] != '#': prop.Widen(struct[name]) upto += 1 - struct_name, aliases = self.get_compat_name(node) + struct_name, aliases = get_compat_name(node) for alias in aliases: self._aliases[alias] = struct_name @@ -302,7 +303,7 @@ class DtbPlatdata(object): if pname in PROP_IGNORE_LIST or pname[0] == '#': continue if isinstance(prop.value, list): - if self.is_phandle(prop): + if is_phandle(prop): # Process the list as pairs of (phandle, id) value_it = iter(prop.value) for phandle_cell, _ in zip(value_it, value_it): @@ -326,7 +327,7 @@ class DtbPlatdata(object): self.out('struct %s%s {\n' % (STRUCT_PREFIX, name)) for pname in sorted(structs[name]): prop = structs[name][pname] - if self.is_phandle(prop): + if is_phandle(prop): # For phandles, include a reference to the target self.out('\t%s%s[%d]' % (tab_to(2, 'struct phandle_2_cell'), conv_name_to_c(prop.name), @@ -350,7 +351,7 @@ class DtbPlatdata(object): Args: node: node to output """ - struct_name, _ = self.get_compat_name(node) + struct_name, _ = get_compat_name(node) var_name = conv_name_to_c(node.name) self.buf('static struct %s%s %s%s = {\n' % (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name)) @@ -366,7 +367,7 @@ class DtbPlatdata(object): vals 
= [] # For phandles, output a reference to the platform data # of the target node. - if self.is_phandle(prop): + if is_phandle(prop): # Process the list as pairs of (phandle, id) value_it = iter(prop.value) for phandle_cell, id_cell in zip(value_it, value_it): @@ -377,11 +378,11 @@ class DtbPlatdata(object): vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id_num)) else: for val in prop.value: - vals.append(self.get_value(prop.type, val)) + vals.append(get_value(prop.type, val)) self.buf(', '.join(vals)) self.buf('}') else: - self.buf(self.get_value(prop.type, prop.value)) + self.buf(get_value(prop.type, prop.value)) self.buf(',\n') self.buf('};\n') -- cgit v1.2.3 From fa0ea5b09ead2b0fa17754c0bf99249533fd36a3 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:03 -0600 Subject: dtoc: Move the main logic into the dtb_platdata file Collect the main logic of dtoc into a function and put it into dtb_platdata. This will allow tests to use this function instead of duplicating the code themselves. 
Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 29 +++++++++++++++++++++++++++++ tools/dtoc/dtoc.py | 19 ++----------------- 2 files changed, 31 insertions(+), 17 deletions(-) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index a1f32e164a1..9923892dc35 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -422,3 +422,32 @@ class DtbPlatdata(object): nodes_to_output.remove(req_node) self.output_node(node) nodes_to_output.remove(node) + + +def run_steps(args, dtb_file, include_disabled, output): + """Run all the steps of the dtoc tool + + Args: + args: List of non-option arguments provided to the problem + dtb_file: Filename of dtb file to process + include_disabled: True to include disabled nodes + output: Name of output file + """ + if not args: + raise ValueError('Please specify a command: struct, platdata') + + plat = DtbPlatdata(dtb_file, include_disabled) + plat.scan_dtb() + plat.scan_tree() + plat.setup_output(output) + structs = plat.scan_structs() + plat.scan_phandles() + + for cmd in args[0].split(','): + if cmd == 'struct': + plat.generate_structs(structs) + elif cmd == 'platdata': + plat.generate_tables() + else: + raise ValueError("Unknown command '%s': (use: struct, platdata)" % + cmd) diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 1f17ea47e02..140a19e9d47 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -49,20 +49,5 @@ parser.add_option('-o', '--output', action='store', default='-', help='Select output filename') (options, args) = parser.parse_args() -if not args: - raise ValueError('Please specify a command: struct, platdata') - -plat = dtb_platdata.DtbPlatdata(options.dtb_file, options.include_disabled) -plat.scan_dtb() -plat.scan_tree() -plat.setup_output(options.output) -structs = plat.scan_structs() -plat.scan_phandles() - -for cmd in args[0].split(','): - if cmd == 'struct': - plat.generate_structs(structs) - elif cmd == 'platdata': - 
plat.generate_tables() - else: - raise ValueError("Unknown command '%s': (use: struct, platdata)" % cmd) +dtb_platdata.run_steps(args, options.dtb_file, options.include_disabled, + options.output) -- cgit v1.2.3 From 30107b08d7df87e4c92e413d92896a847f1c74dd Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:04 -0600 Subject: dtoc: Add a comment about string replace in conv_name_to_c() This function uses several separate string replaces where a regular expression might seem more reasonable. Add a comment justifying the way it is currently done. Signed-off-by: Simon Glass --- tools/dtoc/dtb_platdata.py | 3 +++ 1 file changed, 3 insertions(+) (limited to 'tools') diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py index 9923892dc35..1f85343a9fd 100644 --- a/tools/dtoc/dtb_platdata.py +++ b/tools/dtoc/dtb_platdata.py @@ -46,6 +46,9 @@ VAL_PREFIX = 'dtv_' def conv_name_to_c(name): """Convert a device-tree name to a C identifier + This uses multiple replace() calls instead of re.sub() since it is faster + (400ms for 1m calls versus 1000ms for the 're' version). + Args: name: Name to convert Return: -- cgit v1.2.3 From c07919281c521c57d34eba8bfbac910c9632beda Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sun, 18 Jun 2017 22:09:06 -0600 Subject: dtoc: Add tests Add some tests of dtoc's functionality to make it easier to expand and enhance the tool. 
Signed-off-by: Simon Glass --- tools/dtoc/dtoc.py | 31 ++++- tools/dtoc/dtoc_test.dts | 12 ++ tools/dtoc/dtoc_test_aliases.dts | 18 +++ tools/dtoc/dtoc_test_empty.dts | 12 ++ tools/dtoc/dtoc_test_phandle.dts | 23 ++++ tools/dtoc/dtoc_test_simple.dts | 48 +++++++ tools/dtoc/test_dtoc.py | 271 +++++++++++++++++++++++++++++++++++++++ 7 files changed, 411 insertions(+), 4 deletions(-) create mode 100644 tools/dtoc/dtoc_test.dts create mode 100644 tools/dtoc/dtoc_test_aliases.dts create mode 100644 tools/dtoc/dtoc_test_empty.dts create mode 100644 tools/dtoc/dtoc_test_phandle.dts create mode 100644 tools/dtoc/dtoc_test_simple.dts create mode 100644 tools/dtoc/test_dtoc.py (limited to 'tools') diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py index 140a19e9d47..ce7bc054e5e 100755 --- a/tools/dtoc/dtoc.py +++ b/tools/dtoc/dtoc.py @@ -29,6 +29,7 @@ see doc/driver-model/of-plat.txt from optparse import OptionParser import os import sys +import unittest # Bring in the patman libraries our_path = os.path.dirname(os.path.realpath(__file__)) @@ -36,9 +37,24 @@ sys.path.append(os.path.join(our_path, '../patman')) import dtb_platdata +def run_tests(): + """Run all the test we have for dtoc""" + import test_dtoc -if __name__ != "__main__": - pass + result = unittest.TestResult() + sys.argv = [sys.argv[0]] + for module in (test_dtoc.TestDtoc,): + suite = unittest.TestLoader().loadTestsFromTestCase(module) + suite.run(result) + + print result + for _, err in result.errors: + print err + for _, err in result.failures: + print err + +if __name__ != '__main__': + sys.exit(1) parser = OptionParser() parser.add_option('-d', '--dtb-file', action='store', @@ -47,7 +63,14 @@ parser.add_option('--include-disabled', action='store_true', help='Include disabled nodes') parser.add_option('-o', '--output', action='store', default='-', help='Select output filename') +parser.add_option('-t', '--test', action='store_true', dest='test', + default=False, help='run tests') (options, args) = 
parser.parse_args() -dtb_platdata.run_steps(args, options.dtb_file, options.include_disabled, - options.output) +# Run our meagre tests +if options.test: + run_tests() + +else: + dtb_platdata.run_steps(args, options.dtb_file, options.include_disabled, + options.output) diff --git a/tools/dtoc/dtoc_test.dts b/tools/dtoc/dtoc_test.dts new file mode 100644 index 00000000000..1e866559757 --- /dev/null +++ b/tools/dtoc/dtoc_test.dts @@ -0,0 +1,12 @@ +/* + * Test device tree file for dtoc + * + * Copyright 2017 Google, Inc + * + * SPDX-License-Identifier: GPL-2.0+ + */ + + /dts-v1/; + +/ { +}; diff --git a/tools/dtoc/dtoc_test_aliases.dts b/tools/dtoc/dtoc_test_aliases.dts new file mode 100644 index 00000000000..c727f185afb --- /dev/null +++ b/tools/dtoc/dtoc_test_aliases.dts @@ -0,0 +1,18 @@ +/* + * Test device tree file for dtoc + * + * Copyright 2017 Google, Inc + * + * SPDX-License-Identifier: GPL-2.0+ + */ + + /dts-v1/; + +/ { + spl-test { + u-boot,dm-pre-reloc; + compatible = "compat1", "compat2.1-fred", "compat3"; + intval = <1>; + }; + +}; diff --git a/tools/dtoc/dtoc_test_empty.dts b/tools/dtoc/dtoc_test_empty.dts new file mode 100644 index 00000000000..1e866559757 --- /dev/null +++ b/tools/dtoc/dtoc_test_empty.dts @@ -0,0 +1,12 @@ +/* + * Test device tree file for dtoc + * + * Copyright 2017 Google, Inc + * + * SPDX-License-Identifier: GPL-2.0+ + */ + + /dts-v1/; + +/ { +}; diff --git a/tools/dtoc/dtoc_test_phandle.dts b/tools/dtoc/dtoc_test_phandle.dts new file mode 100644 index 00000000000..e9828a695b5 --- /dev/null +++ b/tools/dtoc/dtoc_test_phandle.dts @@ -0,0 +1,23 @@ +/* + * Test device tree file for dtoc + * + * Copyright 2017 Google, Inc + * + * SPDX-License-Identifier: GPL-2.0+ + */ + + /dts-v1/; + +/ { + phandle: phandle-target { + u-boot,dm-pre-reloc; + compatible = "target"; + intval = <1>; + }; + + phandle-source { + u-boot,dm-pre-reloc; + compatible = "source"; + clocks = <&phandle 1>; + }; +}; diff --git a/tools/dtoc/dtoc_test_simple.dts 
b/tools/dtoc/dtoc_test_simple.dts new file mode 100644 index 00000000000..c7366862637 --- /dev/null +++ b/tools/dtoc/dtoc_test_simple.dts @@ -0,0 +1,48 @@ +/* + * Test device tree file for dtoc + * + * Copyright 2017 Google, Inc + * + * SPDX-License-Identifier: GPL-2.0+ + */ + + /dts-v1/; + +/ { + spl-test { + u-boot,dm-pre-reloc; + compatible = "sandbox,spl-test"; + boolval; + intval = <1>; + intarray = <2 3 4>; + byteval = [05]; + bytearray = [06]; + longbytearray = [09 0a 0b 0c 0d 0e 0f 10 11]; + stringval = "message"; + stringarray = "multi-word", "message"; + }; + + spl-test2 { + u-boot,dm-pre-reloc; + compatible = "sandbox,spl-test"; + intval = <3>; + intarray = <5>; + byteval = [08]; + bytearray = [01 23 34]; + longbytearray = [09 0a 0b 0c]; + stringval = "message2"; + stringarray = "another", "multi-word", "message"; + }; + + spl-test3 { + u-boot,dm-pre-reloc; + compatible = "sandbox,spl-test"; + stringarray = "one"; + }; + + spl-test4 { + u-boot,dm-pre-reloc; + compatible = "sandbox,spl-test.2"; + }; + +}; diff --git a/tools/dtoc/test_dtoc.py b/tools/dtoc/test_dtoc.py new file mode 100644 index 00000000000..8b95c4124f0 --- /dev/null +++ b/tools/dtoc/test_dtoc.py @@ -0,0 +1,271 @@ +# +# Copyright (c) 2012 The Chromium OS Authors. 
+# +# SPDX-License-Identifier: GPL-2.0+ +# + +"""Tests for the dtb_platdata module + +This includes unit tests for some functions and functional tests for +""" + +import collections +import os +import struct +import unittest + +import dtb_platdata +from dtb_platdata import conv_name_to_c +from dtb_platdata import get_compat_name +from dtb_platdata import get_value +from dtb_platdata import tab_to +import fdt +import fdt_util +import tools + +our_path = os.path.dirname(os.path.realpath(__file__)) + + +def get_dtb_file(dts_fname): + """Compile a .dts file to a .dtb + + Args: + dts_fname: Filename of .dts file in the current directory + + Returns: + Filename of compiled file in output directory + """ + return fdt_util.EnsureCompiled(os.path.join(our_path, dts_fname)) + + +class TestDtoc(unittest.TestCase): + """Tests for dtoc""" + @classmethod + def setUpClass(cls): + tools.PrepareOutputDir(None) + + @classmethod + def tearDownClass(cls): + tools._RemoveOutputDir() + + def test_name(self): + """Test conversion of device tree names to C identifiers""" + self.assertEqual('serial_at_0x12', conv_name_to_c('serial@0x12')) + self.assertEqual('vendor_clock_frequency', + conv_name_to_c('vendor,clock-frequency')) + self.assertEqual('rockchip_rk3399_sdhci_5_1', + conv_name_to_c('rockchip,rk3399-sdhci-5.1')) + + def test_tab_to(self): + """Test operation of tab_to() function""" + self.assertEqual('fred ', tab_to(0, 'fred')) + self.assertEqual('fred\t', tab_to(1, 'fred')) + self.assertEqual('fred was here ', tab_to(1, 'fred was here')) + self.assertEqual('fred was here\t\t', tab_to(3, 'fred was here')) + self.assertEqual('exactly8 ', tab_to(1, 'exactly8')) + self.assertEqual('exactly8\t', tab_to(2, 'exactly8')) + + def test_get_value(self): + """Test operation of get_value() function""" + self.assertEqual('0x45', + get_value(fdt.TYPE_INT, struct.pack('>I', 0x45))) + self.assertEqual('0x45', + get_value(fdt.TYPE_BYTE, struct.pack('I', 0x45))) + self.assertEqual('"test"', 
get_value(fdt.TYPE_STRING, 'test')) + self.assertEqual('true', get_value(fdt.TYPE_BOOL, None)) + + def test_get_compat_name(self): + """Test operation of get_compat_name() function""" + Prop = collections.namedtuple('Prop', ['value']) + Node = collections.namedtuple('Node', ['props']) + + prop = Prop(['rockchip,rk3399-sdhci-5.1', 'arasan,sdhci-5.1']) + node = Node({'compatible': prop}) + self.assertEqual(('rockchip_rk3399_sdhci_5_1', ['arasan_sdhci_5_1']), + get_compat_name(node)) + + prop = Prop(['rockchip,rk3399-sdhci-5.1']) + node = Node({'compatible': prop}) + self.assertEqual(('rockchip_rk3399_sdhci_5_1', []), + get_compat_name(node)) + + prop = Prop(['rockchip,rk3399-sdhci-5.1', 'arasan,sdhci-5.1', 'third']) + node = Node({'compatible': prop}) + self.assertEqual(('rockchip_rk3399_sdhci_5_1', + ['arasan_sdhci_5_1', 'third']), + get_compat_name(node)) + + def test_empty_file(self): + """Test output from a device tree file with no nodes""" + dtb_file = get_dtb_file('dtoc_test_empty.dts') + output = tools.GetOutputFilename('output') + dtb_platdata.run_steps(['struct'], dtb_file, False, output) + with open(output) as infile: + lines = infile.read().splitlines() + self.assertEqual(['#include ', '#include '], lines) + + dtb_platdata.run_steps(['platdata'], dtb_file, False, output) + with open(output) as infile: + lines = infile.read().splitlines() + self.assertEqual(['#include ', '#include ', + '#include ', ''], lines) + + def test_simple(self): + """Test output from some simple nodes with various types of data""" + dtb_file = get_dtb_file('dtoc_test_simple.dts') + output = tools.GetOutputFilename('output') + dtb_platdata.run_steps(['struct'], dtb_file, False, output) + with open(output) as infile: + data = infile.read() + self.assertEqual('''#include +#include +struct dtd_sandbox_spl_test { +\tbool\t\tboolval; +\tunsigned char\tbytearray[3]; +\tunsigned char\tbyteval; +\tfdt32_t\t\tintarray[4]; +\tfdt32_t\t\tintval; +\tunsigned char\tlongbytearray[9]; +\tconst char 
*\tstringarray[3]; +\tconst char *\tstringval; +}; +struct dtd_sandbox_spl_test_2 { +}; +''', data) + + dtb_platdata.run_steps(['platdata'], dtb_file, False, output) + with open(output) as infile: + data = infile.read() + self.assertEqual('''#include +#include +#include + +static struct dtd_sandbox_spl_test dtv_spl_test = { +\t.bytearray\t\t= {0x6, 0x0, 0x0}, +\t.byteval\t\t= 0x5, +\t.intval\t\t\t= 0x1, +\t.longbytearray\t\t= {0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10, 0x11}, +\t.stringval\t\t= "message", +\t.boolval\t\t= true, +\t.intarray\t\t= {0x2, 0x3, 0x4, 0x0}, +\t.stringarray\t\t= {"multi-word", "message", ""}, +}; +U_BOOT_DEVICE(spl_test) = { +\t.name\t\t= "sandbox_spl_test", +\t.platdata\t= &dtv_spl_test, +\t.platdata_size\t= sizeof(dtv_spl_test), +}; + +static struct dtd_sandbox_spl_test dtv_spl_test2 = { +\t.bytearray\t\t= {0x1, 0x23, 0x34}, +\t.byteval\t\t= 0x8, +\t.intval\t\t\t= 0x3, +\t.longbytearray\t\t= {0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}, +\t.stringval\t\t= "message2", +\t.intarray\t\t= {0x5, 0x0, 0x0, 0x0}, +\t.stringarray\t\t= {"another", "multi-word", "message"}, +}; +U_BOOT_DEVICE(spl_test2) = { +\t.name\t\t= "sandbox_spl_test", +\t.platdata\t= &dtv_spl_test2, +\t.platdata_size\t= sizeof(dtv_spl_test2), +}; + +static struct dtd_sandbox_spl_test dtv_spl_test3 = { +\t.stringarray\t\t= {"one", "", ""}, +}; +U_BOOT_DEVICE(spl_test3) = { +\t.name\t\t= "sandbox_spl_test", +\t.platdata\t= &dtv_spl_test3, +\t.platdata_size\t= sizeof(dtv_spl_test3), +}; + +static struct dtd_sandbox_spl_test_2 dtv_spl_test4 = { +}; +U_BOOT_DEVICE(spl_test4) = { +\t.name\t\t= "sandbox_spl_test_2", +\t.platdata\t= &dtv_spl_test4, +\t.platdata_size\t= sizeof(dtv_spl_test4), +}; + +''', data) + + def test_phandle(self): + """Test output from a node containing a phandle reference""" + dtb_file = get_dtb_file('dtoc_test_phandle.dts') + output = tools.GetOutputFilename('output') + dtb_platdata.run_steps(['struct'], dtb_file, False, output) + with open(output) as infile: 
+ data = infile.read() + self.assertEqual('''#include +#include +struct dtd_source { +\tstruct phandle_2_cell clocks[1]; +}; +struct dtd_target { +\tfdt32_t\t\tintval; +}; +''', data) + + dtb_platdata.run_steps(['platdata'], dtb_file, False, output) + with open(output) as infile: + data = infile.read() + self.assertEqual('''#include +#include +#include + +static struct dtd_target dtv_phandle_target = { +\t.intval\t\t\t= 0x1, +}; +U_BOOT_DEVICE(phandle_target) = { +\t.name\t\t= "target", +\t.platdata\t= &dtv_phandle_target, +\t.platdata_size\t= sizeof(dtv_phandle_target), +}; + +static struct dtd_source dtv_phandle_source = { +\t.clocks\t\t\t= {{&dtv_phandle_target, 1}}, +}; +U_BOOT_DEVICE(phandle_source) = { +\t.name\t\t= "source", +\t.platdata\t= &dtv_phandle_source, +\t.platdata_size\t= sizeof(dtv_phandle_source), +}; + +''', data) + + def test_aliases(self): + """Test output from a node with multiple compatible strings""" + dtb_file = get_dtb_file('dtoc_test_aliases.dts') + output = tools.GetOutputFilename('output') + dtb_platdata.run_steps(['struct'], dtb_file, False, output) + with open(output) as infile: + data = infile.read() + self.assertEqual('''#include +#include +struct dtd_compat1 { +\tfdt32_t\t\tintval; +}; +#define dtd_compat2_1_fred dtd_compat1 +#define dtd_compat3 dtd_compat1 +''', data) + + dtb_platdata.run_steps(['platdata'], dtb_file, False, output) + with open(output) as infile: + data = infile.read() + self.assertEqual('''#include +#include +#include + +static struct dtd_compat1 dtv_spl_test = { +\t.intval\t\t\t= 0x1, +}; +U_BOOT_DEVICE(spl_test) = { +\t.name\t\t= "compat1", +\t.platdata\t= &dtv_spl_test, +\t.platdata_size\t= sizeof(dtv_spl_test), +}; + +''', data) -- cgit v1.2.3 From 7feccfdc45d5708ffd682a025d6d6e8907f1a97a Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Tue, 20 Jun 2017 21:28:49 -0600 Subject: binman: Put our local modules ahead of system modules If a system module is named the same as one of those used by binman we 
currently pick the system module. Adjust the ordering so that our modules are chosen instead. The module conflict reported was 'tools' from jira-python. I cannot access that package to test it. Signed-off-by: Simon Glass Reported-by: Kevin Hilman Acked-by: Kevin Hilman --- tools/binman/binman.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) (limited to 'tools') diff --git a/tools/binman/binman.py b/tools/binman/binman.py index 95d3a048d86..09dc36a3f79 100755 --- a/tools/binman/binman.py +++ b/tools/binman/binman.py @@ -17,15 +17,14 @@ import unittest # Bring in the patman and dtoc libraries our_path = os.path.dirname(os.path.realpath(__file__)) -sys.path.append(os.path.join(our_path, '../patman')) -sys.path.append(os.path.join(our_path, '../dtoc')) -sys.path.append(os.path.join(our_path, '../')) +for dirname in ['../patman', '../dtoc', '..']: + sys.path.insert(0, os.path.join(our_path, dirname)) # Bring in the libfdt module -sys.path.append('tools') +sys.path.insert(0, 'tools') # Also allow entry-type modules to be brought in from the etype directory. -sys.path.append(os.path.join(our_path, 'etype')) +sys.path.insert(0, os.path.join(our_path, 'etype')) import cmdline import command -- cgit v1.2.3