1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
3 # Written by Simon Glass <sjg@chromium.org>
5 # Creates binary images from input files controlled by a description
8 from collections import OrderedDict
11 import importlib.resources
12 except ImportError: # pragma: no cover
14 import importlib_resources
21 from binman import bintool
22 from binman import cbfs_util
23 from binman import elf
24 from binman import entry
25 from dtoc import fdt_util
26 from u_boot_pylib import command
27 from u_boot_pylib import tools
28 from u_boot_pylib import tout
30 # These are imported if needed since they import libfdt
# List of images we plan to create
# Make this global so that it can be referenced from tests
images = OrderedDict()

# Help text for each type of missing blob, dict:
#    key: Value of the entry's 'missing-msg' or entry name
#    value: Text for the help
# Populated lazily on first use by _ShowHelpForMissingBlobs()
missing_blob_help = {}
def _ReadImageDesc(binman_node, use_expanded):
    """Read the image descriptions from the /binman node

    This normally produces a single Image object called 'image'. But if
    multiple images are present, they will all be returned.

    Args:
        binman_node: Node object of the /binman node
        use_expanded: True if the FDT will be updated with the entry information

    Returns:
        OrderedDict of Image objects, each of which describes an image
    """
    # pylint: disable=E1102
    images = OrderedDict()
    if 'multiple-images' in binman_node.props:
        # One image per subnode; template nodes are not images themselves
        for node in binman_node.subnodes:
            if not node.name.startswith('template'):
                images[node.name] = Image(node.name, node,
                                          use_expanded=use_expanded)
    else:
        # Single-image description: the /binman node itself is the image
        images['image'] = Image('image', binman_node, use_expanded=use_expanded)
    return images
67 def _FindBinmanNode(dtb):
68 """Find the 'binman' node in the device tree
71 dtb: Fdt object to scan
73 Node object of /binman node, or None if not found
75 for node in dtb.GetRoot().subnodes:
76 if node.name == 'binman':
def _ReadMissingBlobHelp():
    """Read the missing-blob-help file

    This file contains help messages explaining what to do when external blobs
    are missing, one block of text per tag.

    Returns:
        dict:
            key: Message tag (str)
            value: Message text (str)
    """
    def _FinishTag(tag, msg, result):
        # Record the message accumulated so far for the current tag
            result[tag] = msg.rstrip()

    # The help file ships alongside this module in the binman package
    my_data = pkg_resources.resource_string(__name__, 'missing-blob-help')
    re_tag = re.compile('^([-a-z0-9]+):$')  # a line like 'atf-bl31:' starts a new tag
    for line in my_data.decode('utf-8').splitlines():
        if not line.startswith('#'):  # lines starting with '#' are comments
            m_tag = re_tag.match(line)
            # A new tag finishes the previous one
                _, msg = _FinishTag(tag, msg, result)
    # Flush the final tag/message pair
    _FinishTag(tag, msg, result)
def _ShowBlobHelp(level, path, text, fname):
    """Output one help message relating to a missing blob.

    Args:
        level: tout output level to use
        path: Node path of the entry the message relates to
        text: Multi-line help text to show
        fname: Filename of the blob the message relates to
    """
    header = '%s (%s):' % (path, fname)
    tout.do_output(level, header)
    help_lines = text.splitlines()
    for help_line in help_lines:
        tout.do_output(level, ' %s' % help_line)
    # Blank line to separate this message from whatever follows
    tout.do_output(level, '')
def _ShowHelpForMissingBlobs(level, missing_list):
    """Show help for each missing blob to help the user take action

    Args:
        level: tout output level to use (e.g. tout.ERROR, tout.WARNING)
        missing_list: List of Entry objects to show help for
    """
    global missing_blob_help

    # Load and cache the help file on first use
    if not missing_blob_help:
        missing_blob_help = _ReadMissingBlobHelp()

    for entry in missing_list:
        tags = entry.GetHelpTags()

        # Show the first matching help message
            if tag in missing_blob_help:
                _ShowBlobHelp(level, entry._node.path, missing_blob_help[tag],
                              entry.GetDefaultFilename())

        # Or a generic help message
            _ShowBlobHelp(level, entry._node.path, "Missing blob",
                          entry.GetDefaultFilename())
def GetEntryModules(include_testing=True):
    """Get a set of entry class implementations

    Args:
        include_testing: True to include entry types used only for testing

    Returns:
        Set of paths to entry class filenames
    """
    # Every .py file in the 'etype' package directory implements an entry type
    all_files = pkg_resources.resource_listdir(__name__, 'etype')
    modules = set()
    for fname in all_files:
        if not fname.endswith('.py'):
            continue
        if not include_testing and '_testing' in fname:
            continue
        modules.add(os.path.splitext(os.path.basename(fname))[0])
    return modules
def WriteEntryDocs(modules, test_missing=None):
    """Write out documentation for all entries

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force an entry's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    # Local import, presumably so this module can be loaded without libfdt
    # being available (see the note near the top of the file) — confirm
    from binman.entry import Entry
    Entry.WriteDocs(modules, test_missing)
def write_bintool_docs(modules, test_missing=None):
    """Write out documentation for all bintools

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force an entry's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    bintool.Bintool.WriteDocs(modules, test_missing)
def ListEntries(image_fname, entry_paths):
    """List the entries in an image

    This decodes the supplied image and displays a table of entries from that
    image, preceded by a header.

    Args:
        image_fname: Image filename to process
        entry_paths: List of wildcarded paths (e.g. ['*dtb*', 'u-boot*',
            'section/u-boot'])
    """
    image = Image.FromFile(image_fname)

    entries, lines, widths = image.GetListEntries(entry_paths)

    num_columns = len(widths)
    for linenum, line in enumerate(lines):
        # Separator line sized to the full table width (2 spaces per column gap)
            print('-' * (sum(widths) + num_columns * 2))
        for i, item in enumerate(line):
            # NOTE(review): a leading '>' appears to control cell alignment —
            # the handling lines are not visible here; confirm in full source
            if item.startswith('>'):
            txt = '%*s ' % (width, item)
def ReadEntry(image_fname, entry_path, decomp=True):
    """Extract an entry from an image

    This extracts the data from a particular entry in an image

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        decomp: True to return uncompressed data, if the data is compressed,
            False to return the raw data

    Returns:
        data extracted from the entry
    """
    # Local import so this module can be loaded without libfdt being available
    from binman.image import Image

    image = Image.FromFile(image_fname)
    image.CollectBintools()
    entry = image.FindEntryPath(entry_path)
    return entry.ReadData(decomp)
def ShowAltFormats(image):
    """Show alternative formats available for entries in the image

    This shows a list of formats available.

    Args:
        image (Image): Image to check
    """
    # Collect the available formats from the image; without this the call
    # below would raise NameError since alt_formats was never assigned
    alt_formats = {}
    image.CheckAltFormats(alt_formats)
    print('%-10s %-20s %s' % ('Flag (-F)', 'Entry type', 'Description'))
    for name, val in alt_formats.items():
        entry, helptext = val
        print('%-10s %-20s %s' % (name, entry.etype, helptext))
def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
                   decomp=True, alt_format=None):
    """Extract the data from one or more entries and write it to files

    Args:
        image_fname: Image filename to process
        output_fname: Single output filename to use if extracting one file, None
            otherwise
        outdir: Output directory to use (for any number of files), else None
        entry_paths: List of entry paths to extract
        decomp: True to decompress the entry data
        alt_format: Alternative format to extract, or None for the default

    Returns:
        List of EntryInfo records that were written
    """
    image = Image.FromFile(image_fname)
    image.CollectBintools()

    # Special value: just list the available alternative formats
    if alt_format == 'list':
        ShowAltFormats(image)

    # Output an entry to a single file, as a special case
        raise ValueError('Must specify an entry path to write with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = entry.ReadData(decomp, alt_format)
        tools.write_file(output_fname, data)
        tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))

    # Otherwise we will output to a path given by the entry path of each entry.
    # This means that entries will appear in subdirectories if they are part of
    # a section
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice('%d entries match and will be written' % len(einfos))
        data = entry.ReadData(decomp, alt_format)
        # Drop the leading '/' so the path can be joined to outdir
        path = entry.GetPath()[1:]
        fname = os.path.join(outdir, path)

        # If this entry has children, create a directory for it and put its
        # data in a file called 'root' in that directory
        if entry.GetEntries():
            if fname and not os.path.exists(fname):
            fname = os.path.join(fname, 'root')
        tout.notice("Write entry '%s' size %x to '%s'" %
                    (entry.GetPath(), len(data), fname))
        tools.write_file(fname, data)
def BeforeReplace(image, allow_resize):
    """Handle getting an image ready for replacing entries in it

    Args:
        image: Image to prepare
        allow_resize: True if entries may change size during the replace —
            used together with allow_repack below
    """
    state.PrepareFromLoadedData(image)
    image.CollectBintools()
    # Raw data, since replacement works on the packed (compressed) form
    image.LoadData(decomp=False)

    # If repacking, drop the old offset/size values except for the original
    # ones, so we are only left with the constraints.
    if image.allow_repack and allow_resize:
def ReplaceOneEntry(image, entry, data, do_compress, allow_resize):
    """Handle replacing a single entry in an image

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
    """
    # WriteData() returns falsy when the new data does not fit in place
    if not entry.WriteData(data, do_compress):
        if not image.allow_repack:
            entry.Raise('Entry data size does not match, but allow-repack is not present for this image')
        # NOTE(review): this second error is presumably guarded by an
        # 'if not allow_resize:' check — the guard line is not visible here
            entry.Raise('Entry data size does not match, but resize is disabled')
def AfterReplace(image, allow_resize, write_map):
    """Handle writing out an image after replacing entries in it

    Args:
        image: Image to write
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    tout.info('Processing image')
    # Contents are already loaded, so skip fetching them again
    ProcessImage(image, update_fdt=True, write_map=write_map,
                 get_contents=False, allow_resize=allow_resize)
# Replace the data for one entry of an already-loaded image: prepare the
# image, write the new entry data, then repack and write the image out
def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True,
        # NOTE(review): signature continuation (write_map=False):) is not
        # visible in this view — write_map is used in the body below
    BeforeReplace(image, allow_resize)
    tout.info('Writing data to %s' % entry.GetPath())
    ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
def WriteEntry(image_fname, entry_path, data, do_compress=True,
               allow_resize=True, write_map=False):
    """Replace an entry in an image

    This replaces the data in a particular entry in an image. The size of the
    new data must match the size of the old data unless allow_resize is True.

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated
    """
    tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
    image = Image.FromFile(image_fname)
    image.CollectBintools()
    entry = image.FindEntryPath(entry_path)
    WriteEntryToImage(image, entry, data, do_compress=do_compress,
                      allow_resize=allow_resize, write_map=write_map)
def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
                   do_compress=True, allow_resize=True, write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to replace
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed
        allow_resize: True to allow entries to change size (re-packs entries)
        write_map: True to write a map file

    Returns:
        List of EntryInfo records that were written
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)
    # Mark the image as built so binman does not try to rebuild it
    image.mark_build_done()

    # Replace an entry from a single file, as a special case
        raise ValueError('Must specify an entry path to read with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.read_file(input_fname)
        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image, entry, data, do_compress=do_compress,
                          allow_resize=allow_resize, write_map=write_map)

    # Otherwise we will input from a path given by the entry path of each entry.
    # This means that files must appear in subdirectories if they are part of
    # a section
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

        # Sections are populated from their children, so skip them
        if entry.GetEntries():
            tout.info("Skipping section entry '%s'" % entry.GetPath())

        # Drop the leading '/' so the path can be joined to indir
        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.read_file(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        # Missing input files produce a warning rather than an error
            tout.warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
def SignEntries(image_fname, input_fname, privatekey_fname, algo, entry_paths,
        # NOTE(review): signature continuation (write_map=...):) is not
        # visible in this view — write_map is used in the body below
    """Sign and replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        privatekey_fname: Private key filename
        algo: Hashing algorithm
        entry_paths: List of entry paths to sign
        write_map (bool): True to write the map file
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)
    # Mark the image as built so binman does not try to rebuild it
    image.mark_build_done()

    BeforeReplace(image, allow_resize=True)

    for entry_path in entry_paths:
        entry = image.FindEntryPath(entry_path)
        entry.UpdateSignatures(privatekey_fname, algo, input_fname)

    AfterReplace(image, allow_resize=True, write_map=write_map)
def _ProcessTemplates(parent):
    """Handle any templates in the binman description

    Args:
        parent: Binman node to process (typically /binman)

    Returns:
        bool: True if any templates were processed

    Search through each target node looking for those with an 'insert-template'
    property. Use that as a list of references to template nodes to use to
    adjust the target node.

    Processing involves copying each subnode of the template node into the
    target node.

    This is done recursively, so templates can be at any level of the binman
    image, e.g. inside a section.

    See 'Templates' in the Binman documentation for details.
    """
    for node in parent.subnodes:
        tmpl = fdt_util.GetPhandleList(node, 'insert-template')
        # Copy in the subnodes of each referenced template node
            node.copy_subnodes_from_phandles(tmpl)

        # Recurse so nested templates (e.g. inside sections) are handled
        found |= _ProcessTemplates(node)
522 def _RemoveTemplates(parent):
523 """Remove any templates in the binman description
525 for node in parent.subnodes:
526 if node.name.startswith('template'):
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
    """Prepare the images to be processed and select the device tree

    This function:
    - reads in the device tree
    - finds and scans the binman node to create all entries
    - selects which images to build
    - Updates the device trees with placeholder properties for offset,
      size, etc.

    Args:
        dtb_fname: Filename of the device tree file to use (.dts or .dtb)
        select_images: List of images to output, or None for all
        update_fdt: True to update the FDT with entry offsets, etc.
        use_expanded: True to use expanded versions of entries, if available.
            So if 'u-boot' is called for, we use 'u-boot-expanded' instead. This
            is needed if update_fdt is True (although tests may disable it)

    Returns:
        OrderedDict of images:
            key: Image name (str)
            value: Image object
    """
    # Import these here in case libfdt.py is not available, in which case
    # the above help option still works.
    from dtoc import fdt_util

    # Get the device tree ready by compiling it and copying the compiled
    # output into a file in our output directly. Then scan it for use
    # in binman
    dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
    fname = tools.get_output_filename('u-boot.dtb.out')
    tools.write_file(fname, tools.read_file(dtb_fname))
    dtb = fdt.FdtScan(fname)

    node = _FindBinmanNode(dtb)
        raise ValueError("Device tree '%s' does not have a 'binman' "

    if _ProcessTemplates(node):
        # Keep a copy of the dtb with templates still present, for debugging
        fname = tools.get_output_filename('u-boot.dtb.tmpl1')
        tools.write_file(fname, dtb.GetContents())

        _RemoveTemplates(node)

        # Rescan the dtb to pick up the new phandles
        node = _FindBinmanNode(dtb)
        # And a copy after template removal, also for debugging
        fname = tools.get_output_filename('u-boot.dtb.tmpl2')
        tools.write_file(fname, dtb.GetContents())

    images = _ReadImageDesc(node, use_expanded)

        # Restrict the built images to those requested
        new_images = OrderedDict()
        for name, image in images.items():
            if name in select_images:
                new_images[name] = image

        tout.notice('Skipping images: %s' % ', '.join(skip))

    state.Prepare(images, dtb)

    # Prepare the device tree by making sure that any missing
    # properties are added (e.g. 'pos' and 'size'). The values of these
    # may not be correct yet, but we add placeholders so that the
    # size of the device tree is correct. Later, in
    # SetCalculatedProperties() we will insert the correct values
    # without changing the device-tree size, thus ensuring that our
    # entry offsets remain the same.
    for image in images.values():
        image.CollectBintools()
        image.AddMissingProperties(True)
        image.ProcessFdt(dtb)

    for dtb_item in state.GetAllFdts():
        dtb_item.Sync(auto_resize=True)
def ProcessImage(image, update_fdt, write_map, get_contents=True,
                 allow_resize=True, allow_missing=False,
                 allow_fake_blobs=False):
    """Perform all steps for this image, including checking and writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents is already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing
        allow_fake_blobs: Allow blob_ext objects to be faked with dummy files

    Returns:
        True if one or more external blobs are missing or faked,
        False if all are present
    """
        image.SetAllowMissing(allow_missing)
        image.SetAllowFakeBlob(allow_fake_blobs)
        image.GetEntryContents()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device is
    # compressed we cannot know this compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need a
    # third pass for this.
    for pack_pass in range(passes):
        except Exception as e:
                # Write the map to help diagnose the packing failure
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors" % fname)

        image.SetCalculatedProperties()
        for dtb_item in state.GetAllFdts():

        # Contents may change size here, forcing another pack pass
        sizes_ok = image.ProcessEntryContents()

    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
        image.Raise('Entries changed size after packing (tried %s passes)' %

    image.CheckMissing(missing_list)
        tout.error("Image '%s' is missing external blobs and is non-functional: %s\n" %
                   (image.name, ' '.join([e.name for e in missing_list])))
        _ShowHelpForMissingBlobs(tout.ERROR, missing_list)

    image.CheckFakedBlobs(faked_list)
            "Image '%s' has faked external blobs and is non-functional: %s\n" %
            (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
                                   for e in faked_list])))

    image.CheckOptional(optional_list)
            "Image '%s' is missing optional external blobs but is still functional: %s\n" %
            (image.name, ' '.join([e.name for e in optional_list])))
        _ShowHelpForMissingBlobs(tout.WARNING, optional_list)

    missing_bintool_list = []
    image.check_missing_bintools(missing_bintool_list)
    if missing_bintool_list:
            "Image '%s' has missing bintools and is non-functional: %s\n" %
            (image.name, ' '.join([os.path.basename(bintool.name)
                                   for bintool in missing_bintool_list])))
    # Faked/missing optional blobs still allow the build; only genuinely
    # missing blobs/bintools (or fakes) mark the image as invalid
    return any([missing_list, faked_list, missing_bintool_list])
726 """The main control code for binman
728 This assumes that help and test options have already been dealt with. It
729 deals with the core task of building images.
732 args: Command line arguments Namespace object
738 with importlib.resources.path('binman', 'README.rst') as readme:
739 tools.print_full_help(str(readme))
742 # Put these here so that we can import this module without libfdt
743 from binman.image import Image
744 from binman import state
748 tool_paths += args.toolpath
750 tool_paths.append(args.tooldir)
751 tools.set_tool_paths(tool_paths or None)
752 bintool.Bintool.set_tool_dir(args.tooldir)
754 if args.cmd in ['ls', 'extract', 'replace', 'tool', 'sign']:
756 tout.init(args.verbosity)
757 if args.cmd == 'replace':
758 tools.prepare_output_dir(args.outdir, args.preserve)
760 tools.prepare_output_dir(None)
762 ListEntries(args.image, args.paths)
764 if args.cmd == 'extract':
765 ExtractEntries(args.image, args.filename, args.outdir, args.paths,
766 not args.uncompressed, args.format)
768 if args.cmd == 'replace':
769 ReplaceEntries(args.image, args.filename, args.indir, args.paths,
770 do_compress=not args.compressed,
771 allow_resize=not args.fix_size, write_map=args.map)
773 if args.cmd == 'sign':
774 SignEntries(args.image, args.file, args.key, args.algo, args.paths)
776 if args.cmd == 'tool':
778 bintool.Bintool.list_all()
780 if not args.bintools:
782 "Please specify bintools to fetch or 'all' or 'missing'")
783 bintool.Bintool.fetch_tools(bintool.FETCH_ANY,
786 raise ValueError("Invalid arguments to 'tool' subcommand")
790 tools.finalise_output_dir()
794 if args.update_fdt_in_elf:
795 elf_params = args.update_fdt_in_elf.split(',')
796 if len(elf_params) != 4:
797 raise ValueError('Invalid args %s to --update-fdt-in-elf: expected infile,outfile,begin_sym,end_sym' %
800 # Try to figure out which device tree contains our image description
806 raise ValueError('Must provide a board to process (use -b <board>)')
807 board_pathname = os.path.join(args.build_dir, board)
808 dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
811 args.indir.append(board_pathname)
814 tout.init(args.verbosity)
815 elf.debug = args.debug
816 cbfs_util.VERBOSE = args.verbosity > 2
817 state.use_fake_dtb = args.fake_dtb
819 # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
820 # When running tests this can be disabled using this flag. When not
821 # updating the FDT in image, it is not needed by binman, but we use it
822 # for consistency, so that the images look the same to U-Boot at
824 use_expanded = not args.no_expanded
826 tools.set_input_dirs(args.indir)
827 tools.prepare_output_dir(args.outdir, args.preserve)
828 state.SetEntryArgs(args.entry_arg)
829 state.SetThreads(args.threads)
831 images = PrepareImagesAndDtbs(dtb_fname, args.image,
832 args.update_fdt, use_expanded)
834 if args.test_section_timeout:
835 # Set the first image to timeout, used in testThreadTimeout()
836 images[list(images.keys())[0]].test_section_timeout = True
838 bintool.Bintool.set_missing_list(
839 args.force_missing_bintools.split(',') if
840 args.force_missing_bintools else None)
842 # Create the directory here instead of Entry.check_fake_fname()
843 # since that is called from a threaded context so different threads
844 # may race to create the directory
845 if args.fake_ext_blobs:
846 entry.Entry.create_fake_dir()
848 for image in images.values():
849 invalid |= ProcessImage(image, args.update_fdt, args.map,
850 allow_missing=args.allow_missing,
851 allow_fake_blobs=args.fake_ext_blobs)
853 # Write the updated FDTs to our output files
854 for dtb_item in state.GetAllFdts():
855 tools.write_file(dtb_item._fname, dtb_item.GetContents())
858 data = state.GetFdtForEtype('u-boot-dtb').GetContents()
859 elf.UpdateFile(*elf_params, data)
861 # This can only be True if -M is provided, since otherwise binman
862 # would have raised an error already
864 msg = 'Some images are invalid'
865 if args.ignore_missing:
871 # Use this to debug the time take to pack the image
874 tools.finalise_output_dir()