1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
3 # Written by Simon Glass <sjg@chromium.org>
5 # Creates binary images from input files controlled by a description
8 from collections import OrderedDict
15 from patman import tools
17 from binman import bintool
18 from binman import cbfs_util
19 from binman import elf
20 from patman import command
21 from patman import tout
23 # These are imported if needed since they import libfdt
# List of images we plan to create
# Make this global so that it can be referenced from tests
images = OrderedDict()

# Help text for each type of missing blob, dict:
#    key: Value of the entry's 'missing-msg' or entry name
#    value: Text for the help
missing_blob_help = {}
def _ReadImageDesc(binman_node, use_expanded):
    """Read the image descriptions from the /binman node

    This normally produces a single Image object called 'image'. But if
    multiple images are present, they will all be returned.

    Args:
        binman_node: Node object of the /binman node
        use_expanded: True if the FDT will be updated with the entry information

    Returns:
        OrderedDict of Image objects, each of which describes an image
    """
    # Image is resolved at runtime, so pylint cannot see its constructor
    # pylint: disable=E1102
    images = OrderedDict()
    if 'multiple-images' in binman_node.props:
        # Each subnode of /binman describes one image to build
        for node in binman_node.subnodes:
            images[node.name] = Image(node.name, node,
                                      use_expanded=use_expanded)
    else:
        # Single-image case: the /binman node itself describes the image
        images['image'] = Image('image', binman_node, use_expanded=use_expanded)
    return images
59 def _FindBinmanNode(dtb):
60 """Find the 'binman' node in the device tree
63 dtb: Fdt object to scan
65 Node object of /binman node, or None if not found
67 for node in dtb.GetRoot().subnodes:
68 if node.name == 'binman':
def _ReadMissingBlobHelp():
    """Read the missing-blob-help file

    This file contains help messages explaining what to do when external blobs
    are missing.

    Returns:
        Dict:
            key: Message tag (str)
            value: Message text (str)
    """
    def _FinishTag(tag, msg, result):
        # Store the message collected so far (if any) against its tag
        if tag:
            result[tag] = msg.rstrip()
            msg = ''
        return None, msg

    my_data = pkg_resources.resource_string(__name__, 'missing-blob-help')
    re_tag = re.compile('^([-a-z0-9]+):$')
    result = {}
    tag = None
    msg = ''
    for line in my_data.decode('utf-8').splitlines():
        # '#' lines are comments in the help file
        if not line.startswith('#'):
            m_tag = re_tag.match(line)
            if m_tag:
                # Start of a new tagged message: flush the previous one
                _, msg = _FinishTag(tag, msg, result)
                tag = m_tag.group(1)
            elif tag:
                msg += line + '\n'
    # Flush the final message, if any
    _FinishTag(tag, msg, result)
    return result
def _ShowBlobHelp(path, text):
    """Print help text for one missing blob, prefixed by its node path"""
    tout.warning('\n%s:' % path)
    for help_line in text.splitlines():
        tout.warning(' %s' % help_line)
def _ShowHelpForMissingBlobs(missing_list):
    """Show help for each missing blob to help the user take action

    Args:
        missing_list: List of Entry objects to show help for
    """
    global missing_blob_help

    # Load the help file lazily, on first use
    if not missing_blob_help:
        missing_blob_help = _ReadMissingBlobHelp()

    for entry in missing_list:
        tags = entry.GetHelpTags()

        # Show the first matching help message only
        for tag in tags:
            if tag in missing_blob_help:
                _ShowBlobHelp(entry._node.path, missing_blob_help[tag])
                break
def GetEntryModules(include_testing=True):
    """Get a set of entry class implementations

    Args:
        include_testing: True to include modules used only for testing

    Returns:
        Set of paths to entry class filenames
    """
    glob_list = pkg_resources.resource_listdir(__name__, 'etype')
    glob_list = [fname for fname in glob_list if fname.endswith('.py')]
    # Set comprehension is clearer than set([...]) and avoids the temporary list
    return {os.path.splitext(os.path.basename(item))[0]
            for item in glob_list
            if include_testing or '_testing' not in item}
def WriteEntryDocs(modules, test_missing=None):
    """Write out documentation for all entries

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force an entry's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    # Import here since binman.entry pulls in libfdt-dependent code
    from binman.entry import Entry
    Entry.WriteDocs(modules, test_missing)
def write_bintool_docs(modules, test_missing=None):
    """Write out documentation for all bintools

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force an entry's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    bintool.Bintool.WriteDocs(modules, test_missing)
def ListEntries(image_fname, entry_paths):
    """List the entries in an image

    This decodes the supplied image and displays a table of entries from that
    image, preceded by a header.

    Args:
        image_fname: Image filename to process
        entry_paths: List of wildcarded paths (e.g. ['*dtb*', 'u-boot*',
            'section/u-boot'])
    """
    image = Image.FromFile(image_fname)

    entries, lines, widths = image.GetListEntries(entry_paths)

    num_columns = len(widths)
    for linenum, line in enumerate(lines):
        if linenum == 1:
            # Print a separator between the header and the entries
            print('-' * (sum(widths) + num_columns * 2))
        out = ''
        for i, item in enumerate(line):
            # Left-align by default; a '>' prefix requests right alignment
            width = -widths[i]
            if item.startswith('>'):
                width = -width
                item = item[1:]
            txt = '%*s ' % (width, item)
            out += txt
        print(out.rstrip())
def ReadEntry(image_fname, entry_path, decomp=True):
    """Extract an entry from an image

    This extracts the data from a particular entry in an image

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        decomp: True to return uncompressed data, if the data is compressed,
            False to return the raw data

    Returns:
        data extracted from the entry
    """
    global Image

    # Import here so that this module can be imported without libfdt
    from binman.image import Image

    image = Image.FromFile(image_fname)
    entry = image.FindEntryPath(entry_path)
    return entry.ReadData(decomp)
def ShowAltFormats(image):
    """Show alternative formats available for entries in the image

    This shows a list of formats available.

    Args:
        image (Image): Image to check
    """
    alt_formats = {}
    image.CheckAltFormats(alt_formats)
    print('%-10s %-20s %s' % ('Flag (-F)', 'Entry type', 'Description'))
    for name, val in alt_formats.items():
        entry, helptext = val
        print('%-10s %-20s %s' % (name, entry.etype, helptext))
def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
                   decomp=True, alt_format=None):
    """Extract the data from one or more entries and write it to files

    Args:
        image_fname: Image filename to process
        output_fname: Single output filename to use if extracting one file, None
            otherwise
        outdir: Output directory to use (for any number of files), else None
        entry_paths: List of entry paths to extract
        decomp: True to decompress the entry data
        alt_format: Alternative format to use, or None for the default

    Returns:
        List of EntryInfo records that were written
    """
    image = Image.FromFile(image_fname)

    if alt_format == 'list':
        ShowAltFormats(image)
        return

    # Output an entry to a single file, as a special case
    if output_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to write with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = entry.ReadData(decomp, alt_format)
        tools.write_file(output_fname, data)
        tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
        return

    # Otherwise we will output to a path given by the entry path of each entry.
    # This means that entries will appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice('%d entries match and will be written' % len(einfos))
    for einfo in einfos:
        entry = einfo.entry
        data = entry.ReadData(decomp, alt_format)
        path = entry.GetPath()[1:]
        fname = os.path.join(outdir, path)

        # If this entry has children, create a directory for it and put its
        # data in a file called 'root' in that directory
        if entry.GetEntries():
            if fname and not os.path.exists(fname):
                os.makedirs(fname)
            fname = os.path.join(fname, 'root')
        tout.notice("Write entry '%s' size %x to '%s'" %
                    (entry.GetPath(), len(data), fname))
        tools.write_file(fname, data)
    return einfos
def BeforeReplace(image, allow_resize):
    """Handle getting an image ready for replacing entries in it

    Args:
        image: Image to prepare
        allow_resize: True if entries will be allowed to change size (requires
            a repack of the image)
    """
    state.PrepareFromLoadedData(image)
    image.LoadData()

    # If repacking, drop the old offset/size values except for the original
    # ones, so we are only left with the constraints.
    if allow_resize:
        image.ResetForPack()
def ReplaceOneEntry(image, entry, data, do_compress, allow_resize):
    """Handle replacing a single entry in an image

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
    """
    # WriteData() returns False if the new data does not fit in place
    if not entry.WriteData(data, do_compress):
        if not image.allow_repack:
            entry.Raise('Entry data size does not match, but allow-repack is not present for this image')
        if not allow_resize:
            entry.Raise('Entry data size does not match, but resize is disabled')
def AfterReplace(image, allow_resize, write_map):
    """Handle writing out an image after replacing entries in it

    Args:
        image: Image to write
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    tout.info('Processing image')
    ProcessImage(image, update_fdt=True, write_map=write_map,
                 get_contents=False, allow_resize=allow_resize)
def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True,
                      write_map=False):
    """Replace the data in one entry of an already-loaded image

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    BeforeReplace(image, allow_resize)
    tout.info('Writing data to %s' % entry.GetPath())
    ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
def WriteEntry(image_fname, entry_path, data, do_compress=True,
               allow_resize=True, write_map=False):
    """Replace an entry in an image

    This replaces the data in a particular entry in an image. The size of the
    new data must match the size of the old data unless allow_resize is True.

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated
    """
    tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
    image = Image.FromFile(image_fname)
    entry = image.FindEntryPath(entry_path)
    WriteEntryToImage(image, entry, data, do_compress=do_compress,
                      allow_resize=allow_resize, write_map=write_map)

    return image
def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
                   do_compress=True, allow_resize=True, write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to replace
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        List of EntryInfo records that were written
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)

    # Replace an entry from a single file, as a special case
    if input_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to read with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.read_file(input_fname)
        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image, entry, data, do_compress=do_compress,
                          allow_resize=allow_resize, write_map=write_map)
        return

    # Otherwise we will input from a path given by the entry path of each entry.
    # This means that files must appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

    for einfo in einfos:
        entry = einfo.entry
        # Sections are written as part of their children, so skip them
        if entry.GetEntries():
            tout.info("Skipping section entry '%s'" % entry.GetPath())
            continue

        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.read_file(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        else:
            tout.warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
    return image
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
    """Prepare the images to be processed and select the device tree

    This function:
    - reads in the device tree
    - finds and scans the binman node to create all entries
    - selects which images to build
    - Updates the device trees with placeholder properties for offset,
      image-pos, etc.

    Args:
        dtb_fname: Filename of the device tree file to use (.dts or .dtb)
        select_images: List of images to output, or None for all
        update_fdt: True to update the FDT with entry offsets, etc.
        use_expanded: True to use expanded versions of entries, if available.
            So if 'u-boot' is called for, we use 'u-boot-expanded' instead. This
            is needed if update_fdt is True (although tests may disable it)

    Returns:
        OrderedDict of images:
            key: Image name (str)
            value: Image object
    """
    # Import these here in case libfdt.py is not available, in which case
    # the above help option still works.
    from dtoc import fdt
    from dtoc import fdt_util
    global images

    # Get the device tree ready by compiling it and copying the compiled
    # output into a file in our output directly. Then scan it for use
    # in binman.
    dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
    fname = tools.get_output_filename('u-boot.dtb.out')
    tools.write_file(fname, tools.read_file(dtb_fname))
    dtb = fdt.FdtScan(fname)

    node = _FindBinmanNode(dtb)
    if not node:
        raise ValueError("Device tree '%s' does not have a 'binman' "
                         "node" % dtb_fname)

    images = _ReadImageDesc(node, use_expanded)

    if select_images:
        skip = []
        new_images = OrderedDict()
        for name, image in images.items():
            if name in select_images:
                new_images[name] = image
            else:
                skip.append(name)
        images = new_images
        tout.notice('Skipping images: %s' % ', '.join(skip))

    state.Prepare(images, dtb)

    # Prepare the device tree by making sure that any missing
    # properties are added (e.g. 'pos' and 'size'). The values of these
    # may not be correct yet, but we add placeholders so that the
    # size of the device tree is correct. Later, in
    # SetCalculatedProperties() we will insert the correct values
    # without changing the device-tree size, thus ensuring that our
    # entry offsets remain the same.
    for image in images.values():
        image.CollectBintools()
        image.ExpandEntries()
        if update_fdt:
            image.AddMissingProperties(True)
        image.ProcessFdt(dtb)

    for dtb_item in state.GetAllFdts():
        dtb_item.Sync(auto_resize=True)
        dtb_item.Pack()
        dtb_item.Flush()
    return images
def ProcessImage(image, update_fdt, write_map, get_contents=True,
                 allow_resize=True, allow_missing=False,
                 allow_fake_blobs=False):
    """Perform all steps for this image, including checking and writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents is already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing
        allow_fake_blobs: Allow blob_ext objects to be faked with dummy files

    Returns:
        True if one or more external blobs are missing or faked,
        False if all are present
    """
    if get_contents:
        image.SetAllowMissing(allow_missing)
        image.SetAllowFakeBlob(allow_fake_blobs)
        image.GetEntryContents()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device is
    # compressed we cannot know this compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need a
    # third pass for this.
    passes = 5
    for pack_pass in range(passes):
        try:
            image.PackEntries()
        except Exception:
            if write_map:
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors" % fname)
            raise
        image.SetImagePos()
        if update_fdt:
            image.SetCalculatedProperties()
            for dtb_item in state.GetAllFdts():
                dtb_item.Sync()
                dtb_item.Flush()
        image.WriteSymbols()
        sizes_ok = image.ProcessEntryContents()
        if sizes_ok:
            break
        # An entry changed size: drop the computed offsets and try again
        image.ResetForPack()
    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
    if not sizes_ok:
        image.Raise('Entries changed size after packing (tried %s passes)' %
                    passes)

    image.BuildImage()
    if write_map:
        image.WriteMap()

    missing_list = []
    image.CheckMissing(missing_list)
    if missing_list:
        tout.warning("Image '%s' is missing external blobs and is non-functional: %s" %
                     (image.name, ' '.join([e.name for e in missing_list])))
        _ShowHelpForMissingBlobs(missing_list)
    faked_list = []
    image.CheckFakedBlobs(faked_list)
    if faked_list:
        tout.warning(
            "Image '%s' has faked external blobs and is non-functional: %s" %
            (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
                                   for e in faked_list])))
    missing_bintool_list = []
    image.check_missing_bintools(missing_bintool_list)
    if missing_bintool_list:
        tout.warning(
            "Image '%s' has missing bintools and is non-functional: %s" %
            (image.name, ' '.join([os.path.basename(bintool.name)
                                   for bintool in missing_bintool_list])))
    return any([missing_list, faked_list, missing_bintool_list])
615 """The main control code for binman
617 This assumes that help and test options have already been dealt with. It
618 deals with the core task of building images.
621 args: Command line arguments Namespace object
627 tools.print_full_help(
628 os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README.rst')
632 # Put these here so that we can import this module without libfdt
633 from binman.image import Image
634 from binman import state
636 if args.cmd in ['ls', 'extract', 'replace', 'tool']:
638 tout.init(args.verbosity)
639 tools.prepare_output_dir(None)
641 ListEntries(args.image, args.paths)
643 if args.cmd == 'extract':
644 ExtractEntries(args.image, args.filename, args.outdir, args.paths,
645 not args.uncompressed, args.format)
647 if args.cmd == 'replace':
648 ReplaceEntries(args.image, args.filename, args.indir, args.paths,
649 do_compress=not args.compressed,
650 allow_resize=not args.fix_size, write_map=args.map)
652 if args.cmd == 'tool':
653 tools.set_tool_paths(args.toolpath)
655 bintool.Bintool.list_all()
657 if not args.bintools:
659 "Please specify bintools to fetch or 'all' or 'missing'")
660 bintool.Bintool.fetch_tools(bintool.FETCH_ANY,
663 raise ValueError("Invalid arguments to 'tool' subcommand")
667 tools.finalise_output_dir()
671 if args.update_fdt_in_elf:
672 elf_params = args.update_fdt_in_elf.split(',')
673 if len(elf_params) != 4:
674 raise ValueError('Invalid args %s to --update-fdt-in-elf: expected infile,outfile,begin_sym,end_sym' %
677 # Try to figure out which device tree contains our image description
683 raise ValueError('Must provide a board to process (use -b <board>)')
684 board_pathname = os.path.join(args.build_dir, board)
685 dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
688 args.indir.append(board_pathname)
691 tout.init(args.verbosity)
692 elf.debug = args.debug
693 cbfs_util.VERBOSE = args.verbosity > 2
694 state.use_fake_dtb = args.fake_dtb
696 # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
697 # When running tests this can be disabled using this flag. When not
698 # updating the FDT in image, it is not needed by binman, but we use it
699 # for consistency, so that the images look the same to U-Boot at
701 use_expanded = not args.no_expanded
703 tools.set_input_dirs(args.indir)
704 tools.prepare_output_dir(args.outdir, args.preserve)
705 tools.set_tool_paths(args.toolpath)
706 state.SetEntryArgs(args.entry_arg)
707 state.SetThreads(args.threads)
709 images = PrepareImagesAndDtbs(dtb_fname, args.image,
710 args.update_fdt, use_expanded)
712 if args.test_section_timeout:
713 # Set the first image to timeout, used in testThreadTimeout()
714 images[list(images.keys())[0]].test_section_timeout = True
716 bintool.Bintool.set_missing_list(
717 args.force_missing_bintools.split(',') if
718 args.force_missing_bintools else None)
719 for image in images.values():
720 invalid |= ProcessImage(image, args.update_fdt, args.map,
721 allow_missing=args.allow_missing,
722 allow_fake_blobs=args.fake_ext_blobs)
724 # Write the updated FDTs to our output files
725 for dtb_item in state.GetAllFdts():
726 tools.write_file(dtb_item._fname, dtb_item.GetContents())
729 data = state.GetFdtForEtype('u-boot-dtb').GetContents()
730 elf.UpdateFile(*elf_params, data)
733 tout.warning("\nSome images are invalid")
735 # Use this to debug the time take to pack the image
738 tools.finalise_output_dir()