1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
4 # Base class for all entries
7 from collections import namedtuple
13 from binman import bintool
14 from binman import comp_util
15 from dtoc import fdt_util
16 from patman import tools
17 from patman.tools import to_hex, to_hex_size
18 from patman import tout
22 # This is imported if needed
25 # An argument which can be passed to entries on the command line, in lieu of
26 # device-tree properties.
27 EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
29 # Information about an entry for use when displaying summaries
30 EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
31 'image_pos', 'uncomp_size', 'offset',
35 """An Entry in the section
37 An entry corresponds to a single node in the device-tree description
38 of the section. Each entry ends up being a part of the final section.
39 Entries can be placed either right next to each other, or with padding
40 between them. The type of the entry determines the data that is in it.
42 This class is not used by itself. All entry objects are subclasses of
46 section: Section object containing this entry
47 node: The node that created this entry
48 offset: Offset of entry within the section, None if not known yet (in
49 which case it will be calculated by Pack())
50 size: Entry size in bytes, None if not known
51 pre_reset_size: size as it was before ResetForPack(). This allows us to
52 keep track of the size we started with and detect size changes
53 uncomp_size: Size of uncompressed data in bytes, if the entry is
55 contents_size: Size of contents in bytes, 0 by default
56 align: Entry start offset alignment relative to the start of the
57 containing section, or None
58 align_size: Entry size alignment, or None
59 align_end: Entry end offset alignment relative to the start of the
60 containing section, or None
61 pad_before: Number of pad bytes before the contents when it is placed
62 in the containing section, 0 if none. The pad bytes become part of
64 pad_after: Number of pad bytes after the contents when it is placed in
65 the containing section, 0 if none. The pad bytes become part of
67 data: Contents of entry (string of bytes). This does not include
68 padding created by pad_before or pad_after. If the entry is
69 compressed, this contains the compressed data.
70 uncomp_data: Original uncompressed data, if this entry is compressed,
72 compress: Compression algoithm used (e.g. 'lz4'), 'none' if none
73 orig_offset: Original offset value read from node
74 orig_size: Original size value read from node
75 missing: True if this entry is missing its contents
76 allow_missing: Allow children of this entry to be missing (used by
77 subclasses such as Entry_section)
78 allow_fake: Allow creating a dummy fake file if the blob file is not
79 available. This is mainly used for testing.
80 external: True if this entry contains an external binary blob
81 bintools: Bintools used by this entry (only populated for Image)
82 missing_bintools: List of missing bintools for this entry
83 update_hash: True if this entry's "hash" subnode should be
84 updated with a hash of the entry contents
86 def __init__(self, section, etype, node, name_prefix=''):
87 # Put this here to allow entry-docs and help to work without libfdt
89 from binman import state
91 self.section = section
94 self.name = node and (name_prefix + node.name) or 'none'
97 self.pre_reset_size = None
98 self.uncomp_size = None
100 self.uncomp_data = None
101 self.contents_size = 0
103 self.align_size = None
104 self.align_end = None
107 self.offset_unset = False
108 self.image_pos = None
109 self.extend_size = False
110 self.compress = 'none'
113 self.external = False
114 self.allow_missing = False
115 self.allow_fake = False
117 self.missing_bintools = []
118 self.update_hash = True
121 def FindEntryClass(etype, expanded):
122 """Look up the entry class for a node.
125 node_node: Path name of Node object containing information about
126 the entry to create (used for errors)
127 etype: Entry type to use
128 expanded: Use the expanded version of etype
131 The entry class object if found, else None if not found and expanded
132 is True, else a tuple:
133 module name that could not be found
136 # Convert something like 'u-boot@0' to 'u_boot' since we are only
137 # interested in the type.
138 module_name = etype.replace('-', '_')
140 if '@' in module_name:
141 module_name = module_name.split('@')[0]
143 module_name += '_expanded'
144 module = modules.get(module_name)
146 # Also allow entry-type modules to be brought in from the etype directory.
148 # Import the module if we have not already done so.
151 module = importlib.import_module('binman.etype.' + module_name)
152 except ImportError as e:
155 return module_name, e
156 modules[module_name] = module
158 # Look up the expected class name
159 return getattr(module, 'Entry_%s' % module_name)
162 def Lookup(node_path, etype, expanded, missing_etype=False):
163 """Look up the entry class for a node.
166 node_node (str): Path name of Node object containing information
167 about the entry to create (used for errors)
168 etype (str): Entry type to use
169 expanded (bool): Use the expanded version of etype
170 missing_etype (bool): True to default to a blob etype if the
171 requested etype is not found
174 The entry class object if found, else None if not found and expanded
178 ValueError if expanded is False and the class is not found
180 # Convert something like 'u-boot@0' to 'u_boot' since we are only
181 # interested in the type.
182 cls = Entry.FindEntryClass(etype, expanded)
185 elif isinstance(cls, tuple):
187 cls = Entry.FindEntryClass('blob', False)
188 if isinstance(cls, tuple): # This should not fail
191 "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
192 (etype, node_path, module_name, e))
196 def Create(section, node, etype=None, expanded=False, missing_etype=False):
197 """Create a new entry for a node.
200 section (entry_Section): Section object containing this node
201 node (Node): Node object containing information about the entry to
203 etype (str): Entry type to use, or None to work it out (used for
205 expanded (bool): Use the expanded version of etype
206 missing_etype (bool): True to default to a blob etype if the
207 requested etype is not found
210 A new Entry object of the correct type (a subclass of Entry)
213 etype = fdt_util.GetString(node, 'type', node.name)
214 obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
216 # Check whether to use the expanded entry
217 new_etype = etype + '-expanded'
218 can_expand = not fdt_util.GetBool(node, 'no-expanded')
219 if can_expand and obj.UseExpanded(node, etype, new_etype):
224 obj = Entry.Lookup(node.path, etype, False, missing_etype)
226 # Call its constructor to get the object we want.
227 return obj(section, etype, node)
230 """Read entry information from the node
232 This must be called as the first thing after the Entry is created.
234 This reads all the fields we recognise from the node, ready for use.
236 if 'pos' in self._node.props:
237 self.Raise("Please use 'offset' instead of 'pos'")
238 if 'expand-size' in self._node.props:
239 self.Raise("Please use 'extend-size' instead of 'expand-size'")
240 self.offset = fdt_util.GetInt(self._node, 'offset')
241 self.size = fdt_util.GetInt(self._node, 'size')
242 self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
243 self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
244 if self.GetImage().copy_to_orig:
245 self.orig_offset = self.offset
246 self.orig_size = self.size
248 # These should not be set in input files, but are set in an FDT map,
249 # which is also read by this code.
250 self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
251 self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
253 self.align = fdt_util.GetInt(self._node, 'align')
254 if tools.not_power_of_two(self.align):
255 raise ValueError("Node '%s': Alignment %s must be a power of two" %
256 (self._node.path, self.align))
257 if self.section and self.align is None:
258 self.align = self.section.align_default
259 self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
260 self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
261 self.align_size = fdt_util.GetInt(self._node, 'align-size')
262 if tools.not_power_of_two(self.align_size):
263 self.Raise("Alignment size %s must be a power of two" %
265 self.align_end = fdt_util.GetInt(self._node, 'align-end')
266 self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
267 self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
268 self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
270 # This is only supported by blobs and sections at present
271 self.compress = fdt_util.GetString(self._node, 'compress', 'none')
273 def GetDefaultFilename(self):
277 """Get the device trees used by this entry
280 Empty dict, if this entry is not a .dtb, otherwise:
282 key: Filename from this entry (without the path)
284 Entry object for this dtb
285 Filename of file containing this dtb
    def gen_entries(self):
        """Allow entries to generate other entries

        Some entries generate subnodes automatically, from which sub-entries
        are then created. This method allows those to be added to the binman
        definition for the current image. An entry which implements this method
        should call state.AddSubnode() to add a subnode and can add properties
        with state.AddString(), etc.

        An example is 'files', which produces a section containing a list of
        files.

        This base implementation does nothing; subclasses override it when
        they need to generate entries.
        """
303 def AddMissingProperties(self, have_image_pos):
304 """Add new properties to the device tree as needed for this entry
307 have_image_pos: True if this entry has an image position. This can
308 be False if its parent section is compressed, since compression
309 groups all entries together into a compressed block of data,
310 obscuring the start of each individual child entry
312 for prop in ['offset', 'size']:
313 if not prop in self._node.props:
314 state.AddZeroProp(self._node, prop)
315 if have_image_pos and 'image-pos' not in self._node.props:
316 state.AddZeroProp(self._node, 'image-pos')
317 if self.GetImage().allow_repack:
318 if self.orig_offset is not None:
319 state.AddZeroProp(self._node, 'orig-offset', True)
320 if self.orig_size is not None:
321 state.AddZeroProp(self._node, 'orig-size', True)
323 if self.compress != 'none':
324 state.AddZeroProp(self._node, 'uncomp-size')
327 err = state.CheckAddHashProp(self._node)
    def SetCalculatedProperties(self):
        """Set the value of device-tree properties calculated by binman

        Writes the packed offset/size values (and related optional values)
        back into this entry's device-tree node, so the final image describes
        itself.
        """
        state.SetInt(self._node, 'offset', self.offset)
        state.SetInt(self._node, 'size', self.size)
        # image-pos is recorded relative to the root section's skip-at-start
        # offset, so subtract that base first
        base = self.section.GetRootSkipAtStart() if self.section else 0
        if self.image_pos is not None:
            state.SetInt(self._node, 'image-pos', self.image_pos - base)
        if self.GetImage().allow_repack:
            # Preserve the values originally read from the node so the image
            # can be repacked later without losing the user's constraints
            if self.orig_offset is not None:
                state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
            if self.orig_size is not None:
                state.SetInt(self._node, 'orig-size', self.orig_size, True)
        if self.uncomp_size is not None:
            state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
        state.CheckSetHashValue(self._node, self.GetData)
349 def ProcessFdt(self, fdt):
350 """Allow entries to adjust the device tree
352 Some entries need to adjust the device tree for their purposes. This
353 may involve adding or deleting properties.
356 True if processing is complete
357 False if processing could not be completed due to a dependency.
358 This will cause the entry to be retried after others have been
363 def SetPrefix(self, prefix):
364 """Set the name prefix for a node
367 prefix: Prefix to set, or '' to not use a prefix
370 self.name = prefix + self.name
372 def SetContents(self, data):
373 """Set the contents of an entry
375 This sets both the data and content_size properties
378 data: Data to set to the contents (bytes)
381 self.contents_size = len(self.data)
383 def ProcessContentsUpdate(self, data):
384 """Update the contents of an entry, after the size is fixed
386 This checks that the new data is the same size as the old. If the size
387 has changed, this triggers a re-run of the packing algorithm.
390 data: Data to set to the contents (bytes)
393 ValueError if the new data size is not the same as the old
397 if state.AllowEntryExpansion() and new_size > self.contents_size:
398 # self.data will indicate the new size needed
400 elif state.AllowEntryContraction() and new_size < self.contents_size:
403 # If not allowed to change, try to deal with it or give up
405 if new_size > self.contents_size:
406 self.Raise('Cannot update entry size from %d to %d' %
407 (self.contents_size, new_size))
409 # Don't let the data shrink. Pad it if necessary
410 if size_ok and new_size < self.contents_size:
411 data += tools.get_bytes(0, self.contents_size - new_size)
414 tout.debug("Entry '%s' size change from %s to %s" % (
415 self._node.path, to_hex(self.contents_size),
417 self.SetContents(data)
420 def ObtainContents(self, skip_entry=None, fake_size=0):
421 """Figure out the contents of an entry.
424 skip_entry (Entry): Entry to skip when obtaining section contents
425 fake_size (int): Size of fake file to create if needed
428 True if the contents were found, False if another call is needed
429 after the other entries are processed.
431 # No contents by default: subclasses can implement this
434 def ResetForPack(self):
435 """Reset offset/size fields so that packing can be done again"""
436 self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
437 (to_hex(self.offset), to_hex(self.orig_offset),
438 to_hex(self.size), to_hex(self.orig_size)))
439 self.pre_reset_size = self.size
440 self.offset = self.orig_offset
441 self.size = self.orig_size
443 def Pack(self, offset):
444 """Figure out how to pack the entry into the section
446 Most of the time the entries are not fully specified. There may be
447 an alignment but no size. In that case we take the size from the
448 contents of the entry.
450 If an entry has no hard-coded offset, it will be placed at @offset.
452 Once this function is complete, both the offset and size of the
456 Current section offset pointer
459 New section offset pointer (after this entry)
461 self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
462 (to_hex(self.offset), to_hex(self.size),
464 if self.offset is None:
465 if self.offset_unset:
466 self.Raise('No offset set with offset-unset: should another '
467 'entry provide this correct offset?')
468 self.offset = tools.align(offset, self.align)
469 needed = self.pad_before + self.contents_size + self.pad_after
470 needed = tools.align(needed, self.align_size)
474 new_offset = self.offset + size
475 aligned_offset = tools.align(new_offset, self.align_end)
476 if aligned_offset != new_offset:
477 size = aligned_offset - self.offset
478 new_offset = aligned_offset
483 if self.size < needed:
484 self.Raise("Entry contents size is %#x (%d) but entry size is "
485 "%#x (%d)" % (needed, needed, self.size, self.size))
486 # Check that the alignment is correct. It could be wrong if the
487 # and offset or size values were provided (i.e. not calculated), but
488 # conflict with the provided alignment values
489 if self.size != tools.align(self.size, self.align_size):
490 self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
491 (self.size, self.size, self.align_size, self.align_size))
492 if self.offset != tools.align(self.offset, self.align):
493 self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
494 (self.offset, self.offset, self.align, self.align))
495 self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
496 (self.offset, self.size, self.contents_size, new_offset))
500 def Raise(self, msg):
501 """Convenience function to raise an error referencing a node"""
502 raise ValueError("Node '%s': %s" % (self._node.path, msg))
505 """Convenience function to log info referencing a node"""
506 tag = "Info '%s'" % self._node.path
507 tout.detail('%30s: %s' % (tag, msg))
509 def Detail(self, msg):
510 """Convenience function to log detail referencing a node"""
511 tag = "Node '%s'" % self._node.path
512 tout.detail('%30s: %s' % (tag, msg))
514 def GetEntryArgsOrProps(self, props, required=False):
515 """Return the values of a set of properties
518 props: List of EntryArg objects
521 ValueError if a property is not found
526 python_prop = prop.name.replace('-', '_')
527 if hasattr(self, python_prop):
528 value = getattr(self, python_prop)
532 value = self.GetArg(prop.name, prop.datatype)
533 if value is None and required:
534 missing.append(prop.name)
537 self.GetImage().MissingArgs(self, missing)
541 """Get the path of a node
544 Full path of the node for this entry
546 return self._node.path
548 def GetData(self, required=True):
549 """Get the contents of an entry
552 required: True if the data must be present, False if it is OK to
556 bytes content of the entry, excluding any padding. If the entry is
557 compressed, the compressed data is returned
559 self.Detail('GetData: size %s' % to_hex_size(self.data))
562 def GetPaddedData(self, data=None):
563 """Get the data for an entry including any padding
565 Gets the entry data and uses its section's pad-byte value to add padding
566 before and after as defined by the pad-before and pad-after properties.
568 This does not consider alignment.
571 Contents of the entry along with any pad bytes before and
575 data = self.GetData()
576 return self.section.GetPaddedDataForEntry(self, data)
578 def GetOffsets(self):
579 """Get the offsets for siblings
581 Some entry types can contain information about the position or size of
582 other entries. An example of this is the Intel Flash Descriptor, which
583 knows where the Intel Management Engine section should go.
585 If this entry knows about the position of other entries, it can specify
586 this by returning values here
591 value: List containing position and size of the given entry
592 type. Either can be None if not known
596 def SetOffsetSize(self, offset, size):
597 """Set the offset and/or size of an entry
600 offset: New offset, or None to leave alone
601 size: New size, or None to leave alone
603 if offset is not None:
    def SetImagePos(self, image_pos):
        """Set the position in the image

        Args:
            image_pos: Position of this entry's containing section in the
                image; this entry's own offset within that section is added
                to produce its absolute image position
        """
        self.image_pos = image_pos + self.offset
616 def ProcessContents(self):
617 """Do any post-packing updates of entry contents
619 This function should call ProcessContentsUpdate() to update the entry
620 contents, if necessary, returning its return value here.
623 data: Data to set to the contents (bytes)
626 True if the new data size is OK, False if expansion is needed
629 ValueError if the new data size is not the same as the old and
630 state.AllowEntryExpansion() is False
634 def WriteSymbols(self, section):
635 """Write symbol values into binary files for access at run time
638 section: Section containing the entry
642 def CheckEntries(self):
643 """Check that the entry offsets are correct
645 This is used for entries which have extra offset requirements (other
646 than having to be fully inside their section). Sub-classes can implement
647 this function and raise if there is a problem.
655 return '%08x' % value
658 def WriteMapLine(fd, indent, name, offset, size, image_pos):
659 print('%s %s%s %s %s' % (Entry.GetStr(image_pos), ' ' * indent,
660 Entry.GetStr(offset), Entry.GetStr(size),
663 def WriteMap(self, fd, indent):
664 """Write a map of the entry to a .map file
667 fd: File to write the map to
668 indent: Curent indent level of map (0=none, 1=one level, etc.)
670 self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
673 def GetEntries(self):
674 """Return a list of entries contained by this entry
677 List of entries, or None if none. A normal entry has no entries
678 within it so will return None
682 def GetArg(self, name, datatype=str):
683 """Get the value of an entry argument or device-tree-node property
685 Some node properties can be provided as arguments to binman. First check
686 the entry arguments, and fall back to the device tree if not found
690 datatype: Data type (str or int)
693 Value of argument as a string or int, or None if no value
696 ValueError if the argument cannot be converted to in
698 value = state.GetEntryArg(name)
699 if value is not None:
704 self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
706 elif datatype == str:
709 raise ValueError("GetArg() internal error: Unknown data type '%s'" %
712 value = fdt_util.GetDatatype(self._node, name, datatype)
716 def WriteDocs(modules, test_missing=None):
717 """Write out documentation about the various entry types to stdout
720 modules: List of modules to include
721 test_missing: Used for testing. This is a module to report
724 print('''Binman Entry Documentation
725 ===========================
727 This file describes the entry types supported by binman. These entry types can
728 be placed in an image one by one to build up a final firmware image. It is
729 fairly easy to create new entry types. Just add a new file to the 'etype'
730 directory. You can use the existing entries as examples.
732 Note that some entries are subclasses of others, using and extending their
733 features to produce new behaviours.
737 modules = sorted(modules)
739 # Don't show the test entry
740 if '_testing' in modules:
741 modules.remove('_testing')
744 module = Entry.Lookup('WriteDocs', name, False)
745 docs = getattr(module, '__doc__')
746 if test_missing == name:
749 lines = docs.splitlines()
750 first_line = lines[0]
751 rest = [line[4:] for line in lines[1:]]
752 hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
754 print('-' * len(hdr))
755 print('\n'.join(rest))
762 raise ValueError('Documentation is missing for modules: %s' %
765 def GetUniqueName(self):
766 """Get a unique name for a node
769 String containing a unique name for a node, consisting of the name
770 of all ancestors (starting from within the 'binman' node) separated
771 by a dot ('.'). This can be useful for generating unique filesnames
772 in the output directory.
778 if node.name == 'binman':
780 name = '%s.%s' % (node.name, name)
783 def extend_to_limit(self, limit):
784 """Extend an entry so that it ends at the given offset limit"""
785 if self.offset + self.size < limit:
786 self.size = limit - self.offset
787 # Request the contents again, since changing the size requires that
788 # the data grows. This should not fail, but check it to be sure.
789 if not self.ObtainContents():
790 self.Raise('Cannot obtain contents when expanding entry')
792 def HasSibling(self, name):
793 """Check if there is a sibling of a given name
796 True if there is an entry with this name in the the same section,
799 return name in self.section.GetEntries()
801 def GetSiblingImagePos(self, name):
802 """Return the image position of the given sibling
805 Image position of sibling, or None if the sibling has no position,
806 or False if there is no such sibling
808 if not self.HasSibling(name):
810 return self.section.GetEntries()[name].image_pos
813 def AddEntryInfo(entries, indent, name, etype, size, image_pos,
814 uncomp_size, offset, entry):
815 """Add a new entry to the entries list
818 entries: List (of EntryInfo objects) to add to
819 indent: Current indent level to add to list
820 name: Entry name (string)
821 etype: Entry type (string)
822 size: Entry size in bytes (int)
823 image_pos: Position within image in bytes (int)
824 uncomp_size: Uncompressed size if the entry uses compression, else
826 offset: Entry offset within parent in bytes (int)
829 entries.append(EntryInfo(indent, name, etype, size, image_pos,
830 uncomp_size, offset, entry))
832 def ListEntries(self, entries, indent):
833 """Add files in this entry to the list of entries
835 This can be overridden by subclasses which need different behaviour.
838 entries: List (of EntryInfo objects) to add to
839 indent: Current indent level to add to list
841 self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
842 self.image_pos, self.uncomp_size, self.offset, self)
844 def ReadData(self, decomp=True, alt_format=None):
845 """Read the data for an entry from the image
847 This is used when the image has been read in and we want to extract the
848 data for a particular entry from that image.
851 decomp: True to decompress any compressed data before returning it;
852 False to return the raw, uncompressed data
857 # Use True here so that we get an uncompressed section to work from,
858 # although compressed sections are currently not supported
859 tout.debug("ReadChildData section '%s', entry '%s'" %
860 (self.section.GetPath(), self.GetPath()))
861 data = self.section.ReadChildData(self, decomp, alt_format)
864 def ReadChildData(self, child, decomp=True, alt_format=None):
865 """Read the data for a particular child entry
867 This reads data from the parent and extracts the piece that relates to
871 child (Entry): Child entry to read data for (must be valid)
872 decomp (bool): True to decompress any compressed data before
873 returning it; False to return the raw, uncompressed data
874 alt_format (str): Alternative format to read in, or None
877 Data for the child (bytes)
881 def LoadData(self, decomp=True):
882 data = self.ReadData(decomp)
883 self.contents_size = len(data)
884 self.ProcessContentsUpdate(data)
885 self.Detail('Loaded data size %x' % len(data))
887 def GetAltFormat(self, data, alt_format):
888 """Read the data for an extry in an alternative format
890 Supported formats are list in the documentation for each entry. An
891 example is fdtmap which provides .
894 data (bytes): Data to convert (this should have been produced by the
896 alt_format (str): Format to use
902 """Get the image containing this entry
905 Image object containing this entry
907 return self.section.GetImage()
909 def WriteData(self, data, decomp=True):
910 """Write the data to an entry in the image
912 This is used when the image has been read in and we want to replace the
913 data for a particular entry in that image.
915 The image must be re-packed and written out afterwards.
918 data: Data to replace it with
919 decomp: True to compress the data if needed, False if data is
920 already compressed so should be used as is
923 True if the data did not result in a resize of this entry, False if
924 the entry must be resized
926 if self.size is not None:
927 self.contents_size = self.size
929 self.contents_size = self.pre_reset_size
930 ok = self.ProcessContentsUpdate(data)
931 self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
932 section_ok = self.section.WriteChildData(self)
933 return ok and section_ok
935 def WriteChildData(self, child):
936 """Handle writing the data in a child entry
938 This should be called on the child's parent section after the child's
939 data has been updated. It should update any data structures needed to
940 validate that the update is successful.
942 This base-class implementation does nothing, since the base Entry object
943 does not have any children.
946 child: Child Entry that was written
949 True if the section could be updated successfully, False if the
950 data is such that the section could not update
954 def GetSiblingOrder(self):
955 """Get the relative order of an entry amoung its siblings
958 'start' if this entry is first among siblings, 'end' if last,
961 entries = list(self.section.GetEntries().values())
963 if self == entries[0]:
965 elif self == entries[-1]:
969 def SetAllowMissing(self, allow_missing):
970 """Set whether a section allows missing external blobs
973 allow_missing: True if allowed, False if not allowed
975 # This is meaningless for anything other than sections
    def SetAllowFakeBlob(self, allow_fake):
        """Set whether a section allows to create a fake blob

        Args:
            allow_fake: True if allowed, False if not allowed
        """
        self.allow_fake = allow_fake
986 def CheckMissing(self, missing_list):
987 """Check if any entries in this section have missing external blobs
989 If there are missing blobs, the entries are added to the list
992 missing_list: List of Entry objects to be added to
995 missing_list.append(self)
997 def check_fake_fname(self, fname, size=0):
998 """If the file is missing and the entry allows fake blobs, fake it
1000 Sets self.faked to True if faked
1003 fname (str): Filename to check
1004 size (int): Size of fake file to create
1008 fname (str): Filename of faked file
1009 bool: True if the blob was faked, False if not
1011 if self.allow_fake and not pathlib.Path(fname).is_file():
1012 outfname = tools.get_output_filename(os.path.basename(fname))
1013 with open(outfname, "wb") as out:
1016 tout.info(f"Entry '{self._node.path}': Faked file '{outfname}'")
1017 return outfname, True
1020 def CheckFakedBlobs(self, faked_blobs_list):
1021 """Check if any entries in this section have faked external blobs
1023 If there are faked blobs, the entries are added to the list
1026 fake_blobs_list: List of Entry objects to be added to
1028 # This is meaningless for anything other than blobs
    def GetAllowMissing(self):
        """Get whether a section allows missing external blobs

        Returns:
            True if allowed, False if not allowed
        """
        return self.allow_missing
    def record_missing_bintool(self, bintool):
        """Record a missing bintool that was needed to produce this entry

        Args:
            bintool (Bintool): Bintool that was missing
        """
        self.missing_bintools.append(bintool)
1047 def check_missing_bintools(self, missing_list):
1048 """Check if any entries in this section have missing bintools
1050 If there are missing bintools, these are added to the list
1053 missing_list: List of Bintool objects to be added to
1055 missing_list += self.missing_bintools
1057 def GetHelpTags(self):
1058 """Get the tags use for missing-blob help
1061 list of possible tags, most desirable first
1063 return list(filter(None, [self.missing_msg, self.name, self.etype]))
1065 def CompressData(self, indata):
1066 """Compress data according to the entry's compression method
1069 indata: Data to compress
1072 Compressed data (first word is the compressed size)
1074 self.uncomp_data = indata
1075 if self.compress != 'none':
1076 self.uncomp_size = len(indata)
1077 data = comp_util.compress(indata, self.compress)
1081 def UseExpanded(cls, node, etype, new_etype):
1082 """Check whether to use an expanded entry type
1084 This is called by Entry.Create() when it finds an expanded version of
1085 an entry type (e.g. 'u-boot-expanded'). If this method returns True then
1086 it will be used (e.g. in place of 'u-boot'). If it returns False, it is
1090 node: Node object containing information about the entry to
1092 etype: Original entry type being used
1093 new_etype: New entry type proposed
1096 True to use this entry type, False to use the original one
1098 tout.info("Node '%s': etype '%s': %s selected" %
1099 (node.path, etype, new_etype))
1102 def CheckAltFormats(self, alt_formats):
1103 """Add any alternative formats supported by this entry type
1106 alt_formats (dict): Dict to add alt_formats to:
1107 key: Name of alt format
1112 def AddBintools(self, btools):
1113 """Add the bintools used by this entry type
1116 btools (dict of Bintool):
1121 def AddBintool(self, tools, name):
1122 """Add a new bintool to the tools used by this etype
1125 name: Name of the tool
1127 btool = bintool.Bintool.create(name)
    def SetUpdateHash(self, update_hash):
        """Set whether this entry's "hash" subnode should be updated

        Args:
            update_hash: True if hash should be updated, False if not
        """
        self.update_hash = update_hash
1139 def collect_contents_to_file(self, entries, prefix, fake_size=0):
1140 """Put the contents of a list of entries into a file
1143 entries (list of Entry): Entries to collect
1144 prefix (str): Filename prefix of file to write to
1145 fake_size (int): Size of fake file to create if needed
1147 If any entry does not have contents yet, this function returns False
1152 bytes: Concatenated data from all the entries (or None)
1153 str: Filename of file written (or None if no data)
1154 str: Unique portion of filename (or None if no data)
1157 for entry in entries:
1158 # First get the input data and put it in a file. If not available,
1160 if not entry.ObtainContents(fake_size=fake_size):
1161 return None, None, None
1162 data += entry.GetData()
1163 uniq = self.GetUniqueName()
1164 fname = tools.get_output_filename(f'{prefix}.{uniq}')
1165 tools.write_file(fname, data)
1166 return data, fname, uniq