1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
4 # Base class for all entries
7 from collections import namedtuple
13 from binman import bintool
14 from binman import comp_util
15 from dtoc import fdt_util
16 from patman import tools
17 from patman.tools import to_hex, to_hex_size
18 from patman import tout
22 # This is imported if needed
25 # An argument which can be passed to entries on the command line, in lieu of
26 # device-tree properties.
27 EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
29 # Information about an entry for use when displaying summaries
30 EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
31 'image_pos', 'uncomp_size', 'offset',
35 """An Entry in the section
37 An entry corresponds to a single node in the device-tree description
38 of the section. Each entry ends up being a part of the final section.
39 Entries can be placed either right next to each other, or with padding
40 between them. The type of the entry determines the data that is in it.
42 This class is not used by itself. All entry objects are subclasses of
46 section: Section object containing this entry
47 node: The node that created this entry
48 offset: Offset of entry within the section, None if not known yet (in
49 which case it will be calculated by Pack())
50 size: Entry size in bytes, None if not known
51 pre_reset_size: size as it was before ResetForPack(). This allows us to
52 keep track of the size we started with and detect size changes
53 uncomp_size: Size of uncompressed data in bytes, if the entry is
55 contents_size: Size of contents in bytes, 0 by default
56 align: Entry start offset alignment relative to the start of the
57 containing section, or None
58 align_size: Entry size alignment, or None
59 align_end: Entry end offset alignment relative to the start of the
60 containing section, or None
61 pad_before: Number of pad bytes before the contents when it is placed
62 in the containing section, 0 if none. The pad bytes become part of
64 pad_after: Number of pad bytes after the contents when it is placed in
65 the containing section, 0 if none. The pad bytes become part of
67 data: Contents of entry (string of bytes). This does not include
68 padding created by pad_before or pad_after. If the entry is
69 compressed, this contains the compressed data.
70 uncomp_data: Original uncompressed data, if this entry is compressed,
72 compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
73 orig_offset: Original offset value read from node
74 orig_size: Original size value read from node
75 missing: True if this entry is missing its contents
76 allow_missing: Allow children of this entry to be missing (used by
77 subclasses such as Entry_section)
78 allow_fake: Allow creating a dummy fake file if the blob file is not
79 available. This is mainly used for testing.
80 external: True if this entry contains an external binary blob
81 bintools: Bintools used by this entry (only populated for Image)
82 missing_bintools: List of missing bintools for this entry
83 update_hash: True if this entry's "hash" subnode should be
84 updated with a hash of the entry contents
86 def __init__(self, section, etype, node, name_prefix=''):
# Initialise a new entry for @node within @section, defaulting all
# packing/state fields. NOTE(review): several original lines (88, 90,
# 92-96, 99, 102, 105-106, 111-112, 116) are not visible in this view,
# so additional attributes are likely initialised in the full file.
87 # Put this here to allow entry-docs and help to work without libfdt
89 from binman import state
91 self.section = section
# Fall back to 'none' when no node is supplied (e.g. for entry docs)
94 self.name = node and (name_prefix + node.name) or 'none'
97 self.pre_reset_size = None
98 self.uncomp_size = None
100 self.uncomp_data = None
101 self.contents_size = 0
103 self.align_size = None
104 self.align_end = None
107 self.offset_unset = False
108 self.image_pos = None
109 self.extend_size = False
110 self.compress = 'none'
113 self.external = False
114 self.allow_missing = False
115 self.allow_fake = False
117 self.missing_bintools = []
# Hash subnodes are updated by default; see SetUpdateHash()
118 self.update_hash = True
# Static lookup of the Entry subclass implementing a given entry type.
121 def FindEntryClass(etype, expanded):
122 """Look up the entry class for a node.
125 node_node: Path name of Node object containing information about
126 the entry to create (used for errors)
127 etype: Entry type to use
128 expanded: Use the expanded version of etype
131 The entry class object if found, else None if not found and expanded
132 is True, else a tuple:
133 module name that could not be found
136 # Convert something like 'u-boot@0' to 'u_boot' since we are only
137 # interested in the type.
138 module_name = etype.replace('-', '_')
140 if '@' in module_name:
141 module_name = module_name.split('@')[0]
# Expanded variants live in modules suffixed '_expanded'
143 module_name += '_expanded'
# 'modules' acts as a cache of already-imported etype modules
144 module = modules.get(module_name)
146 # Also allow entry-type modules to be brought in from the etype directory.
148 # Import the module if we have not already done so.
# NOTE(review): the enclosing try: (orig. line ~150) is not visible here
151 module = importlib.import_module('binman.etype.' + module_name)
152 except ImportError as e:
# On failure, report the module name and the ImportError to the caller
155 return module_name, e
156 modules[module_name] = module
158 # Look up the expected class name
# Each etype module declares a class named 'Entry_<module_name>'
159 return getattr(module, 'Entry_%s' % module_name)
162 def Lookup(node_path, etype, expanded, missing_etype=False):
163 """Look up the entry class for a node.
166 node_path (str): Path name of Node object containing information
167 about the entry to create (used for errors)
168 etype (str): Entry type to use
169 expanded (bool): Use the expanded version of etype
170 missing_etype (bool): True to default to a blob etype if the
171 requested etype is not found
174 The entry class object if found, else None if not found and expanded
178 ValueError if expanded is False and the class is not found
180 # Convert something like 'u-boot@0' to 'u_boot' since we are only
181 # interested in the type.
182 cls = Entry.FindEntryClass(etype, expanded)
# A tuple return from FindEntryClass() signals an import failure
185 elif isinstance(cls, tuple):
# Fall back to a plain blob etype when requested
187 cls = Entry.FindEntryClass('blob', False)
188 if isinstance(cls, tuple): # This should not fail
191 "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
192 (etype, node_path, module_name, e))
# Factory: build the correct Entry subclass instance for a node.
196 def Create(section, node, etype=None, expanded=False, missing_etype=False):
197 """Create a new entry for a node.
200 section (entry_Section): Section object containing this node
201 node (Node): Node object containing information about the entry to
203 etype (str): Entry type to use, or None to work it out (used for
205 expanded (bool): Use the expanded version of etype
206 missing_etype (bool): True to default to a blob etype if the
207 requested etype is not found
210 A new Entry object of the correct type (a subclass of Entry)
# The 'type' property overrides the node name as the etype
213 etype = fdt_util.GetString(node, 'type', node.name)
214 obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
216 # Check whether to use the expanded entry
217 new_etype = etype + '-expanded'
# A 'no-expanded' property on the node suppresses expansion
218 can_expand = not fdt_util.GetBool(node, 'no-expanded')
219 if can_expand and obj.UseExpanded(node, etype, new_etype):
224 obj = Entry.Lookup(node.path, etype, False, missing_etype)
226 # Call its constructor to get the object we want.
227 return obj(section, etype, node)
# NOTE(review): the 'def ReadNode(self):' header (orig. line 229) is not
# visible in this view; the lines below are its docstring and body.
230 """Read entry information from the node
232 This must be called as the first thing after the Entry is created.
234 This reads all the fields we recognise from the node, ready for use.
# Reject obsolete property names with a helpful message
236 if 'pos' in self._node.props:
237 self.Raise("Please use 'offset' instead of 'pos'")
238 if 'expand-size' in self._node.props:
239 self.Raise("Please use 'extend-size' instead of 'expand-size'")
240 self.offset = fdt_util.GetInt(self._node, 'offset')
241 self.size = fdt_util.GetInt(self._node, 'size')
242 self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
243 self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
# When repacking, remember the as-read values as the originals
244 if self.GetImage().copy_to_orig:
245 self.orig_offset = self.offset
246 self.orig_size = self.size
248 # These should not be set in input files, but are set in an FDT map,
249 # which is also read by this code.
250 self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
251 self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
253 self.align = fdt_util.GetInt(self._node, 'align')
254 if tools.not_power_of_two(self.align):
255 raise ValueError("Node '%s': Alignment %s must be a power of two" %
256 (self._node.path, self.align))
# Inherit the section's default alignment if none was given
257 if self.section and self.align is None:
258 self.align = self.section.align_default
259 self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
260 self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
261 self.align_size = fdt_util.GetInt(self._node, 'align-size')
262 if tools.not_power_of_two(self.align_size):
263 self.Raise("Alignment size %s must be a power of two" %
265 self.align_end = fdt_util.GetInt(self._node, 'align-end')
266 self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
267 self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
268 self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
270 # This is only supported by blobs and sections at present
271 self.compress = fdt_util.GetString(self._node, 'compress', 'none')
# Default filename for this entry's contents; base class has none.
273 def GetDefaultFilename(self):
# NOTE(review): the docstring below belongs to GetFdts() whose def
# header (orig. line ~276) is not visible in this view.
277 """Get the device trees used by this entry
280 Empty dict, if this entry is not a .dtb, otherwise:
282 key: Filename from this entry (without the path)
284 Entry object for this dtb
285 Filename of file containing this dtb
289 def ExpandEntries(self):
290 """Expand out entries which produce other entries
292 Some entries generate subnodes automatically, from which sub-entries
293 are then created. This method allows those to be added to the binman
294 definition for the current image. An entry which implements this method
295 should call state.AddSubnode() to add a subnode and can add properties
296 with state.AddString(), etc.
298 An example is 'files', which produces a section containing a list of
303 def AddMissingProperties(self, have_image_pos):
304 """Add new properties to the device tree as needed for this entry
307 have_image_pos: True if this entry has an image position. This can
308 be False if its parent section is compressed, since compression
309 groups all entries together into a compressed block of data,
310 obscuring the start of each individual child entry
# Ensure offset/size placeholders exist so they can be filled in later
312 for prop in ['offset', 'size']:
313 if not prop in self._node.props:
314 state.AddZeroProp(self._node, prop)
315 if have_image_pos and 'image-pos' not in self._node.props:
316 state.AddZeroProp(self._node, 'image-pos')
# Repackable images also record the original offset/size
317 if self.GetImage().allow_repack:
318 if self.orig_offset is not None:
319 state.AddZeroProp(self._node, 'orig-offset', True)
320 if self.orig_size is not None:
321 state.AddZeroProp(self._node, 'orig-size', True)
323 if self.compress != 'none':
324 state.AddZeroProp(self._node, 'uncomp-size')
327 err = state.CheckAddHashProp(self._node)
331 def SetCalculatedProperties(self):
332 """Set the value of device-tree properties calculated by binman"""
333 state.SetInt(self._node, 'offset', self.offset)
334 state.SetInt(self._node, 'size', self.size)
# image-pos is stored relative to the root section's skip-at-start
335 base = self.section.GetRootSkipAtStart() if self.section else 0
336 if self.image_pos is not None:
337 state.SetInt(self._node, 'image-pos', self.image_pos - base)
338 if self.GetImage().allow_repack:
339 if self.orig_offset is not None:
340 state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
341 if self.orig_size is not None:
342 state.SetInt(self._node, 'orig-size', self.orig_size, True)
343 if self.uncomp_size is not None:
344 state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
# Update the 'hash' subnode value, if enabled for this entry
347 state.CheckSetHashValue(self._node, self.GetData)
349 def ProcessFdt(self, fdt):
350 """Allow entries to adjust the device tree
352 Some entries need to adjust the device tree for their purposes. This
353 may involve adding or deleting properties.
356 True if processing is complete
357 False if processing could not be completed due to a dependency.
358 This will cause the entry to be retried after others have been
363 def SetPrefix(self, prefix):
364 """Set the name prefix for a node
367 prefix: Prefix to set, or '' to not use a prefix
370 self.name = prefix + self.name
372 def SetContents(self, data):
373 """Set the contents of an entry
375 This sets both the data and content_size properties
378 data: Data to set to the contents (bytes)
# NOTE(review): the assignment 'self.data = data' (orig. line ~380) is
# not visible in this view; contents_size is derived from self.data.
381 self.contents_size = len(self.data)
383 def ProcessContentsUpdate(self, data):
384 """Update the contents of an entry, after the size is fixed
386 This checks that the new data is the same size as the old. If the size
387 has changed, this triggers a re-run of the packing algorithm.
390 data: Data to set to the contents (bytes)
393 ValueError if the new data size is not the same as the old
# Growing is allowed only when the packing state permits expansion
397 if state.AllowEntryExpansion() and new_size > self.contents_size:
398 # self.data will indicate the new size needed
400 elif state.AllowEntryContraction() and new_size < self.contents_size:
403 # If not allowed to change, try to deal with it or give up
405 if new_size > self.contents_size:
406 self.Raise('Cannot update entry size from %d to %d' %
407 (self.contents_size, new_size))
409 # Don't let the data shrink. Pad it if necessary
410 if size_ok and new_size < self.contents_size:
411 data += tools.get_bytes(0, self.contents_size - new_size)
# NOTE(review): 'size_ok' is set on lines not visible in this view
414 tout.debug("Entry '%s' size change from %s to %s" % (
415 self._node.path, to_hex(self.contents_size),
417 self.SetContents(data)
420 def ObtainContents(self):
421 """Figure out the contents of an entry.
424 True if the contents were found, False if another call is needed
425 after the other entries are processed.
427 # No contents by default: subclasses can implement this
def ResetForPack(self):
    """Clear the offset/size fields ready for another packing pass.

    The current size is remembered in pre_reset_size so later passes can
    detect a size change; offset and size then revert to the original
    values read from the node.
    """
    msg = 'ResetForPack: offset %s->%s, size %s->%s' % (
        to_hex(self.offset), to_hex(self.orig_offset),
        to_hex(self.size), to_hex(self.orig_size))
    self.Detail(msg)
    self.pre_reset_size = self.size
    self.offset = self.orig_offset
    self.size = self.orig_size
439 def Pack(self, offset):
440 """Figure out how to pack the entry into the section
442 Most of the time the entries are not fully specified. There may be
443 an alignment but no size. In that case we take the size from the
444 contents of the entry.
446 If an entry has no hard-coded offset, it will be placed at @offset.
448 Once this function is complete, both the offset and size of the
452 Current section offset pointer
455 New section offset pointer (after this entry)
457 self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
458 (to_hex(self.offset), to_hex(self.size),
460 if self.offset is None:
461 if self.offset_unset:
462 self.Raise('No offset set with offset-unset: should another '
463 'entry provide this correct offset?')
# Place the entry at the next suitably-aligned position
464 self.offset = tools.align(offset, self.align)
465 needed = self.pad_before + self.contents_size + self.pad_after
466 needed = tools.align(needed, self.align_size)
# NOTE(review): the assignment of 'size' (orig. lines 467-469) is not
# visible in this view
470 new_offset = self.offset + size
# Honour align-end by growing the entry to the aligned end position
471 aligned_offset = tools.align(new_offset, self.align_end)
472 if aligned_offset != new_offset:
473 size = aligned_offset - self.offset
474 new_offset = aligned_offset
479 if self.size < needed:
480 self.Raise("Entry contents size is %#x (%d) but entry size is "
481 "%#x (%d)" % (needed, needed, self.size, self.size))
482 # Check that the alignment is correct. It could be wrong if the
483 # offset or size values were provided (i.e. not calculated), but
484 # conflict with the provided alignment values
485 if self.size != tools.align(self.size, self.align_size):
486 self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
487 (self.size, self.size, self.align_size, self.align_size))
488 if self.offset != tools.align(self.offset, self.align):
489 self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
490 (self.offset, self.offset, self.align, self.align))
491 self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
492 (self.offset, self.size, self.contents_size, new_offset))
def Raise(self, msg):
    """Convenience helper: raise a ValueError naming this entry's node.

    Args:
        msg: Error text to include after the node path
    """
    text = "Node '%s': %s" % (self._node.path, msg)
    raise ValueError(text)
# NOTE(review): the 'def Info(self, msg):' header (orig. line ~500) is not
# visible in this view; the next three lines are its docstring and body.
501 """Convenience function to log info referencing a node"""
502 tag = "Info '%s'" % self._node.path
503 tout.detail('%30s: %s' % (tag, msg))
505 def Detail(self, msg):
506 """Convenience function to log detail referencing a node"""
507 tag = "Node '%s'" % self._node.path
508 tout.detail('%30s: %s' % (tag, msg))
510 def GetEntryArgsOrProps(self, props, required=False):
511 """Return the values of a set of properties
514 props: List of EntryArg objects
517 ValueError if a property is not found
# Prefer an attribute already set on this object over an entry arg
522 python_prop = prop.name.replace('-', '_')
523 if hasattr(self, python_prop):
524 value = getattr(self, python_prop)
528 value = self.GetArg(prop.name, prop.datatype)
529 if value is None and required:
530 missing.append(prop.name)
# Report all missing required args at once via the image
533 self.GetImage().MissingArgs(self, missing)
# NOTE(review): the 'def GetPath(self):' header (orig. line ~536) is not
# visible in this view.
537 """Get the path of a node
540 Full path of the node for this entry
542 return self._node.path
544 def GetData(self, required=True):
545 """Get the contents of an entry
548 required: True if the data must be present, False if it is OK to
552 bytes content of the entry, excluding any padding. If the entry is
553 compressed, the compressed data is returned
555 self.Detail('GetData: size %s' % to_hex_size(self.data))
# NOTE(review): the 'return self.data' line (orig. ~556) is not visible
# in this view
558 def GetPaddedData(self, data=None):
559 """Get the data for an entry including any padding
561 Gets the entry data and uses its section's pad-byte value to add padding
562 before and after as defined by the pad-before and pad-after properties.
564 This does not consider alignment.
567 Contents of the entry along with any pad bytes before and
# NOTE(review): the 'if data is None:' guard (orig. ~570) is not visible
571 data = self.GetData()
572 return self.section.GetPaddedDataForEntry(self, data)
574 def GetOffsets(self):
575 """Get the offsets for siblings
577 Some entry types can contain information about the position or size of
578 other entries. An example of this is the Intel Flash Descriptor, which
579 knows where the Intel Management Engine section should go.
581 If this entry knows about the position of other entries, it can specify
582 this by returning values here
587 value: List containing position and size of the given entry
588 type. Either can be None if not known
592 def SetOffsetSize(self, offset, size):
593 """Set the offset and/or size of an entry
596 offset: New offset, or None to leave alone
597 size: New size, or None to leave alone
599 if offset is not None:
def SetImagePos(self, image_pos):
    """Record this entry's absolute position within the image.

    Args:
        image_pos: Image position of the containing section; this entry's
            own offset is added to give its absolute position
    """
    self.image_pos = self.offset + image_pos
612 def ProcessContents(self):
613 """Do any post-packing updates of entry contents
615 This function should call ProcessContentsUpdate() to update the entry
616 contents, if necessary, returning its return value here.
619 data: Data to set to the contents (bytes)
622 True if the new data size is OK, False if expansion is needed
625 ValueError if the new data size is not the same as the old and
626 state.AllowEntryExpansion() is False
630 def WriteSymbols(self, section):
631 """Write symbol values into binary files for access at run time
634 section: Section containing the entry
638 def CheckEntries(self):
639 """Check that the entry offsets are correct
641 This is used for entries which have extra offset requirements (other
642 than having to be fully inside their section). Sub-classes can implement
643 this function and raise if there is a problem.
# NOTE(review): the GetStr() helper's def header (orig. line ~647) is not
# visible; the next line formats a value as 8 hex digits.
651 return '%08x' % value
654 def WriteMapLine(fd, indent, name, offset, size, image_pos):
655 print('%s %s%s %s %s' % (Entry.GetStr(image_pos), ' ' * indent,
656 Entry.GetStr(offset), Entry.GetStr(size),
659 def WriteMap(self, fd, indent):
660 """Write a map of the entry to a .map file
663 fd: File to write the map to
664 indent: Current indent level of map (0=none, 1=one level, etc.)
666 self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
669 def GetEntries(self):
670 """Return a list of entries contained by this entry
673 List of entries, or None if none. A normal entry has no entries
674 within it so will return None
678 def GetArg(self, name, datatype=str):
679 """Get the value of an entry argument or device-tree-node property
681 Some node properties can be provided as arguments to binman. First check
682 the entry arguments, and fall back to the device tree if not found
686 datatype: Data type (str or int)
689 Value of argument as a string or int, or None if no value
692 ValueError if the argument cannot be converted to int
# Entry args (command-line) take precedence over node properties
694 value = state.GetEntryArg(name)
695 if value is not None:
700 self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
702 elif datatype == str:
705 raise ValueError("GetArg() internal error: Unknown data type '%s'" %
# Fall back to reading the property from the device-tree node
708 value = fdt_util.GetDatatype(self._node, name, datatype)
# Static helper: emit rST documentation for all entry types to stdout.
712 def WriteDocs(modules, test_missing=None):
713 """Write out documentation about the various entry types to stdout
716 modules: List of modules to include
717 test_missing: Used for testing. This is a module to report
720 print('''Binman Entry Documentation
721 ===========================
723 This file describes the entry types supported by binman. These entry types can
724 be placed in an image one by one to build up a final firmware image. It is
725 fairly easy to create new entry types. Just add a new file to the 'etype'
726 directory. You can use the existing entries as examples.
728 Note that some entries are subclasses of others, using and extending their
729 features to produce new behaviours.
733 modules = sorted(modules)
735 # Don't show the test entry
736 if '_testing' in modules:
737 modules.remove('_testing')
740 module = Entry.Lookup('WriteDocs', name, False)
741 docs = getattr(module, '__doc__')
742 if test_missing == name:
# Each etype docstring: first line is the title, rest is indented body
745 lines = docs.splitlines()
746 first_line = lines[0]
747 rest = [line[4:] for line in lines[1:]]
748 hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
750 print('-' * len(hdr))
751 print('\n'.join(rest))
758 raise ValueError('Documentation is missing for modules: %s' %
761 def GetUniqueName(self):
762 """Get a unique name for a node
765 String containing a unique name for a node, consisting of the name
766 of all ancestors (starting from within the 'binman' node) separated
767 by a dot ('.'). This can be useful for generating unique filenames
768 in the output directory.
# Walk up the node hierarchy, stopping at the 'binman' node
774 if node.name == 'binman':
776 name = '%s.%s' % (node.name, name)
def extend_to_limit(self, limit):
    """Grow this entry, if needed, so that it ends at offset @limit."""
    end = self.offset + self.size
    if end >= limit:
        return
    self.size = limit - self.offset
    # The contents must be regenerated at the new size; this is expected
    # to succeed, but verify to be safe.
    if not self.ObtainContents():
        self.Raise('Cannot obtain contents when expanding entry')
def HasSibling(self, name):
    """Check whether an entry of the given name exists in this entry's
    section.

    Args:
        name: Name of the sibling entry to look for

    Returns:
        True if there is an entry with this name in the same section,
            else False
    """
    siblings = self.section.GetEntries()
    return name in siblings
797 def GetSiblingImagePos(self, name):
798 """Return the image position of the given sibling
801 Image position of sibling, or None if the sibling has no position,
802 or False if there is no such sibling
804 if not self.HasSibling(name):
# NOTE(review): the no-sibling branch body (orig. line ~805, presumably
# 'return False' per the docstring above) is not visible in this view
806 return self.section.GetEntries()[name].image_pos
# Static helper: append one EntryInfo record describing an entry.
809 def AddEntryInfo(entries, indent, name, etype, size, image_pos,
810 uncomp_size, offset, entry):
811 """Add a new entry to the entries list
814 entries: List (of EntryInfo objects) to add to
815 indent: Current indent level to add to list
816 name: Entry name (string)
817 etype: Entry type (string)
818 size: Entry size in bytes (int)
819 image_pos: Position within image in bytes (int)
820 uncomp_size: Uncompressed size if the entry uses compression, else
822 offset: Entry offset within parent in bytes (int)
825 entries.append(EntryInfo(indent, name, etype, size, image_pos,
826 uncomp_size, offset, entry))
def ListEntries(self, entries, indent):
    """Append this entry's summary information to @entries.

    Subclasses with child entries override this to list them as well.

    Args:
        entries: List (of EntryInfo objects) to add to
        indent: Current indent level to record for this entry
    """
    info = (self.name, self.etype, self.size, self.image_pos,
            self.uncomp_size, self.offset, self)
    self.AddEntryInfo(entries, indent, *info)
840 def ReadData(self, decomp=True, alt_format=None):
841 """Read the data for an entry from the image
843 This is used when the image has been read in and we want to extract the
844 data for a particular entry from that image.
847 decomp: True to decompress any compressed data before returning it;
848 False to return the raw, uncompressed data
853 # Use True here so that we get an uncompressed section to work from,
854 # although compressed sections are currently not supported
855 tout.debug("ReadChildData section '%s', entry '%s'" %
856 (self.section.GetPath(), self.GetPath()))
# Delegate to the parent section, which knows this entry's position
857 data = self.section.ReadChildData(self, decomp, alt_format)
860 def ReadChildData(self, child, decomp=True, alt_format=None):
861 """Read the data for a particular child entry
863 This reads data from the parent and extracts the piece that relates to
867 child (Entry): Child entry to read data for (must be valid)
868 decomp (bool): True to decompress any compressed data before
869 returning it; False to return the raw, uncompressed data
870 alt_format (str): Alternative format to read in, or None
873 Data for the child (bytes)
def LoadData(self, decomp=True):
    """Load this entry's contents back from the image it was read from.

    Reads the entry data, records its size and pushes the data through
    ProcessContentsUpdate().

    Args:
        decomp: True to decompress any compressed data first
    """
    data = self.ReadData(decomp)
    size = len(data)
    self.contents_size = size
    self.ProcessContentsUpdate(data)
    self.Detail('Loaded data size %x' % size)
883 def GetAltFormat(self, data, alt_format):
884 """Read the data for an entry in an alternative format
886 Supported formats are listed in the documentation for each entry. An
887 example is fdtmap which provides .
890 data (bytes): Data to convert (this should have been produced by the
892 alt_format (str): Format to use
# NOTE(review): the 'def GetImage(self):' header (orig. line ~897) is not
# visible in this view; the lines below are its docstring and body.
898 """Get the image containing this entry
901 Image object containing this entry
903 return self.section.GetImage()
905 def WriteData(self, data, decomp=True):
906 """Write the data to an entry in the image
908 This is used when the image has been read in and we want to replace the
909 data for a particular entry in that image.
911 The image must be re-packed and written out afterwards.
914 data: Data to replace it with
915 decomp: True to compress the data if needed, False if data is
916 already compressed so should be used as is
919 True if the data did not result in a resize of this entry, False if
920 the entry must be resized
# Use the packed size if known, else fall back to the pre-reset size
922 if self.size is not None:
923 self.contents_size = self.size
# NOTE(review): the 'else:' line (orig. ~924) is not visible in this view
925 self.contents_size = self.pre_reset_size
926 ok = self.ProcessContentsUpdate(data)
927 self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
# Give the parent section a chance to update its own structures
928 section_ok = self.section.WriteChildData(self)
929 return ok and section_ok
931 def WriteChildData(self, child):
932 """Handle writing the data in a child entry
934 This should be called on the child's parent section after the child's
935 data has been updated. It should update any data structures needed to
936 validate that the update is successful.
938 This base-class implementation does nothing, since the base Entry object
939 does not have any children.
942 child: Child Entry that was written
945 True if the section could be updated successfully, False if the
946 data is such that the section could not update
950 def GetSiblingOrder(self):
951 """Get the relative order of an entry among its siblings
954 'start' if this entry is first among siblings, 'end' if last,
957 entries = list(self.section.GetEntries().values())
959 if self == entries[0]:
961 elif self == entries[-1]:
965 def SetAllowMissing(self, allow_missing):
966 """Set whether a section allows missing external blobs
969 allow_missing: True if allowed, False if not allowed
971 # This is meaningless for anything other than sections
def SetAllowFakeBlob(self, allow_fake):
    """Control whether this entry may fake a missing blob file.

    Args:
        allow_fake: True to permit creating a fake blob, False to forbid it
    """
    self.allow_fake = allow_fake
982 def CheckMissing(self, missing_list):
983 """Check if any entries in this section have missing external blobs
985 If there are missing blobs, the entries are added to the list
988 missing_list: List of Entry objects to be added to
# NOTE(review): the guard (orig. line ~990, presumably 'if self.missing:')
# is not visible in this view
991 missing_list.append(self)
993 def check_fake_fname(self, fname):
994 """If the file is missing and the entry allows fake blobs, fake it
996 Sets self.faked to True if faked
999 fname (str): Filename to check
1002 fname (str): Filename of faked file
# Only fake when allowed and the real file genuinely does not exist
1004 if self.allow_fake and not pathlib.Path(fname).is_file():
# The fake file is created (empty) in the output directory
1005 outfname = tools.get_output_filename(os.path.basename(fname))
1006 with open(outfname, "wb") as out:
1012 def CheckFakedBlobs(self, faked_blobs_list):
1013 """Check if any entries in this section have faked external blobs
1015 If there are faked blobs, the entries are added to the list
1018 fake_blobs_list: List of Entry objects to be added to
1020 # This is meaningless for anything other than blobs
def GetAllowMissing(self):
    """Report whether this entry permits missing external blobs.

    Returns:
        True if missing blobs are allowed, False if not
    """
    return self.allow_missing
def record_missing_bintool(self, bintool):
    """Note that a bintool needed to build this entry was unavailable.

    Args:
        bintool (Bintool): The tool that was missing
    """
    self.missing_bintools.append(bintool)
def check_missing_bintools(self, missing_list):
    """Collect this entry's missing bintools into @missing_list.

    Args:
        missing_list: List of Bintool objects, extended in place
    """
    missing_list.extend(self.missing_bintools)
def GetHelpTags(self):
    """Get the tags used for missing-blob help messages.

    Returns:
        list of possible tags, most desirable first; falsy candidates
            (e.g. a missing missing-msg) are dropped
    """
    candidates = [self.missing_msg, self.name, self.etype]
    return [tag for tag in candidates if tag]
1057 def CompressData(self, indata):
1058 """Compress data according to the entry's compression method
1061 indata: Data to compress
1064 Compressed data (first word is the compressed size)
# Keep the uncompressed original for later reference
1066 self.uncomp_data = indata
1067 if self.compress != 'none':
1068 self.uncomp_size = len(indata)
1069 data = comp_util.compress(indata, self.compress)
# NOTE(review): the return statement (orig. lines 1070-1072) is not
# visible in this view
1073 def UseExpanded(cls, node, etype, new_etype):
1074 """Check whether to use an expanded entry type
1076 This is called by Entry.Create() when it finds an expanded version of
1077 an entry type (e.g. 'u-boot-expanded'). If this method returns True then
1078 it will be used (e.g. in place of 'u-boot'). If it returns False, it is
1082 node: Node object containing information about the entry to
1084 etype: Original entry type being used
1085 new_etype: New entry type proposed
1088 True to use this entry type, False to use the original one
1090 tout.info("Node '%s': etype '%s': %s selected" %
1091 (node.path, etype, new_etype))
1094 def CheckAltFormats(self, alt_formats):
1095 """Add any alternative formats supported by this entry type
1098 alt_formats (dict): Dict to add alt_formats to:
1099 key: Name of alt format
1104 def AddBintools(self, tools):
1105 """Add the bintools used by this entry type
1108 tools (dict of Bintool):
1113 def AddBintool(self, tools, name):
1114 """Add a new bintool to the tools used by this etype
1117 name: Name of the tool
# Create the named bintool and register it for this etype
1119 btool = bintool.Bintool.create(name)
def SetUpdateHash(self, update_hash):
    """Control whether this entry's "hash" subnode gets updated.

    Args:
        update_hash: True to update the hash subnode with a hash of the
            entry contents, False to leave it alone
    """
    self.update_hash = update_hash
1131 def collect_contents_to_file(self, entries, prefix):
1132 """Put the contents of a list of entries into a file
1135 entries (list of Entry): Entries to collect
1136 prefix (str): Filename prefix of file to write to
1138 If any entry does not have contents yet, this function returns False
1143 bytes: Concatenated data from all the entries (or None)
1144 str: Filename of file written (or None if no data)
1145 str: Unique portion of filename (or None if no data)
# NOTE(review): the initialisation of 'data' (orig. line ~1147,
# presumably an empty bytes object) is not visible in this view
1148 for entry in entries:
1149 # First get the input data and put it in a file. If not available,
# Bail out entirely if any entry's contents are not yet available
1151 if not entry.ObtainContents():
1152 return None, None, None
1153 data += entry.GetData()
1154 uniq = self.GetUniqueName()
1155 fname = tools.get_output_filename(f'{prefix}.{uniq}')
1156 tools.write_file(fname, data)
1157 return data, fname, uniq