1 # SPDX-License-Identifier: GPL-2.0+
2 # Copyright (c) 2016 Google, Inc
4 # Base class for all entries
7 from collections import namedtuple
14 from binman import bintool
15 from binman import comp_util
16 from dtoc import fdt_util
17 from patman import tools
18 from patman.tools import to_hex, to_hex_size
19 from patman import tout
23 # This is imported if needed
26 # An argument which can be passed to entries on the command line, in lieu of
27 # device-tree properties.
28 EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
30 # Information about an entry for use when displaying summaries
31 EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
32 'image_pos', 'uncomp_size', 'offset',
36 """An Entry in the section
38 An entry corresponds to a single node in the device-tree description
39 of the section. Each entry ends up being a part of the final section.
40 Entries can be placed either right next to each other, or with padding
41 between them. The type of the entry determines the data that is in it.
43 This class is not used by itself. All entry objects are subclasses of
47 section: Section object containing this entry
48 node: The node that created this entry
49 offset: Offset of entry within the section, None if not known yet (in
50 which case it will be calculated by Pack())
51 size: Entry size in bytes, None if not known
52 pre_reset_size: size as it was before ResetForPack(). This allows us to
53 keep track of the size we started with and detect size changes
54 uncomp_size: Size of uncompressed data in bytes, if the entry is
56 contents_size: Size of contents in bytes, 0 by default
57 align: Entry start offset alignment relative to the start of the
58 containing section, or None
59 align_size: Entry size alignment, or None
60 align_end: Entry end offset alignment relative to the start of the
61 containing section, or None
62 pad_before: Number of pad bytes before the contents when it is placed
63 in the containing section, 0 if none. The pad bytes become part of
65 pad_after: Number of pad bytes after the contents when it is placed in
66 the containing section, 0 if none. The pad bytes become part of
68 data: Contents of entry (string of bytes). This does not include
69 padding created by pad_before or pad_after. If the entry is
70 compressed, this contains the compressed data.
71 uncomp_data: Original uncompressed data, if this entry is compressed,
73 compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
74 orig_offset: Original offset value read from node
75 orig_size: Original size value read from node
76 missing: True if this entry is missing its contents
77 allow_missing: Allow children of this entry to be missing (used by
78 subclasses such as Entry_section)
79 allow_fake: Allow creating a dummy fake file if the blob file is not
80 available. This is mainly used for testing.
81 external: True if this entry contains an external binary blob
82 bintools: Bintools used by this entry (only populated for Image)
83 missing_bintools: List of missing bintools for this entry
84 update_hash: True if this entry's "hash" subnode should be
85 updated with a hash of the entry contents
86 fake_fname: Fake filename, if one was created, else None
87 required_props (dict of str): Properties which must be present. This can
88 be added to by subclasses
# Constructor: attach a new Entry of type @etype to @section, described by
# device-tree @node, with an optional @name_prefix on the node name.
# NOTE(review): this listing is elided -- several original source lines are
# missing between the numbered lines below, so not every attribute set by
# the full constructor appears here. Code lines are kept verbatim.
92 def __init__(self, section, etype, node, name_prefix=''):
93 # Put this here to allow entry-docs and help to work without libfdt
95 from binman import state
97 self.section = section
100 self.name = node and (name_prefix + node.name) or 'none'
103 self.pre_reset_size = None
104 self.uncomp_size = None
106 self.uncomp_data = None
107 self.contents_size = 0
109 self.align_size = None
110 self.align_end = None
113 self.offset_unset = False
114 self.image_pos = None
115 self.extend_size = False
116 self.compress = 'none'
119 self.external = False
120 self.allow_missing = False
121 self.allow_fake = False
123 self.missing_bintools = []
124 self.update_hash = True
125 self.fake_fname = None
126 self.required_props = []
# Resolve an entry type name to its Entry_xxx class, importing the
# implementing module from binman/etype/ on first use and caching it in
# 'modules'.
# NOTE(review): elided listing -- e.g. the guard that applies the
# '_expanded' suffix (before line 151), the cache-hit check and the 'try:'
# around the import (near line 159) are missing here; code lines are kept
# verbatim.
129 def FindEntryClass(etype, expanded):
130 """Look up the entry class for a node.
133 node_node: Path name of Node object containing information about
134 the entry to create (used for errors)
135 etype: Entry type to use
136 expanded: Use the expanded version of etype
139 The entry class object if found, else None if not found and expanded
140 is True, else a tuple:
141 module name that could not be found
144 # Convert something like 'u-boot@0' to 'u_boot' since we are only
145 # interested in the type.
146 module_name = etype.replace('-', '_')
148 if '@' in module_name:
149 module_name = module_name.split('@')[0]
151 module_name += '_expanded'
152 module = modules.get(module_name)
154 # Also allow entry-type modules to be brought in from the etype directory.
156 # Import the module if we have not already done so.
159 module = importlib.import_module('binman.etype.' + module_name)
160 except ImportError as e:
163 return module_name, e
164 modules[module_name] = module
166 # Look up the expected class name
167 return getattr(module, 'Entry_%s' % module_name)
# Wrapper around FindEntryClass() that optionally falls back to the 'blob'
# etype and raises ValueError with a helpful message on failure.
# NOTE(review): elided listing -- the branches between lines 190 and 193
# (e.g. the success/None cases) and the tuple unpack that binds
# 'module_name' and 'e' used at line 200 are missing here; code lines are
# kept verbatim.
170 def Lookup(node_path, etype, expanded, missing_etype=False):
171 """Look up the entry class for a node.
174 node_path (str): Path name of Node object containing information
175 about the entry to create (used for errors)
176 etype (str): Entry type to use
177 expanded (bool): Use the expanded version of etype
178 missing_etype (bool): True to default to a blob etype if the
179 requested etype is not found
182 The entry class object if found, else None if not found and expanded
186 ValueError if expanded is False and the class is not found
188 # Convert something like 'u-boot@0' to 'u_boot' since we are only
189 # interested in the type.
190 cls = Entry.FindEntryClass(etype, expanded)
193 elif isinstance(cls, tuple):
195 cls = Entry.FindEntryClass('blob', False)
196 if isinstance(cls, tuple): # This should not fail
199 "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
200 (etype, node_path, module_name, e))
# Factory: create the right Entry subclass for a device-tree node,
# preferring an '-expanded' variant when the node and etype allow it.
# NOTE(review): elided listing -- the 'if etype is None:' guard before
# line 221 and the expanded-type re-lookup between lines 227 and 232 are
# missing here; code lines are kept verbatim.
204 def Create(section, node, etype=None, expanded=False, missing_etype=False):
205 """Create a new entry for a node.
208 section (entry_Section): Section object containing this node
209 node (Node): Node object containing information about the entry to
211 etype (str): Entry type to use, or None to work it out (used for
213 expanded (bool): Use the expanded version of etype
214 missing_etype (bool): True to default to a blob etype if the
215 requested etype is not found
218 A new Entry object of the correct type (a subclass of Entry)
221 etype = fdt_util.GetString(node, 'type', node.name)
222 obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
224 # Check whether to use the expanded entry
225 new_etype = etype + '-expanded'
226 can_expand = not fdt_util.GetBool(node, 'no-expanded')
227 if can_expand and obj.UseExpanded(node, etype, new_etype):
232 obj = Entry.Lookup(node.path, etype, False, missing_etype)
234 # Call its constructor to get the object we want.
235 return obj(section, etype, node)
238 """Read entry information from the node
240 This must be called as the first thing after the Entry is created.
242 This reads all the fields we recognise from the node, ready for use.
245 if 'pos' in self._node.props:
246 self.Raise("Please use 'offset' instead of 'pos'")
247 if 'expand-size' in self._node.props:
248 self.Raise("Please use 'extend-size' instead of 'expand-size'")
249 self.offset = fdt_util.GetInt(self._node, 'offset')
250 self.size = fdt_util.GetInt(self._node, 'size')
251 self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
252 self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
253 if self.GetImage().copy_to_orig:
254 self.orig_offset = self.offset
255 self.orig_size = self.size
257 # These should not be set in input files, but are set in an FDT map,
258 # which is also read by this code.
259 self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
260 self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
262 self.align = fdt_util.GetInt(self._node, 'align')
263 if tools.not_power_of_two(self.align):
264 raise ValueError("Node '%s': Alignment %s must be a power of two" %
265 (self._node.path, self.align))
266 if self.section and self.align is None:
267 self.align = self.section.align_default
268 self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
269 self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
270 self.align_size = fdt_util.GetInt(self._node, 'align-size')
271 if tools.not_power_of_two(self.align_size):
272 self.Raise("Alignment size %s must be a power of two" %
274 self.align_end = fdt_util.GetInt(self._node, 'align-end')
275 self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
276 self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
277 self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
279 # This is only supported by blobs and sections at present
280 self.compress = fdt_util.GetString(self._node, 'compress', 'none')
282 def GetDefaultFilename(self):
286 """Get the device trees used by this entry
289 Empty dict, if this entry is not a .dtb, otherwise:
291 key: Filename from this entry (without the path)
293 Entry object for this dtb
294 Filename of file containing this dtb
298 def gen_entries(self):
299 """Allow entries to generate other entries
301 Some entries generate subnodes automatically, from which sub-entries
302 are then created. This method allows those to be added to the binman
303 definition for the current image. An entry which implements this method
304 should call state.AddSubnode() to add a subnode and can add properties
305 with state.AddString(), etc.
307 An example is 'files', which produces a section containing a list of
312 def AddMissingProperties(self, have_image_pos):
313 """Add new properties to the device tree as needed for this entry
316 have_image_pos: True if this entry has an image position. This can
317 be False if its parent section is compressed, since compression
318 groups all entries together into a compressed block of data,
319 obscuring the start of each individual child entry
321 for prop in ['offset', 'size']:
322 if not prop in self._node.props:
323 state.AddZeroProp(self._node, prop)
324 if have_image_pos and 'image-pos' not in self._node.props:
325 state.AddZeroProp(self._node, 'image-pos')
326 if self.GetImage().allow_repack:
327 if self.orig_offset is not None:
328 state.AddZeroProp(self._node, 'orig-offset', True)
329 if self.orig_size is not None:
330 state.AddZeroProp(self._node, 'orig-size', True)
332 if self.compress != 'none':
333 state.AddZeroProp(self._node, 'uncomp-size')
336 err = state.CheckAddHashProp(self._node)
340 def SetCalculatedProperties(self):
341 """Set the value of device-tree properties calculated by binman"""
342 state.SetInt(self._node, 'offset', self.offset)
343 state.SetInt(self._node, 'size', self.size)
344 base = self.section.GetRootSkipAtStart() if self.section else 0
345 if self.image_pos is not None:
346 state.SetInt(self._node, 'image-pos', self.image_pos - base)
347 if self.GetImage().allow_repack:
348 if self.orig_offset is not None:
349 state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
350 if self.orig_size is not None:
351 state.SetInt(self._node, 'orig-size', self.orig_size, True)
352 if self.uncomp_size is not None:
353 state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
356 state.CheckSetHashValue(self._node, self.GetData)
358 def ProcessFdt(self, fdt):
359 """Allow entries to adjust the device tree
361 Some entries need to adjust the device tree for their purposes. This
362 may involve adding or deleting properties.
365 True if processing is complete
366 False if processing could not be completed due to a dependency.
367 This will cause the entry to be retried after others have been
372 def SetPrefix(self, prefix):
373 """Set the name prefix for a node
376 prefix: Prefix to set, or '' to not use a prefix
379 self.name = prefix + self.name
381 def SetContents(self, data):
382 """Set the contents of an entry
384 This sets both the data and content_size properties
387 data: Data to set to the contents (bytes)
390 self.contents_size = len(self.data)
# Replace the entry contents after packing, policing size changes: grow or
# shrink only when the global state allows it, otherwise pad or raise.
# NOTE(review): elided listing -- the lines computing 'new_size' and
# initialising/assigning 'size_ok' (used at line 419) are missing here;
# code lines are kept verbatim.
392 def ProcessContentsUpdate(self, data):
393 """Update the contents of an entry, after the size is fixed
395 This checks that the new data is the same size as the old. If the size
396 has changed, this triggers a re-run of the packing algorithm.
399 data: Data to set to the contents (bytes)
402 ValueError if the new data size is not the same as the old
406 if state.AllowEntryExpansion() and new_size > self.contents_size:
407 # self.data will indicate the new size needed
409 elif state.AllowEntryContraction() and new_size < self.contents_size:
412 # If not allowed to change, try to deal with it or give up
414 if new_size > self.contents_size:
415 self.Raise('Cannot update entry size from %d to %d' %
416 (self.contents_size, new_size))
418 # Don't let the data shrink. Pad it if necessary
419 if size_ok and new_size < self.contents_size:
420 data += tools.get_bytes(0, self.contents_size - new_size)
423 tout.debug("Entry '%s' size change from %s to %s" % (
424 self._node.path, to_hex(self.contents_size),
426 self.SetContents(data)
429 def ObtainContents(self, skip_entry=None, fake_size=0):
430 """Figure out the contents of an entry.
433 skip_entry (Entry): Entry to skip when obtaining section contents
434 fake_size (int): Size of fake file to create if needed
437 True if the contents were found, False if another call is needed
438 after the other entries are processed.
440 # No contents by default: subclasses can implement this
def ResetForPack(self):
    """Restore offset/size to their original values, ready for repacking"""
    self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
                (to_hex(self.offset), to_hex(self.orig_offset),
                 to_hex(self.size), to_hex(self.orig_size)))
    # Remember the size we had before the reset so that a later size
    # change can be detected
    self.pre_reset_size = self.size
    self.offset, self.size = self.orig_offset, self.orig_size
# Assign this entry's offset/size within its section, honouring align,
# align-size, align-end and padding, then sanity-check the result.
# NOTE(review): elided listing -- the lines choosing 'size' from 'needed'
# vs an explicit size (between lines 479 and 483), the assignment of
# self.size (near line 488-491) and the final 'return new_offset' are
# missing here; code lines are kept verbatim.
452 def Pack(self, offset):
453 """Figure out how to pack the entry into the section
455 Most of the time the entries are not fully specified. There may be
456 an alignment but no size. In that case we take the size from the
457 contents of the entry.
459 If an entry has no hard-coded offset, it will be placed at @offset.
461 Once this function is complete, both the offset and size of the
465 Current section offset pointer
468 New section offset pointer (after this entry)
470 self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
471 (to_hex(self.offset), to_hex(self.size),
473 if self.offset is None:
474 if self.offset_unset:
475 self.Raise('No offset set with offset-unset: should another '
476 'entry provide this correct offset?')
477 self.offset = tools.align(offset, self.align)
478 needed = self.pad_before + self.contents_size + self.pad_after
479 needed = tools.align(needed, self.align_size)
483 new_offset = self.offset + size
484 aligned_offset = tools.align(new_offset, self.align_end)
485 if aligned_offset != new_offset:
486 size = aligned_offset - self.offset
487 new_offset = aligned_offset
492 if self.size < needed:
493 self.Raise("Entry contents size is %#x (%d) but entry size is "
494 "%#x (%d)" % (needed, needed, self.size, self.size))
495 # Check that the alignment is correct. It could be wrong if the
496 # offset or size values were provided (i.e. not calculated), but
497 # conflict with the provided alignment values
498 if self.size != tools.align(self.size, self.align_size):
499 self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
500 (self.size, self.size, self.align_size, self.align_size))
501 if self.offset != tools.align(self.offset, self.align):
502 self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
503 (self.offset, self.offset, self.align, self.align))
504 self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
505 (self.offset, self.size, self.contents_size, new_offset))
def Raise(self, msg):
    """Raise a ValueError tagged with this entry's node path"""
    raise ValueError(f"Node '{self._node.path}': {msg}")
514 """Convenience function to log info referencing a node"""
515 tag = "Info '%s'" % self._node.path
516 tout.detail('%30s: %s' % (tag, msg))
def Detail(self, msg):
    """Log a detail-level message tagged with this entry's node path

    Args:
        msg: Message text to log
    """
    tout.detail('%30s: %s' % ("Node '%s'" % self._node.path, msg))
523 def GetEntryArgsOrProps(self, props, required=False):
524 """Return the values of a set of properties
527 props: List of EntryArg objects
530 ValueError if a property is not found
535 python_prop = prop.name.replace('-', '_')
536 if hasattr(self, python_prop):
537 value = getattr(self, python_prop)
541 value = self.GetArg(prop.name, prop.datatype)
542 if value is None and required:
543 missing.append(prop.name)
546 self.GetImage().MissingArgs(self, missing)
550 """Get the path of a node
553 Full path of the node for this entry
555 return self._node.path
557 def GetData(self, required=True):
558 """Get the contents of an entry
561 required: True if the data must be present, False if it is OK to
565 bytes content of the entry, excluding any padding. If the entry is
566 compressed, the compressed data is returned
568 self.Detail('GetData: size %s' % to_hex_size(self.data))
571 def GetPaddedData(self, data=None):
572 """Get the data for an entry including any padding
574 Gets the entry data and uses its section's pad-byte value to add padding
575 before and after as defined by the pad-before and pad-after properties.
577 This does not consider alignment.
580 Contents of the entry along with any pad bytes before and
584 data = self.GetData()
585 return self.section.GetPaddedDataForEntry(self, data)
587 def GetOffsets(self):
588 """Get the offsets for siblings
590 Some entry types can contain information about the position or size of
591 other entries. An example of this is the Intel Flash Descriptor, which
592 knows where the Intel Management Engine section should go.
594 If this entry knows about the position of other entries, it can specify
595 this by returning values here
600 value: List containing position and size of the given entry
601 type. Either can be None if not known
605 def SetOffsetSize(self, offset, size):
606 """Set the offset and/or size of an entry
609 offset: New offset, or None to leave alone
610 size: New size, or None to leave alone
612 if offset is not None:
def SetImagePos(self, image_pos):
    """Record this entry's absolute position within the image

    Args:
        image_pos: Image position of the containing section; the entry's
            own offset is added to this
    """
    self.image_pos = self.offset + image_pos
625 def ProcessContents(self):
626 """Do any post-packing updates of entry contents
628 This function should call ProcessContentsUpdate() to update the entry
629 contents, if necessary, returning its return value here.
632 data: Data to set to the contents (bytes)
635 True if the new data size is OK, False if expansion is needed
638 ValueError if the new data size is not the same as the old and
639 state.AllowEntryExpansion() is False
643 def WriteSymbols(self, section):
644 """Write symbol values into binary files for access at run time
647 section: Section containing the entry
651 def CheckEntries(self):
652 """Check that the entry offsets are correct
654 This is used for entries which have extra offset requirements (other
655 than having to be fully inside their section). Sub-classes can implement
656 this function and raise if there is a problem.
664 return '%08x' % value
667 def WriteMapLine(fd, indent, name, offset, size, image_pos):
668 print('%s %s%s %s %s' % (Entry.GetStr(image_pos), ' ' * indent,
669 Entry.GetStr(offset), Entry.GetStr(size),
672 def WriteMap(self, fd, indent):
673 """Write a map of the entry to a .map file
676 fd: File to write the map to
677 indent: Current indent level of map (0=none, 1=one level, etc.)
679 self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
682 # pylint: disable=assignment-from-none
683 def GetEntries(self):
684 """Return a list of entries contained by this entry
687 List of entries, or None if none. A normal entry has no entries
688 within it so will return None
692 def FindEntryByNode(self, find_node):
693 """Find a node in an entry, searching all subentries
695 This does a recursive search.
698 find_node (fdt.Node): Node to find
701 Entry: entry, if found, else None
703 entries = self.GetEntries()
705 for entry in entries.values():
706 if entry._node == find_node:
708 found = entry.FindEntryByNode(find_node)
714 def GetArg(self, name, datatype=str):
715 """Get the value of an entry argument or device-tree-node property
717 Some node properties can be provided as arguments to binman. First check
718 the entry arguments, and fall back to the device tree if not found
722 datatype: Data type (str or int)
725 Value of argument as a string or int, or None if no value
728 ValueError if the argument cannot be converted to an int
730 value = state.GetEntryArg(name)
731 if value is not None:
736 self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
738 elif datatype == str:
741 raise ValueError("GetArg() internal error: Unknown data type '%s'" %
744 value = fdt_util.GetDatatype(self._node, name, datatype)
748 def WriteDocs(modules, test_missing=None):
749 """Write out documentation about the various entry types to stdout
752 modules: List of modules to include
753 test_missing: Used for testing. This is a module to report
756 print('''Binman Entry Documentation
757 ===========================
759 This file describes the entry types supported by binman. These entry types can
760 be placed in an image one by one to build up a final firmware image. It is
761 fairly easy to create new entry types. Just add a new file to the 'etype'
762 directory. You can use the existing entries as examples.
764 Note that some entries are subclasses of others, using and extending their
765 features to produce new behaviours.
769 modules = sorted(modules)
771 # Don't show the test entry
772 if '_testing' in modules:
773 modules.remove('_testing')
776 module = Entry.Lookup('WriteDocs', name, False)
777 docs = getattr(module, '__doc__')
778 if test_missing == name:
781 lines = docs.splitlines()
782 first_line = lines[0]
783 rest = [line[4:] for line in lines[1:]]
784 hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
786 # Create a reference for use by rST docs
787 ref_name = f'etype_{module.__name__[6:]}'.lower()
788 print('.. _%s:' % ref_name)
791 print('-' * len(hdr))
792 print('\n'.join(rest))
799 raise ValueError('Documentation is missing for modules: %s' %
802 def GetUniqueName(self):
803 """Get a unique name for a node
806 String containing a unique name for a node, consisting of the name
807 of all ancestors (starting from within the 'binman' node) separated
808 by a dot ('.'). This can be useful for generating unique filenames
809 in the output directory.
815 if node.name in ('binman', '/'):
817 name = '%s.%s' % (node.name, name)
def extend_to_limit(self, limit):
    """Grow this entry so that it finishes exactly at the given limit"""
    end = self.offset + self.size
    if end < limit:
        self.size = limit - self.offset
        # The contents must be regenerated at the new size. This is
        # expected to succeed, but verify it to be safe.
        if not self.ObtainContents():
            self.Raise('Cannot obtain contents when expanding entry')
def HasSibling(self, name):
    """Check whether an entry with the given name exists in this entry's
    containing section

    Returns:
        True if the section holds an entry called @name, False if not
    """
    siblings = self.section.GetEntries()
    return name in siblings
# Look up a sibling entry by name and report its image position.
# NOTE(review): elided listing -- the early-return line for the
# no-such-sibling case (presumably 'return False', original line 846) is
# missing here; code lines are kept verbatim.
838 def GetSiblingImagePos(self, name):
839 """Return the image position of the given sibling
842 Image position of sibling, or None if the sibling has no position,
843 or False if there is no such sibling
845 if not self.HasSibling(name):
847 return self.section.GetEntries()[name].image_pos
850 def AddEntryInfo(entries, indent, name, etype, size, image_pos,
851 uncomp_size, offset, entry):
852 """Add a new entry to the entries list
855 entries: List (of EntryInfo objects) to add to
856 indent: Current indent level to add to list
857 name: Entry name (string)
858 etype: Entry type (string)
859 size: Entry size in bytes (int)
860 image_pos: Position within image in bytes (int)
861 uncomp_size: Uncompressed size if the entry uses compression, else
863 offset: Entry offset within parent in bytes (int)
866 entries.append(EntryInfo(indent, name, etype, size, image_pos,
867 uncomp_size, offset, entry))
869 def ListEntries(self, entries, indent):
870 """Add files in this entry to the list of entries
872 This can be overridden by subclasses which need different behaviour.
875 entries: List (of EntryInfo objects) to add to
876 indent: Current indent level to add to list
878 self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
879 self.image_pos, self.uncomp_size, self.offset, self)
881 def ReadData(self, decomp=True, alt_format=None):
882 """Read the data for an entry from the image
884 This is used when the image has been read in and we want to extract the
885 data for a particular entry from that image.
888 decomp: True to decompress any compressed data before returning it;
889 False to return the raw, uncompressed data
894 # Use True here so that we get an uncompressed section to work from,
895 # although compressed sections are currently not supported
896 tout.debug("ReadChildData section '%s', entry '%s'" %
897 (self.section.GetPath(), self.GetPath()))
898 data = self.section.ReadChildData(self, decomp, alt_format)
901 def ReadChildData(self, child, decomp=True, alt_format=None):
902 """Read the data for a particular child entry
904 This reads data from the parent and extracts the piece that relates to
908 child (Entry): Child entry to read data for (must be valid)
909 decomp (bool): True to decompress any compressed data before
910 returning it; False to return the raw, uncompressed data
911 alt_format (str): Alternative format to read in, or None
914 Data for the child (bytes)
def LoadData(self, decomp=True):
    """Read this entry's data back from the image and store it

    Args:
        decomp: True to decompress the data if it is compressed
    """
    loaded = self.ReadData(decomp)
    size = len(loaded)
    self.contents_size = size
    self.ProcessContentsUpdate(loaded)
    self.Detail('Loaded data size %x' % size)
924 def GetAltFormat(self, data, alt_format):
925 """Read the data for an extry in an alternative format
927 Supported formats are list in the documentation for each entry. An
928 example is fdtmap which provides .
931 data (bytes): Data to convert (this should have been produced by the
933 alt_format (str): Format to use
939 """Get the image containing this entry
942 Image object containing this entry
944 return self.section.GetImage()
946 def WriteData(self, data, decomp=True):
947 """Write the data to an entry in the image
949 This is used when the image has been read in and we want to replace the
950 data for a particular entry in that image.
952 The image must be re-packed and written out afterwards.
955 data: Data to replace it with
956 decomp: True to compress the data if needed, False if data is
957 already compressed so should be used as is
960 True if the data did not result in a resize of this entry, False if
961 the entry must be resized
963 if self.size is not None:
964 self.contents_size = self.size
966 self.contents_size = self.pre_reset_size
967 ok = self.ProcessContentsUpdate(data)
968 self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
969 section_ok = self.section.WriteChildData(self)
970 return ok and section_ok
972 def WriteChildData(self, child):
973 """Handle writing the data in a child entry
975 This should be called on the child's parent section after the child's
976 data has been updated. It should update any data structures needed to
977 validate that the update is successful.
979 This base-class implementation does nothing, since the base Entry object
980 does not have any children.
983 child: Child Entry that was written
986 True if the section could be updated successfully, False if the
987 data is such that the section could not update
991 def GetSiblingOrder(self):
992 """Get the relative order of an entry amoung its siblings
995 'start' if this entry is first among siblings, 'end' if last,
998 entries = list(self.section.GetEntries().values())
1000 if self == entries[0]:
1002 elif self == entries[-1]:
1006 def SetAllowMissing(self, allow_missing):
1007 """Set whether a section allows missing external blobs
1010 allow_missing: True if allowed, False if not allowed
1012 # This is meaningless for anything other than sections
def SetAllowFakeBlob(self, allow_fake):
    """Set whether this entry may substitute a fake file for a missing blob

    Args:
        allow_fake: True to permit faking, False to forbid it
    """
    self.allow_fake = allow_fake
1023 def CheckMissing(self, missing_list):
1024 """Check if any entries in this section have missing external blobs
1026 If there are missing blobs, the entries are added to the list
1029 missing_list: List of Entry objects to be added to
1032 missing_list.append(self)
# If @fname does not exist and faking is allowed, create (once) a fake
# file in self.fake_dir and return its path.
# NOTE(review): elided listing -- the write of the fake file's contents
# inside the 'with' block (original line 1052) and the not-faked return
# path (after line 1056) are missing here; code lines are kept verbatim.
1034 def check_fake_fname(self, fname, size=0):
1035 """If the file is missing and the entry allows fake blobs, fake it
1037 Sets self.faked to True if faked
1040 fname (str): Filename to check
1041 size (int): Size of fake file to create
1045 fname (str): Filename of faked file
1046 bool: True if the blob was faked, False if not
1048 if self.allow_fake and not pathlib.Path(fname).is_file():
1049 if not self.fake_fname:
1050 outfname = os.path.join(self.fake_dir, os.path.basename(fname))
1051 with open(outfname, "wb") as out:
1053 tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
1054 self.fake_fname = outfname
1056 return self.fake_fname, True
1059 def CheckFakedBlobs(self, faked_blobs_list):
1060 """Check if any entries in this section have faked external blobs
1062 If there are faked blobs, the entries are added to the list
1065 fake_blobs_list: List of Entry objects to be added to
1067 # This is meaningless for anything other than blobs
def GetAllowMissing(self):
    """Report whether this entry tolerates missing external blobs

    Returns:
        True if missing blobs are allowed, False if not
    """
    return self.allow_missing
def record_missing_bintool(self, bintool):
    """Note that a bintool needed by this entry was not available

    Args:
        bintool (Bintool): The tool that was missing
    """
    known = self.missing_bintools
    if bintool not in known:
        known.append(bintool)
def check_missing_bintools(self, missing_list):
    """Merge this entry's missing bintools into the given list

    Args:
        missing_list: List of Bintool objects, updated in place; each of
            this entry's missing tools is appended unless already present
    """
    for tool in self.missing_bintools:
        if tool in missing_list:
            continue
        missing_list.append(tool)
def GetHelpTags(self):
    """Collect the tags used to look up missing-blob help text

    Returns:
        list of truthy tags, most desirable first: the entry's
        missing-msg, then its name, then its etype
    """
    return [tag for tag in (self.missing_msg, self.name, self.etype) if tag]
# Compress @indata with this entry's configured algorithm, recording the
# uncompressed data/size for later reporting.
# NOTE(review): elided listing -- the final 'return data' (and any
# no-compression pass-through, after original line 1120) is missing here;
# code lines are kept verbatim.
1108 def CompressData(self, indata):
1109 """Compress data according to the entry's compression method
1112 indata: Data to compress
1115 Compressed data (first word is the compressed size)
1117 self.uncomp_data = indata
1118 if self.compress != 'none':
1119 self.uncomp_size = len(indata)
1120 data = comp_util.compress(indata, self.compress)
# Decompress @indata with this entry's configured algorithm, recording
# the uncompressed data/size.
# NOTE(review): elided listing -- the final 'return data' (after original
# line 1135) is missing here; code lines are kept verbatim.
1123 def DecompressData(self, indata):
1124 """Decompress data according to the entry's compression method
1127 indata: Data to decompress
1132 data = comp_util.decompress(indata, self.compress)
1133 if self.compress != 'none':
1134 self.uncomp_size = len(data)
1135 self.uncomp_data = data
1139 def UseExpanded(cls, node, etype, new_etype):
1140 """Check whether to use an expanded entry type
1142 This is called by Entry.Create() when it finds an expanded version of
1143 an entry type (e.g. 'u-boot-expanded'). If this method returns True then
1144 it will be used (e.g. in place of 'u-boot'). If it returns False, it is
1148 node: Node object containing information about the entry to
1150 etype: Original entry type being used
1151 new_etype: New entry type proposed
1154 True to use this entry type, False to use the original one
1156 tout.info("Node '%s': etype '%s': %s selected" %
1157 (node.path, etype, new_etype))
1160 def CheckAltFormats(self, alt_formats):
1161 """Add any alternative formats supported by this entry type
1164 alt_formats (dict): Dict to add alt_formats to:
1165 key: Name of alt format
1170 def AddBintools(self, btools):
1171 """Add the bintools used by this entry type
1174 btools (dict of Bintool):
1179 def AddBintool(self, tools, name):
1180 """Add a new bintool to the tools used by this etype
1183 name: Name of the tool
1185 btool = bintool.Bintool.create(name)
def SetUpdateHash(self, update_hash):
    """Control whether this entry's "hash" subnode is refreshed

    Args:
        update_hash: True to update the hash subnode with the entry's
            contents, False to leave it alone
    """
    self.update_hash = update_hash
# Concatenate the contents of @entries and write the result to a uniquely
# named output file; bail out with Nones if any entry is not ready.
# NOTE(review): elided listing -- the initialisation of 'data' (an empty
# byte buffer, before original line 1215) and part of the in-loop comment
# are missing here; code lines are kept verbatim.
1197 def collect_contents_to_file(self, entries, prefix, fake_size=0):
1198 """Put the contents of a list of entries into a file
1201 entries (list of Entry): Entries to collect
1202 prefix (str): Filename prefix of file to write to
1203 fake_size (int): Size of fake file to create if needed
1205 If any entry does not have contents yet, this function returns False
1210 bytes: Concatenated data from all the entries (or None)
1211 str: Filename of file written (or None if no data)
1212 str: Unique portion of filename (or None if no data)
1215 for entry in entries:
1216 # First get the input data and put it in a file. If not available,
1218 if not entry.ObtainContents(fake_size=fake_size):
1219 return None, None, None
1220 data += entry.GetData()
1221 uniq = self.GetUniqueName()
1222 fname = tools.get_output_filename(f'{prefix}.{uniq}')
1223 tools.write_file(fname, data)
1224 return data, fname, uniq
def create_fake_dir(cls):
    """Create (if not already present) the output directory for fake blobs"""
    fake_dir = tools.get_output_filename('binman-fake')
    cls.fake_dir = fake_dir
    if not os.path.exists(fake_dir):
        os.mkdir(fake_dir)
    tout.notice(f"Fake-blob dir is '{fake_dir}'")
# Verify that all of self.required_props exist on this entry's node,
# raising via self.Raise() if any are absent.
# NOTE(review): elided listing -- the initialisation of 'not_present' (an
# empty list, before original line 1244) and the 'if not_present:' guard
# before the Raise at line 1248 are missing here; code lines are kept
# verbatim.
1234 def ensure_props(self):
1235 """Raise an exception if properties are missing
1238 prop_list (list of str): List of properties to check for
1241 ValueError: Any property is missing
1244 for prop in self.required_props:
1245 if not prop in self._node.props:
1246 not_present.append(prop)
1248 self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")