blob.py

# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Entry-type module for blobs, which are binary objects read from files
#

from binman.entry import Entry
from binman import state
from dtoc import fdt_util
from patman import tools
from patman import tout

class Entry_blob(Entry):
    """Arbitrary binary blob

    Note: This should not be used by itself. It is normally used as a parent
    class by other entry types.

    Properties / Entry arguments:
        - filename: Filename of file to read into entry
        - compress: Compression algorithm to use:
            none: No compression
            lz4: Use lz4 compression (via 'lz4' command-line utility)

    This entry reads data from a file and places it in the entry. The
    default filename is often specified by the subclass. See for example
    the 'u-boot' entry which provides the filename 'u-boot.bin'.

    If compression is enabled, an extra 'uncomp-size' property is written to
    the node (if enabled with -u) which provides the uncompressed size of the
    data.
    """
    def __init__(self, section, etype, node):
        super().__init__(section, etype, node)
        self._filename = fdt_util.GetString(self._node, 'filename', self.etype)

    def ObtainContents(self):
        self._filename = self.GetDefaultFilename()
        self._pathname = tools.GetInputFilename(self._filename,
            self.external and self.section.GetAllowMissing())
        # Allow the file to be missing
        if not self._pathname:
            self.SetContents(b'')
            self.missing = True
            return True
        self.ReadBlobContents()
        return True

    def ReadBlobContents(self):
        """Read blob contents into memory

        This function compresses the data before storing it, if needed.

        We assume the data is small enough to fit into memory. If this is
        used for a large filesystem image that might not be true. In that
        case, Image.BuildImage() could be adjusted to use a new Entry method
        which can read in chunks. Then we could copy the data in chunks and
        avoid reading it all at once. For now this seems like an unnecessary
        complication.
        """
        state.TimingStart('read')
        indata = tools.ReadFile(self._pathname)
        state.TimingAccum('read')
        state.TimingStart('compress')
        data = self.CompressData(indata)
        state.TimingAccum('compress')
        self.SetContents(data)
        return True

    def GetDefaultFilename(self):
        return self._filename

    def ProcessContents(self):
        # The blob may have changed due to WriteSymbols()
        return self.ProcessContentsUpdate(self.data)
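
The class docstring above notes that Entry_blob is normally used as a parent class, citing the 'u-boot' entry which supplies the filename 'u-boot.bin'. A minimal sketch of such a subclass, written only from that description, might look like the code below; the import path is an assumption based on binman's usual etype layout, and the real u_boot.py entry type differs in its docstring and details.

# Hypothetical minimal subclass in the style the docstring describes;
# not the actual upstream 'u-boot' entry type.
from binman.etype.blob import Entry_blob   # assumed module path

class Entry_u_boot(Entry_blob):
    """U-Boot flat binary, read in as a plain blob"""
    def __init__(self, section, etype, node):
        super().__init__(section, etype, node)

    def GetDefaultFilename(self):
        # Supply a default so the image-description node does not need an
        # explicit 'filename' property.
        return 'u-boot.bin'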