path: root/tools/binman/etype/blob.py
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Entry-type module for blobs, which are binary objects read from files
#

from binman.entry import Entry
from dtoc import fdt_util
from patman import tools
from patman import tout

class Entry_blob(Entry):
    """Entry containing an arbitrary binary blob

    Note: This should not be used by itself. It is normally used as a parent
    class by other entry types.

    Properties / Entry arguments:
        - filename: Filename of file to read into entry
        - compress: Compression algorithm to use:
            none: No compression
            lz4: Use lz4 compression (via 'lz4' command-line utility)

    This entry reads data from a file and places it in the entry. The
    default filename is often specified by the subclass. See for
    example the 'u_boot' entry which provides the filename 'u-boot.bin'.

    If compression is enabled, an extra 'uncomp-size' property is written to
    the node (if enabled with -u), giving the uncompressed size of the data.
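
    For example, an image description might contain a node like this (an
    illustrative sketch only; the filename shown here is hypothetical):

        blob {
            filename = "some-file.bin";
            compress = "lz4";
        };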
    """
    def __init__(self, section, etype, node):
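        """Set up the entry, reading its properties from the device-tree node

        The 'filename' property defaults to the entry-type name if it is not
        given; 'compress' defaults to 'none'.
        """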
        super().__init__(section, etype, node)
        self._filename = fdt_util.GetString(self._node, 'filename', self.etype)
        self.compress = fdt_util.GetString(self._node, 'compress', 'none')

    def ObtainContents(self):
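        """Figure out the contents of the entry by reading its file

        The filename comes from GetDefaultFilename(), which subclasses may
        override. If no input file is found (possible for missing external
        blobs when the section allows that), the contents are set to empty
        and the entry is marked as missing.

        Returns:
            True, meaning the contents are available (possibly empty)
        """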
        self._filename = self.GetDefaultFilename()
        self._pathname = tools.GetInputFilename(self._filename,
            self.external and self.section.GetAllowMissing())
        # Allow the file to be missing
        if not self._pathname:
            self.SetContents(b'')
            self.missing = True
            return True

        self.ReadBlobContents()
        return True

    def CompressData(self, indata):
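        """Compress the entry data if a 'compress' algorithm is set

        When compression is in use, the original length is recorded in
        uncomp_size so that the 'uncomp-size' property can be added to the
        node.

        Args:
            indata: Data to compress

        Returns:
            Data as returned by tools.Compress() (unchanged for 'none')
        """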
        if self.compress != 'none':
            self.uncomp_size = len(indata)
        data = tools.Compress(indata, self.compress)
        return data

    def ReadBlobContents(self):
        """Read blob contents into memory

        This function compresses the data before storing if needed.

        We assume the data is small enough to fit into memory. If this
        is used for a large filesystem image, that might not be true.
        In that case, Image.BuildImage() could be adjusted to use a
        new Entry method which can read in chunks. Then we could copy
        the data in chunks and avoid reading it all at once. For now
        this seems like an unnecessary complication.
        """
        indata = tools.ReadFile(self._pathname)
        data = self.CompressData(indata)
        self.SetContents(data)
        return True

    def GetDefaultFilename(self):
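        """Get the default filename to read (subclasses override this)"""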
        return self._filename
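
# An entry type based on this class typically just supplies its default
# filename, roughly along these lines (a hypothetical sketch, kept as a
# comment; the real 'u_boot' entry type does the equivalent for 'u-boot.bin'):
#
#     class Entry_example_blob(Entry_blob):
#         """Entry containing the example.bin binary"""
#
#         def GetDefaultFilename(self):
#             return 'example.bin'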