author     Simon Glass <sjg@chromium.org>    2019-07-20 12:24:06 -0600
committer  Simon Glass <sjg@chromium.org>    2019-07-29 09:38:06 -0600
commit     eb0f4a4cb40264a90a91e10e3ec00d1e0da7fb66 (patch)
tree       d317dd48088c9c20af20e1e85e4890640cedfd0d /tools
parent     7210c89eac7fb68947f5f31372d9f1e93f42df0c (diff)
download   u-boot-eb0f4a4cb40264a90a91e10e3ec00d1e0da7fb66.tar.gz
binman: Support replacing data in a cbfs
At present binman cannot replace data within a CBFS since it does not allow rewriting of the files in that CBFS. Implement this by using the new WriteData() method to handle the case.

Add a header to compressed data so that the amount of compressed data can be determined without reference to the size of the containing entry. This allows the entry to be larger than the contents, without causing errors in decompression. This is necessary to cope with a compressed device tree being updated in such a way that it shrinks after the entry size is already set (an obscure case). It is not used with CBFS since it has its own metadata for this.

Increase the number of passes allowed to resolve the position of entries, to handle this case.

Add a test for this new logic.

Signed-off-by: Simon Glass <sjg@chromium.org>
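The length header can be sketched in a few lines. This mirrors the struct.pack('<I', ...) prefix added to tools/patman/tools.py below; the helper names here are illustrative only, not binman API:

import struct

def add_length_header(compressed):
    # Prefix the compressed payload with its length as a 32-bit
    # little-endian value, matching struct.pack('<I', ...) in tools.py.
    return struct.pack('<I', len(compressed)) + compressed

def strip_length_header(entry_data):
    # Read back the stored length and ignore any padding that follows,
    # e.g. when the entry was sized before its contents shrank.
    data_len = struct.unpack('<I', entry_data[:4])[0]
    return entry_data[4:4 + data_len]

payload = b'\x00' * 20                               # stand-in for lz4/lzma output
entry = add_length_header(payload) + b'\xff' * 12    # entry padded beyond contents
assert strip_length_header(entry) == payload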
Diffstat (limited to 'tools')
-rw-r--r--  tools/binman/cbfs_util.py                10
-rw-r--r--  tools/binman/control.py                   2
-rw-r--r--  tools/binman/entry_test.py                5
-rw-r--r--  tools/binman/etype/cbfs.py                3
-rw-r--r--  tools/binman/ftest.py                    28
-rw-r--r--  tools/binman/test/142_replace_cbfs.dts   37
-rw-r--r--  tools/patman/tools.py                    11
7 files changed, 87 insertions, 9 deletions
diff --git a/tools/binman/cbfs_util.py b/tools/binman/cbfs_util.py
index 6d9a876ee8..99d77878c9 100644
--- a/tools/binman/cbfs_util.py
+++ b/tools/binman/cbfs_util.py
@@ -208,6 +208,7 @@ class CbfsFile(object):
cbfs_offset: Offset of file data in bytes from start of CBFS, or None to
place this file anyway
data: Contents of file, uncompressed
+ orig_data: Original data added to the file, possibly compressed
data_len: Length of (possibly compressed) data in bytes
ftype: File type (TYPE_...)
compression: Compression type (COMPRESS_...)
@@ -226,6 +227,7 @@ class CbfsFile(object):
self.offset = None
self.cbfs_offset = cbfs_offset
self.data = data
+ self.orig_data = data
self.ftype = ftype
self.compress = compress
self.memlen = None
@@ -240,9 +242,9 @@ class CbfsFile(object):
"""Handle decompressing data if necessary"""
indata = self.data
if self.compress == COMPRESS_LZ4:
- data = tools.Decompress(indata, 'lz4')
+ data = tools.Decompress(indata, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
- data = tools.Decompress(indata, 'lzma')
+ data = tools.Decompress(indata, 'lzma', with_header=False)
else:
data = indata
self.memlen = len(data)
@@ -361,9 +363,9 @@ class CbfsFile(object):
elif self.ftype == TYPE_RAW:
orig_data = data
if self.compress == COMPRESS_LZ4:
- data = tools.Compress(orig_data, 'lz4')
+ data = tools.Compress(orig_data, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
- data = tools.Compress(orig_data, 'lzma')
+ data = tools.Compress(orig_data, 'lzma', with_header=False)
self.memlen = len(orig_data)
self.data_len = len(data)
attr = struct.pack(ATTR_COMPRESSION_FORMAT,
diff --git a/tools/binman/control.py b/tools/binman/control.py
index 22e3e306e5..9c8bc6253f 100644
--- a/tools/binman/control.py
+++ b/tools/binman/control.py
@@ -311,7 +311,7 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
# since changing an offset from 0x100 to 0x104 (for example) can
# alter the compressed size of the device tree. So we need a
# third pass for this.
- passes = 3
+ passes = 5
for pack_pass in range(passes):
try:
image.PackEntries()
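A toy illustration (plain Python, not binman code) of why several packing passes may be needed: the compressed device tree records offsets that are only known after packing, so packing and compression must be iterated to a fixed point.

import zlib

def compressed_size(offset_value):
    # The device tree stores the offsets of other entries, so its
    # compressed size depends on values only known after packing.
    blob = b'dtb contents ' * 50 + str(offset_value).encode()
    return len(zlib.compress(blob))

offset = 0x100
for pack_pass in range(5):
    # Repack using the size from the previous pass; stop once the
    # offset (and hence the compressed size) no longer changes.
    new_offset = 0x100 + compressed_size(offset)
    if new_offset == offset:
        break
    offset = new_offset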
diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py
index ee729f3751..cc1fb795da 100644
--- a/tools/binman/entry_test.py
+++ b/tools/binman/entry_test.py
@@ -92,6 +92,11 @@ class TestEntry(unittest.TestCase):
dtb = entry.Entry.Create(None, self.GetNode(), 'u-boot-dtb')
self.assertEqual('u-boot-dtb', dtb.GetFdtEtype())
+ def testWriteChildData(self):
+ """Test the WriteChildData() method of the base class"""
+ base = entry.Entry.Create(None, self.GetNode(), 'blob-dtb')
+ self.assertTrue(base.WriteChildData(base))
+
if __name__ == "__main__":
unittest.main()
diff --git a/tools/binman/etype/cbfs.py b/tools/binman/etype/cbfs.py
index 0109fdbb91..28a9c81a8a 100644
--- a/tools/binman/etype/cbfs.py
+++ b/tools/binman/etype/cbfs.py
@@ -168,6 +168,7 @@ class Entry_cbfs(Entry):
self._cbfs_arg = fdt_util.GetString(node, 'cbfs-arch', 'x86')
self._cbfs_entries = OrderedDict()
self._ReadSubnodes()
+ self.reader = None
def ObtainContents(self, skip=None):
arch = cbfs_util.find_arch(self._cbfs_arg)
@@ -202,7 +203,7 @@ class Entry_cbfs(Entry):
def _ReadSubnodes(self):
"""Read the subnodes to find out what should go in this IFWI"""
for node in self._node.subnodes:
- entry = Entry.Create(self.section, node)
+ entry = Entry.Create(self, node)
entry.ReadNode()
entry._cbfs_name = fdt_util.GetString(node, 'cbfs-name', entry.name)
entry._type = fdt_util.GetString(node, 'cbfs-type')
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index d1ecd65c2c..04bd9f886c 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -2485,7 +2485,7 @@ class TestFunctional(unittest.TestCase):
def testExtractCbfsRaw(self):
"""Test extracting CBFS compressed data without decompressing it"""
data = self._RunExtractCmd('section/cbfs/u-boot-dtb', decomp=False)
- dtb = tools.Decompress(data, 'lzma')
+ dtb = tools.Decompress(data, 'lzma', with_header=False)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
def testExtractBadEntry(self):
@@ -2984,6 +2984,32 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(0xff800000, desc.offset);
self.assertEqual(0xff800000, desc.image_pos);
+ def testReplaceCbfs(self):
+ """Test replacing a single file in CBFS without changing the size"""
+ self._CheckLz4()
+ expected = b'x' * len(U_BOOT_DATA)
+ data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
+ updated_fname = tools.GetOutputFilename('image-updated.bin')
+ tools.WriteFile(updated_fname, data)
+ entry_name = 'section/cbfs/u-boot'
+ control.WriteEntry(updated_fname, entry_name, expected,
+ allow_resize=True)
+ data = control.ReadEntry(updated_fname, entry_name)
+ self.assertEqual(expected, data)
+
+ def testReplaceResizeCbfs(self):
+ """Test replacing a single file in CBFS with one of a different size"""
+ self._CheckLz4()
+ expected = U_BOOT_DATA + b'x'
+ data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
+ updated_fname = tools.GetOutputFilename('image-updated.bin')
+ tools.WriteFile(updated_fname, data)
+ entry_name = 'section/cbfs/u-boot'
+ control.WriteEntry(updated_fname, entry_name, expected,
+ allow_resize=True)
+ data = control.ReadEntry(updated_fname, entry_name)
+ self.assertEqual(expected, data)
+
if __name__ == "__main__":
unittest.main()
diff --git a/tools/binman/test/142_replace_cbfs.dts b/tools/binman/test/142_replace_cbfs.dts
new file mode 100644
index 0000000000..d64142f9d5
--- /dev/null
+++ b/tools/binman/test/142_replace_cbfs.dts
@@ -0,0 +1,37 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ size = <0xe00>;
+ allow-repack;
+ u-boot {
+ };
+ section {
+ align = <0x100>;
+ cbfs {
+ size = <0x400>;
+ u-boot {
+ cbfs-type = "raw";
+ };
+ u-boot-dtb {
+ cbfs-type = "raw";
+ cbfs-compress = "lzma";
+ cbfs-offset = <0x80>;
+ };
+ };
+ u-boot-dtb {
+ compress = "lz4";
+ };
+ };
+ fdtmap {
+ };
+ image-header {
+ location = "end";
+ };
+ };
+};
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index f492dc8f8e..d615227482 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -9,6 +9,7 @@ import command
import glob
import os
import shutil
+import struct
import sys
import tempfile
@@ -377,7 +378,7 @@ def ToBytes(string):
return string.encode('utf-8')
return string
-def Compress(indata, algo):
+def Compress(indata, algo, with_header=True):
"""Compress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
@@ -408,9 +409,12 @@ def Compress(indata, algo):
data = Run('gzip', '-c', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
+ if with_header:
+ hdr = struct.pack('<I', len(data))
+ data = hdr + data
return data
-def Decompress(indata, algo):
+def Decompress(indata, algo, with_header=True):
"""Decompress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
@@ -428,6 +432,9 @@ def Decompress(indata, algo):
"""
if algo == 'none':
return indata
+ if with_header:
+ data_len = struct.unpack('<I', indata[:4])[0]
+ indata = indata[4:4 + data_len]
fname = GetOutputFilename('%s.decomp.tmp' % algo)
with open(fname, 'wb') as fd:
fd.write(indata)
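A usage sketch of the new with_header parameter (assuming tools/patman is on sys.path and the lz4 binary is installed; this is an illustration, not a test from the patch):

import sys
sys.path.insert(0, 'tools/patman')    # assumption: run from the U-Boot tree
import tools

tools.PrepareOutputDir(None)          # Compress()/Decompress() use temporary files
orig = b'hello world' * 100
packed = tools.Compress(orig, 'lz4')              # 4-byte length header prepended
padded = packed + b'\0' * 16                      # entry larger than its contents
assert tools.Decompress(padded, 'lz4') == orig    # header says where the data ends

raw = tools.Compress(orig, 'lz4', with_header=False)           # CBFS-style, no header
assert tools.Decompress(raw, 'lz4', with_header=False) == orig
tools.FinaliseOutputDir()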