tools: Wrap flash binaries into a UF2 file for flashing through USB MSC

@mmoskal This commit adds basic support for the UF2 file format to ESP-IDF.
pull/6365/head
Roland Dobai 2020-12-11 16:28:11 +01:00
parent 286f06b274
commit 548ea1bdd5
14 changed files with 557 additions and 1 deletion
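For context, the new tool can also be driven from a script; a minimal sketch, assuming IDF_PATH is set in the environment and using placeholder offsets and binary names (only the `write` subcommand, the `-o` and `--chip-id` flags, and the ESP32 family ID 0x1c5f21b0 come from this commit):

# Sketch: wrap flash binaries into a UF2 image by calling the new tools/mkuf2.py
# the same way the uf2 build targets below do. The offsets and file names here
# are illustrative placeholders.
import os
import subprocess
import sys

mkuf2 = os.path.join(os.environ['IDF_PATH'], 'tools', 'mkuf2.py')
subprocess.check_call([
    sys.executable, mkuf2, 'write',
    '-o', 'build/uf2.bin',
    '--chip-id', '0x1c5f21b0',                    # UF2 family ID for ESP32
    '0x1000', 'build/bootloader/bootloader.bin',  # <address> <file> pairs
    '0x10000', 'build/app.bin',
])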

View file

@@ -55,6 +55,7 @@ bootloader: $(BOOTLOADER_BIN) | check_python_dependencies
@echo "$(ESPTOOLPY_WRITE_FLASH) $(BOOTLOADER_OFFSET) $^"
ESPTOOL_ALL_FLASH_ARGS += $(BOOTLOADER_OFFSET) $(BOOTLOADER_BIN)
UF2_ADD_BINARIES += $(BOOTLOADER_OFFSET) $(BOOTLOADER_BIN)
bootloader-flash: $(BOOTLOADER_BIN) $(call prereq_if_explicit,erase_flash) | check_python_dependencies
$(ESPTOOLPY_WRITE_FLASH) 0x1000 $^

View file

@@ -66,6 +66,7 @@ ESPTOOLPY_WRITE_FLASH_ENCRYPT=$(ESPTOOLPY_SERIAL) write_flash --encrypt $(if $(C
endif
ESPTOOL_ALL_FLASH_ARGS += $(APP_OFFSET) $(APP_BIN)
UF2_ADD_BINARIES += $(APP_OFFSET) $(APP_BIN)
ifdef CONFIG_SECURE_BOOT_BUILD_SIGNED_BINARIES
ifndef IS_BOOTLOADER_BUILD
@@ -164,4 +165,19 @@ monitor: $(call prereq_if_explicit,%flash) | check_python_dependencies
[ -f $(APP_ELF) ] || exit 1
$(MONITOR_PYTHON) $(IDF_PATH)/tools/idf_monitor.py $(MONITOR_OPTS) $(APP_ELF)
# The Make-based build system supports ESP32 only
UF2_CHIP_ID = "0x1c5f21b0"
uf2-app: $(APP_BIN) partition_table_get_info
$(PYTHON) $(IDF_PATH)/tools/mkuf2.py write \
-o "$(BUILD_DIR_BASE)/uf2-app.bin" \
--chip-id "$(UF2_CHIP_ID)" \
$(APP_OFFSET) $(APP_BIN)
uf2: all_binaries
$(PYTHON) $(IDF_PATH)/tools/mkuf2.py write \
-o "$(BUILD_DIR_BASE)/uf2.bin" \
--chip-id "$(UF2_CHIP_ID)" \
$(UF2_ADD_BINARIES)
.PHONY: erase_flash

View file

@@ -96,6 +96,7 @@ check_table_contents: partition_table_get_info
PARTITION_TABLE_FLASH_CMD = $(ESPTOOLPY_SERIAL) write_flash $(PARTITION_TABLE_OFFSET) $(PARTITION_TABLE_BIN)
ESPTOOL_ALL_FLASH_ARGS += $(PARTITION_TABLE_OFFSET) $(PARTITION_TABLE_BIN)
UF2_ADD_BINARIES += $(PARTITION_TABLE_OFFSET) $(PARTITION_TABLE_BIN)
partition_table: $(PARTITION_TABLE_BIN) partition_table_get_info | check_python_dependencies
@echo "Partition table binary generated. Contents:"

View file

@@ -312,6 +312,12 @@ test_mkdfu:
- cd ${IDF_PATH}/tools/test_mkdfu
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh ./test_mkdfu.py
test_mkuf2:
extends: .host_test_template
script:
- cd ${IDF_PATH}/tools/test_mkuf2
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh ./test_mkuf2.py
test_docs:
extends: .host_test_template
image: $ESP_IDF_DOC_ENV_IMAGE

View file

@@ -92,12 +92,14 @@ tools/ldgen/test/test_fragments.py
tools/ldgen/test/test_generation.py
tools/mass_mfg/mfg_gen.py
tools/mkdfu.py
tools/mkuf2.py
tools/set-submodules-to-github.sh
tools/test_idf_monitor/run_test_idf_monitor.py
tools/test_idf_py/test_idf_py.py
tools/test_idf_size/test.sh
tools/test_idf_tools/test_idf_tools.py
tools/test_mkdfu/test_mkdfu.py
tools/test_mkuf2/test_mkuf2.py
tools/unit-test-app/tools/get_available_configs.sh
tools/unit-test-app/unit_test.py
tools/windows/eclipse_make.sh

View file

@@ -333,6 +333,15 @@ function run_tests()
rm sdkconfig.defaults
make defconfig
print_status "UF2 build works"
rm -f -r build sdkconfig
make defconfig
make uf2
assert_built ${APP_BINS} "uf2.bin"
make uf2-app
assert_built "uf2-app.bin"
rm -f -r build sdkconfig
print_status "Empty directory not treated as a component"
mkdir -p components/esp32
make || failure "Failed to build with empty esp32 directory in components"

View file

@@ -743,6 +743,21 @@ endmenu\n" >> ${IDF_PATH}/Kconfig
assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN} "dfu.bin"
rm -rf build sdkconfig
print_status "UF2 build works"
rm -f -r build sdkconfig
idf.py uf2 &> tmp.log
grep "build/uf2.bin\" has been written." tmp.log || (tail -n 100 tmp.log ; failure "UF2 build works for esp32")
assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN} "uf2.bin"
idf.py uf2-app &> tmp.log
grep "build/uf2-app.bin\" has been written." tmp.log || (tail -n 100 tmp.log ; failure "UF2 build works for application binary")
assert_built "uf2-app.bin"
idf.py set-target esp32s2
idf.py uf2 &> tmp.log
grep "build/uf2.bin\" has been written." tmp.log || (tail -n 100 tmp.log ; failure "UF2 build works for esp32s2")
rm tmp.log
assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN} "uf2.bin"
rm -rf build sdkconfig
print_status "Loadable ELF build works"
echo "CONFIG_APP_BUILD_TYPE_ELF_RAM=y" > sdkconfig
idf.py reconfigure || failure "Couldn't configure for loadable ELF file"

View file

@@ -44,6 +44,7 @@ if(NOT __idf_env_set)
include(targets)
include(ldgen)
include(dfu)
include(uf2)
include(version)
__build_init("${idf_path}")

View file

@@ -494,6 +494,9 @@ macro(project project_name)
# Add DFU build and flash targets
__add_dfu_targets()
# Add UF2 build targets
__add_uf2_targets()
idf_build_executable(${project_elf})
__project_info("${test_components}")

View file

@@ -0,0 +1,41 @@
# Add UF2 build target
function(__add_uf2_targets)
idf_build_get_property(target IDF_TARGET)
if("${target}" STREQUAL "esp32")
set(uf2_family_id "0x1c5f21b0")
elseif("${target}" STREQUAL "esp32s2")
set(uf2_family_id "0xbfdd4eee")
elseif("${target}" STREQUAL "esp32c3")
set(uf2_family_id "0xd42ba06c")
elseif("${target}" STREQUAL "esp32s3")
set(uf2_family_id "0xc47e5767")
elseif("${target}" STREQUAL "linux")
return()
else()
message(FATAL_ERROR "UF2 family identifier is unknown for ${target}")
# Generate an ID and submit a pull request as described here: https://github.com/microsoft/uf2
endif()
idf_build_get_property(python PYTHON)
idf_build_get_property(idf_path IDF_PATH)
add_custom_target(uf2-app
COMMAND ${python} ${idf_path}/tools/mkuf2.py write
-o "${CMAKE_CURRENT_BINARY_DIR}/uf2-app.bin"
--json "${CMAKE_CURRENT_BINARY_DIR}/flasher_args.json"
--chip-id "${uf2_family_id}"
--bin app
DEPENDS gen_project_binary
VERBATIM
USES_TERMINAL)
add_custom_target(uf2
COMMAND ${python} ${idf_path}/tools/mkuf2.py write
-o "${CMAKE_CURRENT_BINARY_DIR}/uf2.bin"
--json "${CMAKE_CURRENT_BINARY_DIR}/flasher_args.json"
--chip-id "${uf2_family_id}"
DEPENDS gen_project_binary bootloader
VERBATIM
USES_TERMINAL)
endfunction()
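Since the Make-based build hardcodes the ESP32 ID only, a script that calls tools/mkuf2.py for other targets needs the same mapping as uf2.cmake; a minimal Python sketch with the IDs copied from the CMake code above (the helper name is illustrative, not part of the commit):

# Sketch: target -> UF2 family ID mapping, mirroring uf2.cmake above.
UF2_FAMILY_IDS = {
    'esp32': 0x1c5f21b0,
    'esp32s2': 0xbfdd4eee,
    'esp32c3': 0xd42ba06c,
    'esp32s3': 0xc47e5767,
}

def uf2_family_id(target):
    try:
        return UF2_FAMILY_IDS[target]
    except KeyError:
        # Mirror the CMake behaviour: an unknown target is an error; new IDs
        # should be registered as described at https://github.com/microsoft/uf2
        raise SystemExit('UF2 family identifier is unknown for {}'.format(target))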

View file

@@ -465,7 +465,7 @@ def init_cli(verbose_output=None):
def _print_closing_message(self, args, actions):
# print a closing message of some kind
#
if "flash" in str(actions) or "dfu" in str(actions):
if any(t in str(actions) for t in ("flash", "dfu", "uf2", "uf2-app")):
print("Done")
return

View file

@@ -0,0 +1,24 @@
from idf_py_actions.tools import ensure_build_directory, run_target
def action_extensions(base_actions, project_path):
def uf2_target(target_name, ctx, args):
ensure_build_directory(args, ctx.info_name)
run_target(target_name, args)
uf2_actions = {
"actions": {
"uf2": {
"callback": uf2_target,
"short_help": "Generate the UF2 binary with all the binaries included",
"dependencies": ["all"],
},
"uf2-app": {
"callback": uf2_target,
"short_help": "Generate an UF2 binary for the application only",
"dependencies": ["all"],
},
}
}
return uf2_actions

tools/mkuf2.py 100755 (new file, 212 additions)

View file

@@ -0,0 +1,212 @@
#!/usr/bin/env python
#
# Copyright 2020 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
import argparse
import hashlib
import json
import os
import struct
from functools import partial
from future.utils import iteritems
try:
from itertools import izip as zip
except ImportError:
# Python 3
pass
def round_up_int_div(n, d):
# equivalent to math.ceil(n / d)
return (n + d - 1) // d
class UF2Writer(object):
# The UF2 format is described here: https://github.com/microsoft/uf2
UF2_BLOCK_SIZE = 512
UF2_DATA_SIZE = 476 # maximum possible CHUNK_SIZE, reduced by the optional parts (currently only MD5_PART)
UF2_MD5_PART_SIZE = 24
UF2_FIRST_MAGIC = 0x0A324655
UF2_SECOND_MAGIC = 0x9E5D5157
UF2_FINAL_MAGIC = 0x0AB16F30
UF2_FLAG_FAMILYID_PRESENT = 0x00002000
UF2_FLAG_MD5_PRESENT = 0x00004000
def __init__(self, chip_id, output_file, chunk_size):
self.chip_id = chip_id
self.CHUNK_SIZE = self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE if chunk_size is None else chunk_size
self.f = open(output_file, 'wb')
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.f:
self.f.close()
@staticmethod
def _to_uint32(num):
return struct.pack('<I', num)
def _write_block(self, addr, chunk, len_chunk, block_no, blocks):
assert len_chunk > 0
assert len_chunk <= self.CHUNK_SIZE
assert block_no < blocks
block = self._to_uint32(self.UF2_FIRST_MAGIC)
block += self._to_uint32(self.UF2_SECOND_MAGIC)
block += self._to_uint32(self.UF2_FLAG_FAMILYID_PRESENT | self.UF2_FLAG_MD5_PRESENT)
block += self._to_uint32(addr)
block += self._to_uint32(len_chunk)
block += self._to_uint32(block_no)
block += self._to_uint32(blocks)
block += self._to_uint32(self.chip_id)
block += chunk
md5_part = self._to_uint32(addr)
md5_part += self._to_uint32(len_chunk)
md5_part += hashlib.md5(chunk).digest()
assert(len(md5_part) == self.UF2_MD5_PART_SIZE)
block += md5_part
block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
block += self._to_uint32(self.UF2_FINAL_MAGIC)
assert len(block) == self.UF2_BLOCK_SIZE
self.f.write(block)
def add_file(self, addr, f_path):
blocks = round_up_int_div(os.path.getsize(f_path), self.CHUNK_SIZE)
with open(f_path, 'rb') as fin:
a = addr
for i, chunk in enumerate(iter(partial(fin.read, self.CHUNK_SIZE), b'')):
len_chunk = len(chunk)
self._write_block(a, chunk, len_chunk, i, blocks)
a += len_chunk
def action_write(args):
with UF2Writer(args['chip_id'], args['output_file'], args['chunk_size']) as writer:
for addr, f in args['files']:
print('Adding {} at {:#x}'.format(f, addr))
writer.add_file(addr, f)
print('"{}" has been written.'.format(args['output_file']))
def main():
parser = argparse.ArgumentParser()
def four_byte_aligned(integer):
return integer & 3 == 0
def parse_chunk_size(string):
num = int(string, 0)
if not four_byte_aligned(num):
raise argparse.ArgumentTypeError('Chunk size should be a 4-byte aligned number')
return num
def parse_chip_id(string):
num = int(string, 16)
if num < 0 or num > 0xFFFFFFFF:
raise argparse.ArgumentTypeError('Chip ID should be a 4-byte unsigned integer')
return num
# Provision to add "info" command
subparsers = parser.add_subparsers(dest="command")
write_parser = subparsers.add_parser("write")
write_parser.add_argument("-o", "--output-file",
help='Filename for storing the output UF2 image',
required=True)
write_parser.add_argument("--chip-id",
required=True,
type=parse_chip_id,
help='Hexadecimal chip identifier')
write_parser.add_argument("--chunk-size",
required=False,
type=parse_chunk_size,
default=None,
help='Specify the used data part of the 512 byte UF2 block. A common value is 256. By '
'default the largest possible value will be used.')
write_parser.add_argument("--json",
help='Optional file for loading "flash_files" dictionary with <address> <file> items')
write_parser.add_argument("--bin",
help='Use only a subset of binaries from the JSON file, e.g. "partition_table '
'bootloader app"',
nargs='*')
write_parser.add_argument("files",
metavar="<address> <file>", help='Add <file> at <address>',
nargs="*")
args = parser.parse_args()
def check_file(file_name):
if not os.path.isfile(file_name):
raise RuntimeError('{} is not a regular file!'.format(file_name))
return file_name
def parse_addr(string):
num = int(string, 0)
if not four_byte_aligned(num):
raise RuntimeError('{} is not a 4-byte aligned valid address'.format(string))
return num
files = []
if args.files:
files += [(parse_addr(addr), check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]
if args.json:
json_dir = os.path.dirname(os.path.abspath(args.json))
def process_json_file(path):
'''
The input path is relative to json_dir. This function makes it relative to the current working
directory.
'''
return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))
with open(args.json) as f:
json_content = json.load(f)
if args.bin:
try:
bin_selection = [json_content[b] for b in args.bin]
flash_dic = dict((x['offset'], x['file']) for x in bin_selection)
except KeyError:
print('Invalid binary was selected.')
valid = [k if all(x in v for x in ('offset', 'file')) else None for k, v in iteritems(json_content)]
print('Valid ones:', ' '.join(x for x in valid if x))
exit(1)
else:
flash_dic = json_content['flash_files']
files += [(parse_addr(addr), process_json_file(f_name)) for addr, f_name in iteritems(flash_dic)]
files = sorted([(addr, f_name) for addr, f_name in iteritems(dict(files))],
key=lambda x: x[0]) # remove possible duplicates and sort based on the address
cmd_args = {'output_file': args.output_file,
'files': files,
'chip_id': args.chip_id,
'chunk_size': args.chunk_size,
}
{'write': action_write
}[args.command](cmd_args)
if __name__ == "__main__":
main()
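Besides the CLI, the writer can be used as a library; a minimal sketch, assuming mkuf2.py is importable and reusing the ESP32 family ID from uf2.cmake (the offset and file name are placeholders):

# Sketch: drive UF2Writer directly instead of going through the argparse CLI.
from mkuf2 import UF2Writer

# Passing None as chunk_size selects the default payload size of
# UF2_DATA_SIZE - UF2_MD5_PART_SIZE = 452 bytes per block.
with UF2Writer(0x1c5f21b0, 'uf2-app.bin', None) as writer:
    writer.add_file(0x10000, 'app.bin')  # add <file> at <address>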

View file

@@ -0,0 +1,225 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import filecmp
import hashlib
import os
import pexpect
import random
import struct
import sys
import tempfile
import time
import unittest
from functools import partial
from io import open
from itertools import chain
try:
from itertools import izip as zip
except ImportError:
# Python 3
pass
current_dir = os.path.dirname(os.path.realpath(__file__))
mkuf2_dir = os.path.abspath(os.path.join(current_dir, '..'))
mkuf2_path = os.path.join(mkuf2_dir, 'mkuf2.py')
try:
import mkuf2
except ImportError:
sys.path.append(mkuf2_dir)
import mkuf2
class UF2Block(object):
def __init__(self, bs):
self.length = len(bs)
# See https://github.com/microsoft/uf2 for the format
first_part = '<' + 'I' * 8
# the payload sits between the first and the last part
last_part = '<I'
first_part_len = struct.calcsize(first_part)
last_part_len = struct.calcsize(last_part)
(self.magicStart0, self.magicStart1, self.flags, self.targetAddr, self.payloadSize, self.blockNo,
self.numBlocks, self.familyID) = struct.unpack(first_part, bs[:first_part_len])
self.data = bs[first_part_len:-last_part_len]
(self.magicEnd, ) = struct.unpack(last_part, bs[-last_part_len:])
def __len__(self):
return self.length
class UF2BlockReader(object):
def __init__(self, f_name):
self.f_name = f_name
def get(self):
with open(self.f_name, 'rb') as f:
for chunk in iter(partial(f.read, mkuf2.UF2Writer.UF2_BLOCK_SIZE), b''):
yield UF2Block(chunk)
class BinaryWriter(object):
def __init__(self, f_name):
self.f_name = f_name
def append(self, data):
# The file is reopened several times in order to make sure it is not left open
with open(self.f_name, 'ab') as f:
f.write(data)
class BinaryTester(unittest.TestCase):
def generate_binary(self, size):
with tempfile.NamedTemporaryFile(delete=False) as f:
self.addCleanup(os.unlink, f.name)
for _ in range(size):
f.write(struct.pack('B', random.randrange(0, 1 << 8)))
return f.name
@staticmethod
def generate_chipID():
return random.randrange(0, 1 << 32)
def generate_uf2(self, chip_id, iter_addr_offset_tuples, chunk_size=None):
of_name = self.generate_binary(0)
com_args = [mkuf2_path, 'write',
'-o', of_name,
'--chip-id', hex(chip_id)]
com_args += [] if chunk_size is None else ['--chunk-size', str(chunk_size)]
file_args = list(chain(*[(str(addr), f) for addr, f in iter_addr_offset_tuples]))
p = pexpect.spawn(sys.executable, com_args + file_args, timeout=20)
self.addCleanup(p.terminate, force=True)
exp_list = ['Adding {} at {}'.format(f, hex(addr)) for addr, f in iter_addr_offset_tuples]
exp_list += ['"{}" has been written.'.format(of_name)]
for e in exp_list:
p.expect_exact(e)
# Do non-blocking wait instead of the blocking p.wait():
for _ in range(10):
if not p.isalive():
break
time.sleep(0.5)
# else: will be terminated during cleanup
return of_name
def process_blocks(self, uf2block, expected_chip_id):
flags = mkuf2.UF2Writer.UF2_FLAG_FAMILYID_PRESENT | mkuf2.UF2Writer.UF2_FLAG_MD5_PRESENT
parsed_binaries = []
block_list = [] # collect block numbers here
total_blocks = set() # collect total block numbers here
for block in UF2BlockReader(uf2block).get():
if block.blockNo == 0:
# new file has been detected
base_addr = block.targetAddr
current_addr = base_addr
binary_writer = BinaryWriter(self.generate_binary(0))
self.assertEqual(len(block), mkuf2.UF2Writer.UF2_BLOCK_SIZE)
self.assertEqual(block.magicStart0, mkuf2.UF2Writer.UF2_FIRST_MAGIC)
self.assertEqual(block.magicStart1, mkuf2.UF2Writer.UF2_SECOND_MAGIC)
self.assertEqual(block.flags & flags, flags)
self.assertEqual(len(block.data), mkuf2.UF2Writer.UF2_DATA_SIZE)
payload = block.data[:block.payloadSize]
md5_obj = hashlib.md5(payload)
md5_part = block.data[block.payloadSize:block.payloadSize + mkuf2.UF2Writer.UF2_MD5_PART_SIZE]
address, length = struct.unpack('<II', md5_part[:-md5_obj.digest_size])
md5sum = md5_part[-md5_obj.digest_size:]
self.assertEqual(address, block.targetAddr)
self.assertEqual(length, block.payloadSize)
self.assertEqual(md5sum, md5_obj.digest())
self.assertEqual(block.familyID, expected_chip_id)
self.assertEqual(block.magicEnd, mkuf2.UF2Writer.UF2_FINAL_MAGIC)
self.assertEqual(current_addr, block.targetAddr)
binary_writer.append(payload)
block_list.append(block.blockNo)
total_blocks.add(block.numBlocks)
if block.blockNo == block.numBlocks - 1:
self.assertEqual(block_list, list(range(block.numBlocks)))
# we have found all blocks and in the right order
self.assertEqual(total_blocks, {block.numBlocks}) # numBlocks are the same in all the blocks
del block_list[:]
total_blocks.clear()
parsed_binaries += [(base_addr, binary_writer.f_name)]
current_addr += block.payloadSize
return parsed_binaries
def common(self, t, chunk_size=None):
chip_id = self.generate_chipID()
parsed_t = self.process_blocks(self.generate_uf2(chip_id, t, chunk_size), chip_id)
self.assertEqual(len(t), len(parsed_t))
for (orig_addr, orig_fname), (addr, fname) in zip(t, parsed_t):
self.assertEqual(orig_addr, addr)
self.assertTrue(filecmp.cmp(orig_fname, fname))
def test_simple(self):
self.common([(0, self.generate_binary(1))])
def test_more_files(self):
self.common([(100, self.generate_binary(1)), (200, self.generate_binary(1))])
def test_larger_files(self):
self.common([(0x10, self.generate_binary(6)), (0x20, self.generate_binary(8))])
def test_boundaries(self):
self.common([(0x100, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE)),
(0x200, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE + 1)),
(0x300, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE - 1))])
def test_files_with_more_blocks(self):
self.common([(0x100, self.generate_binary(3 * mkuf2.UF2Writer.UF2_DATA_SIZE)),
(0x200, self.generate_binary(2 * mkuf2.UF2Writer.UF2_DATA_SIZE + 1)),
(0x300, self.generate_binary(2 * mkuf2.UF2Writer.UF2_DATA_SIZE - 1))])
def test_very_large_files(self):
self.common([(0x100, self.generate_binary(20 * mkuf2.UF2Writer.UF2_DATA_SIZE + 5)),
(0x10000, self.generate_binary(50 * mkuf2.UF2Writer.UF2_DATA_SIZE + 100)),
(0x100000, self.generate_binary(100 * mkuf2.UF2Writer.UF2_DATA_SIZE))])
def test_chunk_size(self):
chunk_size = 256
self.common([(0x100, self.generate_binary(chunk_size)),
(0x200, self.generate_binary(chunk_size + 1)),
(0x300, self.generate_binary(chunk_size - 1))],
chunk_size)
if __name__ == '__main__':
unittest.main()