feat(tools): move uf2 generation to esptool

pull/12486/head
Peter Dragun 2023-08-30 15:58:26 +02:00
parent 8fc8f3f479
commit 9acf412a07
12 changed files with 102 additions and 394 deletions

View file

@@ -208,12 +208,6 @@ test_mkdfu:
- cd ${IDF_PATH}/tools/test_mkdfu
- ./test_mkdfu.py
test_mkuf2:
extends: .host_test_template
script:
- cd ${IDF_PATH}/tools/test_mkuf2
- ./test_mkuf2.py
test_sbom:
extends:
- .host_test_template

View file

@@ -193,7 +193,6 @@
- "tools/gen_soc_caps_kconfig/test/test_gen_soc_caps_kconfig.py"
- "tools/mkuf2.py"
- "tools/test_mkuf2/test_mkuf2.py"
- "tools/split_paths_by_spaces.py"

View file

@@ -11,6 +11,7 @@ set(ESPTOOLPY ${python} "$ENV{ESPTOOL_WRAPPER}" "${CMAKE_CURRENT_LIST_DIR}/espto
set(ESPSECUREPY ${python} "${CMAKE_CURRENT_LIST_DIR}/esptool/espsecure.py")
set(ESPEFUSEPY ${python} "${CMAKE_CURRENT_LIST_DIR}/esptool/espefuse.py")
set(ESPMONITOR ${python} -m esp_idf_monitor)
set(ESPMKUF2 ${python} "${idf_path}/tools/mkuf2.py" write --chip ${chip_model})
set(ESPTOOLPY_CHIP "${chip_model}")
if(NOT CONFIG_APP_BUILD_TYPE_RAM AND CONFIG_APP_BUILD_GENERATE_BINARIES)
@@ -208,6 +209,30 @@ add_custom_target(erase_flash
VERBATIM
)
set(UF2_ARGS --json "${CMAKE_CURRENT_BINARY_DIR}/flasher_args.json")
add_custom_target(uf2
COMMAND ${CMAKE_COMMAND}
-D "IDF_PATH=${idf_path}"
-D "SERIAL_TOOL=${ESPMKUF2}"
-D "SERIAL_TOOL_ARGS=${UF2_ARGS};-o;${CMAKE_CURRENT_BINARY_DIR}/uf2.bin"
-P run_serial_tool.cmake
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
USES_TERMINAL
VERBATIM
)
add_custom_target(uf2-app
COMMAND ${CMAKE_COMMAND}
-D "IDF_PATH=${idf_path}"
-D "SERIAL_TOOL=${ESPMKUF2}"
-D "SERIAL_TOOL_ARGS=${UF2_ARGS};-o;${CMAKE_CURRENT_BINARY_DIR}/uf2-app.bin;--bin;app"
-P run_serial_tool.cmake
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
USES_TERMINAL
VERBATIM
)
add_custom_target(monitor
COMMAND ${CMAKE_COMMAND}
-D "IDF_PATH=${idf_path}"

View file

@@ -113,6 +113,5 @@ tools/test_idf_py/test_hints.py
tools/test_idf_py/test_idf_py.py
tools/test_idf_tools/test_idf_tools.py
tools/test_mkdfu/test_mkdfu.py
tools/test_mkuf2/test_mkuf2.py
tools/unit-test-app/tools/get_available_configs.sh
tools/unit-test-app/unit_test.py

View file

@@ -95,7 +95,6 @@ tools/test_idf_py/test_idf_extensions/test_ext/test_extension.py
tools/test_idf_py/test_idf_py.py
tools/test_idf_tools/test_idf_tools.py
tools/test_mkdfu/test_mkdfu.py
tools/test_mkuf2/test_mkuf2.py
tools/unit-test-app/idf_ext.py
tools/unit-test-app/tools/CreateSectionTable.py
tools/unit-test-app/tools/UnitTestParser.py

View file

@@ -47,7 +47,6 @@ if(NOT __idf_env_set)
include(targets)
include(ldgen)
include(dfu)
include(uf2)
include(version)
__build_init("${idf_path}")

View file

@@ -718,9 +718,6 @@ macro(project project_name)
# Add DFU build and flash targets
__add_dfu_targets()
# Add UF2 build targets
__add_uf2_targets()
idf_build_executable(${project_elf})
__project_info("${test_components}")

View file

@@ -1,49 +0,0 @@
# Add UF2 build target
function(__add_uf2_targets)
idf_build_get_property(target IDF_TARGET)
if("${target}" STREQUAL "esp32")
set(uf2_family_id "0x1c5f21b0")
elseif("${target}" STREQUAL "esp32s2")
set(uf2_family_id "0xbfdd4eee")
elseif("${target}" STREQUAL "esp32c3")
set(uf2_family_id "0xd42ba06c")
elseif("${target}" STREQUAL "esp32s3")
set(uf2_family_id "0xc47e5767")
elseif("${target}" STREQUAL "esp32h2")
set(uf2_family_id "0x332726f6")
elseif("${target}" STREQUAL "esp32c2")
set(uf2_family_id "0x2b88d29c")
elseif("${target}" STREQUAL "esp32c6")
set(uf2_family_id "0x540ddf62")
elseif("${target}" STREQUAL "esp32p4")
set(uf2_family_id "0x3d308e94")
elseif("${target}" STREQUAL "linux")
return()
else()
message(FATAL_ERROR "UF2 family identificator is unknown for ${target}")
# Generate an ID and submit a pull request as described here: https://github.com/microsoft/uf2
endif()
idf_build_get_property(python PYTHON)
idf_build_get_property(idf_path IDF_PATH)
add_custom_target(uf2-app
COMMAND ${python} ${idf_path}/tools/mkuf2.py write
-o "${CMAKE_CURRENT_BINARY_DIR}/uf2-app.bin"
--json "${CMAKE_CURRENT_BINARY_DIR}/flasher_args.json"
--chip-id "${uf2_family_id}"
--bin app
DEPENDS gen_project_binary
VERBATIM
USES_TERMINAL)
add_custom_target(uf2
COMMAND ${python} ${idf_path}/tools/mkuf2.py write
-o "${CMAKE_CURRENT_BINARY_DIR}/uf2.bin"
--json "${CMAKE_CURRENT_BINARY_DIR}/flasher_args.json"
--chip-id "${uf2_family_id}"
DEPENDS gen_project_binary bootloader
VERBATIM
USES_TERMINAL)
endfunction()
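The deleted uf2.cmake above was the only place in the build system that knew the UF2 family IDs; that mapping is now esptool's responsibility. For reference, the same table as a small Python helper (the ID values are copied from the removed file, the helper itself is illustrative):

# UF2 family IDs exactly as they appeared in the removed uf2.cmake.
UF2_FAMILY_IDS = {
    'esp32':   0x1C5F21B0,
    'esp32s2': 0xBFDD4EEE,
    'esp32c3': 0xD42BA06C,
    'esp32s3': 0xC47E5767,
    'esp32h2': 0x332726F6,
    'esp32c2': 0x2B88D29C,
    'esp32c6': 0x540DDF62,
    'esp32p4': 0x3D308E94,
}

def family_id(target: str) -> int:
    # Mirrors the removed if/elseif chain; an unknown target was a fatal error,
    # with new IDs registered at https://github.com/microsoft/uf2.
    try:
        return UF2_FAMILY_IDS[target]
    except KeyError as err:
        raise ValueError(f'UF2 family ID is unknown for {target}') from err

Keeping the table out of the build system means support for a new chip only has to land in esptool, not in another elseif branch here.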

View file

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from typing import Dict, List
@@ -7,19 +7,32 @@ from idf_py_actions.tools import PropertyDict, ensure_build_directory, run_targe
def action_extensions(base_actions: Dict, project_path: List) -> Dict:
def uf2_target(target_name: str, ctx: Context, args: PropertyDict) -> None:
def uf2_target(target_name: str, ctx: Context, args: PropertyDict, md5_disable: bool) -> None:
ensure_build_directory(args, ctx.info_name)
run_target(target_name, args)
extra = list()
if md5_disable:
extra.append('--md5-disable')
run_target(target_name, args, env={'SERIAL_TOOL_EXTRA_ARGS': ' '.join(extra)})
uf2_options = [
{
'names': ['--md5-disable'],
'is_flag': True,
'help': 'Disable MD5 checksum',
},
]
uf2_actions = {
'actions': {
'uf2': {
'callback': uf2_target,
'options': uf2_options,
'short_help': 'Generate the UF2 binary with all the binaries included',
'dependencies': ['all'],
},
'uf2-app': {
'callback': uf2_target,
'options': uf2_options,
'short_help': 'Generate an UF2 binary for the application only',
'dependencies': ['all'],
},
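The reworked extension above forwards the new --md5-disable flag through the SERIAL_TOOL_EXTRA_ARGS environment variable rather than a dedicated CMake code path, so 'idf.py uf2 --md5-disable' (or 'idf.py uf2-app --md5-disable') ends up appending the flag to the mkuf2.py command line. A condensed sketch of that plumbing, with run_target and ensure_build_directory taken from idf_py_actions.tools and the rest illustrative:

from idf_py_actions.tools import ensure_build_directory, run_target

def uf2_target(target_name, ctx, args, md5_disable):
    ensure_build_directory(args, ctx.info_name)
    # Only --md5-disable is forwarded today; further flags could be appended here.
    extra = ['--md5-disable'] if md5_disable else []
    # The value travels as an environment variable and is presumably appended to
    # the ESPMKUF2 command line by the serial-tool helper invoked from CMake.
    run_target(target_name, args, env={'SERIAL_TOOL_EXTRA_ARGS': ' '.join(extra)})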

View file

@@ -1,112 +1,39 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# Module was moved to the esptool in ESP-IDF v5.2 and relicensed under GPL v2.0 license.
from __future__ import division
import argparse
import hashlib
import json
import os
import struct
from functools import partial
from typing import Dict, List
def round_up_int_div(n: int, d: int) -> int:
# equivalent to math.ceil(n / d)
return (n + d - 1) // d
class UF2Writer(object):
# The UF2 format is described here: https://github.com/microsoft/uf2
UF2_BLOCK_SIZE = 512
UF2_DATA_SIZE = 476 # max value of CHUNK_SIZE reduced by optional parts. Currently, MD5_PART only.
UF2_MD5_PART_SIZE = 24
UF2_FIRST_MAGIC = 0x0A324655
UF2_SECOND_MAGIC = 0x9E5D5157
UF2_FINAL_MAGIC = 0x0AB16F30
UF2_FLAG_FAMILYID_PRESENT = 0x00002000
UF2_FLAG_MD5_PRESENT = 0x00004000
def __init__(self, chip_id: int, output_file: os.PathLike, chunk_size: int) -> None:
self.chip_id = chip_id
self.CHUNK_SIZE = self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE if chunk_size is None else chunk_size
self.f = open(output_file, 'wb')
def __enter__(self) -> 'UF2Writer':
return self
def __exit__(self, exc_type: str, exc_val: int, exc_tb: List) -> None:
if self.f:
self.f.close()
@staticmethod
def _to_uint32(num: int) -> bytes:
return struct.pack('<I', num)
def _write_block(self, addr: int, chunk: bytes, len_chunk: int, block_no: int, blocks: int) -> None:
assert len_chunk > 0
assert len_chunk <= self.CHUNK_SIZE
assert block_no < blocks
block = self._to_uint32(self.UF2_FIRST_MAGIC)
block += self._to_uint32(self.UF2_SECOND_MAGIC)
block += self._to_uint32(self.UF2_FLAG_FAMILYID_PRESENT | self.UF2_FLAG_MD5_PRESENT)
block += self._to_uint32(addr)
block += self._to_uint32(len_chunk)
block += self._to_uint32(block_no)
block += self._to_uint32(blocks)
block += self._to_uint32(self.chip_id)
block += chunk
md5_part = self._to_uint32(addr)
md5_part += self._to_uint32(len_chunk)
md5_part += hashlib.md5(chunk).digest()
assert len(md5_part) == self.UF2_MD5_PART_SIZE
block += md5_part
block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
block += self._to_uint32(self.UF2_FINAL_MAGIC)
assert len(block) == self.UF2_BLOCK_SIZE
self.f.write(block)
def add_file(self, addr: int, f_path: os.PathLike) -> None:
blocks = round_up_int_div(os.path.getsize(f_path), self.CHUNK_SIZE)
with open(f_path, 'rb') as fin:
a = addr
for i, chunk in enumerate(iter(partial(fin.read, self.CHUNK_SIZE), b'')):
len_chunk = len(chunk)
self._write_block(a, chunk, len_chunk, i, blocks)
a += len_chunk
def action_write(args: Dict) -> None:
with UF2Writer(args['chip_id'], args['output_file'], args['chunk_size']) as writer:
for addr, f in args['files']:
print('Adding {} at {:#x}'.format(f, addr))
writer.add_file(addr, f)
print('"{}" has been written.'.format(args['output_file']))
import subprocess
import sys
def main() -> None:
parser = argparse.ArgumentParser()
def four_byte_aligned(integer: int) -> bool:
return integer & 3 == 0
def parse_chunk_size(string: str) -> int:
num = int(string, 0)
if not four_byte_aligned(num):
raise argparse.ArgumentTypeError('Chunk size should be a 4-byte aligned number')
return num
def parse_chip_id(string: str) -> int:
num = int(string, 16)
if num < 0 or num > 0xFFFFFFFF:
raise argparse.ArgumentTypeError('Chip ID should be a 4-byte unsigned integer')
return num
def parse_chip_id(string: str) -> str:
# compatibility layer with old script
print("DEPRECATED option '--chip-id'. Please consider using '--chip' instead")
# DO NOT add new IDs; they are now maintained in esptool.
ids = {
0x1c5f21b0: 'esp32',
0xbfdd4eee: 'esp32s2',
0xd42ba06c: 'esp32c3',
0xc47e5767: 'esp32s3',
0x332726f6: 'esp32h2',
0x2b88d29c: 'esp32c2',
0x540ddf62: 'esp32c6',
0x3d308e94: 'esp32p4',
}
try:
return ids[int(string, 16)]
except KeyError:
raise argparse.ArgumentTypeError('Unknown Chip ID')
# Provision to add "info" command
subparsers = parser.add_subparsers(dest='command')
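The removed UF2Writer above emitted 512-byte blocks: a 32-byte header of eight little-endian uint32 fields, up to 476 data bytes (the last 24 of which form an optional MD5 part), zero padding, and a closing magic word. A minimal decoder for one such block, using only the constants visible in the removed code (illustrative, not part of this commit):

import struct

def decode_uf2_block(block: bytes):
    # Layout per https://github.com/microsoft/uf2: 32-byte header, 476-byte data
    # area, 4-byte final magic -- 512 bytes in total.
    assert len(block) == 512
    (magic0, magic1, flags, addr, payload_size,
     block_no, num_blocks, family_id) = struct.unpack('<8I', block[:32])
    assert magic0 == 0x0A324655 and magic1 == 0x9E5D5157
    (final_magic,) = struct.unpack('<I', block[-4:])
    assert final_magic == 0x0AB16F30
    payload = block[32:32 + payload_size]
    return addr, family_id, block_no, num_blocks, payload

The 24-byte MD5 part (address, length, MD5 digest of the payload) is exactly what the new --md5-disable option leaves out for TinyUF2 compatibility.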
@@ -114,13 +41,17 @@ def main() -> None:
write_parser.add_argument('-o', '--output-file',
help='Filename for storing the output UF2 image',
required=True)
write_parser.add_argument('--chip-id',
required=True,
type=parse_chip_id,
help='Hexa-decimal chip identificator')
group = write_parser.add_mutually_exclusive_group(required=True)
# chip-id used just for backwards compatibility, UF2 family IDs are now stored in esptool
group.add_argument('--chip-id',
type=parse_chip_id,
help=argparse.SUPPRESS)
group.add_argument('--chip',
type=str,
help='Target chip type')
write_parser.add_argument('--chunk-size',
required=False,
type=parse_chunk_size,
type=int,
default=None,
help='Specify the used data part of the 512 byte UF2 block. A common value is 256. By '
'default the largest possible value will be used.')
@@ -130,6 +61,9 @@ def main() -> None:
help='Use only a subset of binaries from the JSON file, e.g. "partition_table '
'bootloader app"',
nargs='*')
write_parser.add_argument('--md5-disable',
help='Disable MD5 checksum. Useful for compatibility with e.g. TinyUF2',
action='store_true')
write_parser.add_argument('files',
metavar='<address> <file>', help='Add <file> at <address>',
nargs='*')
@@ -141,15 +75,9 @@ def main() -> None:
raise RuntimeError('{} is not a regular file!'.format(file_name))
return file_name
def parse_addr(string: str) -> int:
num = int(string, 0)
if not four_byte_aligned(num):
raise RuntimeError('{} is not a 4-byte aligned valid address'.format(string))
return num
files = []
if args.files:
files += [(parse_addr(addr), check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]
files += [(addr, check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]
if args.json:
json_dir = os.path.dirname(os.path.abspath(args.json))
@@ -159,7 +87,7 @@ def main() -> None:
The input path is relative to json_dir. This function makes it relative to the current working
directory.
'''
return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))
return check_file(os.path.abspath(os.path.join(json_dir, path)))
with open(args.json) as f:
json_content = json.load(f)
@@ -176,19 +104,31 @@ def main() -> None:
else:
flash_dic = json_content['flash_files']
files += [(parse_addr(addr), process_json_file(f_name)) for addr, f_name in flash_dic.items()]
files += [(addr, process_json_file(f_name)) for addr, f_name in flash_dic.items()]
files = sorted([(addr, f_name) for addr, f_name in dict(files).items()],
key=lambda x: x[0]) # remove possible duplicates and sort based on the address
# remove possible duplicates and sort based on the address
files = sorted([(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]) # type: ignore
cmd_args = {'output_file': args.output_file,
'files': files,
'chip_id': args.chip_id,
'chunk_size': args.chunk_size,
}
# list of tuples to simple list
files = [item for t in files for item in t]
{'write': action_write
}[args.command](cmd_args)
cmd = [
sys.executable, '-m', 'esptool',
'--chip', args.chip_id or args.chip,
'merge_bin',
'--format', 'uf2',
'-o', args.output_file,
]
if args.chunk_size:
cmd.extend(['--chunk_size', args.chunk_size])
if args.md5_disable:
cmd.append('--md5-disable')
cmd_str = ' '.join(cmd + files)
print(f'Executing: {cmd_str}')
sys.exit(subprocess.run(cmd + files).returncode)
if __name__ == '__main__':
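After this change mkuf2.py is only a compatibility shim: it translates the deprecated --chip-id value back to a chip name and delegates the actual UF2 generation to esptool's merge_bin command with the uf2 output format. Written out for an illustrative esp32s3 project (addresses, file names and paths are made up; the flags are the ones assembled in the shim above), the delegated command is roughly:

import subprocess
import sys

cmd = [
    sys.executable, '-m', 'esptool',
    '--chip', 'esp32s3',
    'merge_bin',
    '--format', 'uf2',
    '-o', 'build/uf2.bin',
    '--md5-disable',        # only present when --md5-disable was requested
    # <address> <file> pairs taken from flasher_args.json or the command line:
    '0x0', 'build/bootloader/bootloader.bin',
    '0x8000', 'build/partition_table/partition-table.bin',
    '0x10000', 'build/app.bin',
]
print('Executing:', ' '.join(cmd))
sys.exit(subprocess.run(cmd).returncode)

The old 'has been written' message from the removed writer therefore disappears; the updated build test below matches esptool's 'ready to be flashed with any ESP USB Bridge' output instead.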

View file

@@ -158,14 +158,14 @@ def test_build_dfu(idf_py: IdfPyFunc) -> None:
def test_build_uf2(idf_py: IdfPyFunc) -> None:
logging.info('UF2 build works')
ret = idf_py('uf2')
assert 'build/uf2.bin" has been written.' in ret.stdout, 'UF2 build should work for esp32'
assert 'build/uf2.bin, ready to be flashed with any ESP USB Bridge' in ret.stdout, 'UF2 build should work for esp32'
assert_built(BOOTLOADER_BINS + APP_BINS + PARTITION_BIN + ['build/uf2.bin'])
ret = idf_py('uf2-app')
assert 'build/uf2-app.bin" has been written.' in ret.stdout, 'UF2 build should work for application binary'
assert 'build/uf2-app.bin, ready to be flashed with any ESP USB Bridge' in ret.stdout, 'UF2 build should work for application binary'
assert_built(['build/uf2-app.bin'])
idf_py('set-target', 'esp32s2')
ret = idf_py('uf2')
assert 'build/uf2.bin" has been written.' in ret.stdout, 'UF2 build should work for esp32s2'
assert 'build/uf2.bin, ready to be flashed with any ESP USB Bridge' in ret.stdout, 'UF2 build should work for esp32s2'
assert_built(BOOTLOADER_BINS + APP_BINS + PARTITION_BIN + ['build/uf2.bin'])

View file

@@ -1,208 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from __future__ import unicode_literals
import filecmp
import hashlib
import os
import random
import struct
import sys
import tempfile
import time
import unittest
from functools import partial
from io import open
from itertools import chain
import pexpect
current_dir = os.path.dirname(os.path.realpath(__file__))
mkuf2_dir = os.path.abspath(os.path.join(current_dir, '..'))
mkuf2_path = os.path.join(mkuf2_dir, 'mkuf2.py')
try:
import mkuf2
except ImportError:
sys.path.append(mkuf2_dir)
import mkuf2
class UF2Block(object):
def __init__(self, bs):
self.length = len(bs)
# See https://github.com/microsoft/uf2 for the format
first_part = '<' + 'I' * 8
# payload is between
last_part = '<I'
first_part_len = struct.calcsize(first_part)
last_part_len = struct.calcsize(last_part)
(self.magicStart0, self.magicStart1, self.flags, self.targetAddr, self.payloadSize, self.blockNo,
self.numBlocks, self.familyID) = struct.unpack(first_part, bs[:first_part_len])
self.data = bs[first_part_len:-last_part_len]
(self.magicEnd, ) = struct.unpack(last_part, bs[-last_part_len:])
def __len__(self):
return self.length
class UF2BlockReader(object):
def __init__(self, f_name):
self.f_name = f_name
def get(self):
with open(self.f_name, 'rb') as f:
for chunk in iter(partial(f.read, mkuf2.UF2Writer.UF2_BLOCK_SIZE), b''):
yield UF2Block(chunk)
class BinaryWriter(object):
def __init__(self, f_name):
self.f_name = f_name
def append(self, data):
# File is reopened several times in order to make sure that won't left open
with open(self.f_name, 'ab') as f:
f.write(data)
class BinaryTester(unittest.TestCase):
def generate_binary(self, size):
with tempfile.NamedTemporaryFile(delete=False) as f:
self.addCleanup(os.unlink, f.name)
for _ in range(size):
f.write(struct.pack('B', random.randrange(0, 1 << 8)))
return f.name
@staticmethod
def generate_chipID():
return random.randrange(0, 1 << 32)
def generate_uf2(self, chip_id, iter_addr_offset_tuples, chunk_size=None):
of_name = self.generate_binary(0)
com_args = [mkuf2_path, 'write',
'-o', of_name,
'--chip-id', hex(chip_id)]
com_args += [] if chunk_size is None else ['--chunk-size', str(chunk_size)]
file_args = list(chain(*[(str(addr), f) for addr, f in iter_addr_offset_tuples]))
p = pexpect.spawn(sys.executable, com_args + file_args, timeout=20)
self.addCleanup(p.terminate, force=True)
exp_list = ['Adding {} at {}'.format(f, hex(addr)) for addr, f in iter_addr_offset_tuples]
exp_list += ['"{}" has been written.'.format(of_name)]
for e in exp_list:
p.expect_exact(e)
# Do non-blocking wait instead of the blocking p.wait():
for _ in range(10):
if not p.isalive():
break
time.sleep(0.5)
# else: will be terminated during cleanup
return of_name
def process_blocks(self, uf2block, expected_chip_id):
flags = mkuf2.UF2Writer.UF2_FLAG_FAMILYID_PRESENT | mkuf2.UF2Writer.UF2_FLAG_MD5_PRESENT
parsed_binaries = []
block_list = [] # collect block numbers here
total_blocks = set() # collect total block numbers here
for block in UF2BlockReader(uf2block).get():
if block.blockNo == 0:
# new file has been detected
base_addr = block.targetAddr
current_addr = base_addr
binary_writer = BinaryWriter(self.generate_binary(0))
self.assertEqual(len(block), mkuf2.UF2Writer.UF2_BLOCK_SIZE)
self.assertEqual(block.magicStart0, mkuf2.UF2Writer.UF2_FIRST_MAGIC)
self.assertEqual(block.magicStart1, mkuf2.UF2Writer.UF2_SECOND_MAGIC)
self.assertEqual(block.flags & flags, flags)
self.assertEqual(len(block.data), mkuf2.UF2Writer.UF2_DATA_SIZE)
payload = block.data[:block.payloadSize]
md5_obj = hashlib.md5(payload)
md5_part = block.data[block.payloadSize:block.payloadSize + mkuf2.UF2Writer.UF2_MD5_PART_SIZE]
address, length = struct.unpack('<II', md5_part[:-md5_obj.digest_size])
md5sum = md5_part[-md5_obj.digest_size:]
self.assertEqual(address, block.targetAddr)
self.assertEqual(length, block.payloadSize)
self.assertEqual(md5sum, md5_obj.digest())
self.assertEqual(block.familyID, expected_chip_id)
self.assertEqual(block.magicEnd, mkuf2.UF2Writer.UF2_FINAL_MAGIC)
self.assertEqual(current_addr, block.targetAddr)
binary_writer.append(payload)
block_list.append(block.blockNo)
total_blocks.add(block.numBlocks)
if block.blockNo == block.numBlocks - 1:
self.assertEqual(block_list, list(range(block.numBlocks)))
# we have found all blocks and in the right order
self.assertEqual(total_blocks, {block.numBlocks}) # numBlocks are the same in all the blocks
del block_list[:]
total_blocks.clear()
parsed_binaries += [(base_addr, binary_writer.f_name)]
current_addr += block.payloadSize
return parsed_binaries
def common(self, t, chunk_size=None):
chip_id = self.generate_chipID()
parsed_t = self.process_blocks(self.generate_uf2(chip_id, t, chunk_size), chip_id)
self.assertEqual(len(t), len(parsed_t))
for (orig_addr, orig_fname), (addr, fname) in zip(t, parsed_t):
self.assertEqual(orig_addr, addr)
self.assertTrue(filecmp.cmp(orig_fname, fname))
def test_simple(self):
self.common([(0, self.generate_binary(1))])
def test_more_files(self):
self.common([(100, self.generate_binary(1)), (200, self.generate_binary(1))])
def test_larger_files(self):
self.common([(0x10, self.generate_binary(6)), (0x20, self.generate_binary(8))])
def test_boundaries(self):
self.common([(0x100, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE)),
(0x200, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE + 1)),
(0x300, self.generate_binary(mkuf2.UF2Writer.UF2_DATA_SIZE - 1))])
def test_files_with_more_blocks(self):
self.common([(0x100, self.generate_binary(3 * mkuf2.UF2Writer.UF2_DATA_SIZE)),
(0x200, self.generate_binary(2 * mkuf2.UF2Writer.UF2_DATA_SIZE + 1)),
(0x300, self.generate_binary(2 * mkuf2.UF2Writer.UF2_DATA_SIZE - 1))])
def test_very_large_files(self):
self.common([(0x100, self.generate_binary(20 * mkuf2.UF2Writer.UF2_DATA_SIZE + 5)),
(0x10000, self.generate_binary(50 * mkuf2.UF2Writer.UF2_DATA_SIZE + 100)),
(0x100000, self.generate_binary(100 * mkuf2.UF2Writer.UF2_DATA_SIZE))])
def test_chunk_size(self):
chunk_size = 256
self.common([(0x100, self.generate_binary(chunk_size)),
(0x200, self.generate_binary(chunk_size + 1)),
(0x300, self.generate_binary(chunk_size - 1))],
chunk_size)
if __name__ == '__main__':
unittest.main()
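With this IDF-side unit test removed, the repository no longer checks the produced blocks itself; the generation logic now lives in esptool. As a quick local sanity check of a generated image, one could still verify the block framing with a few lines of Python (the path is illustrative, and this is only a sketch, not a replacement for the deleted test):

import struct

UF2_MAGICS = (0x0A324655, 0x9E5D5157, 0x0AB16F30)

def check_uf2(path: str = 'build/uf2.bin') -> None:
    with open(path, 'rb') as f:
        data = f.read()
    assert data and len(data) % 512 == 0, 'a UF2 image is a sequence of 512-byte blocks'
    for offset in range(0, len(data), 512):
        block = data[offset:offset + 512]
        magic0, magic1 = struct.unpack('<2I', block[:8])
        (final_magic,) = struct.unpack('<I', block[-4:])
        assert (magic0, magic1, final_magic) == UF2_MAGICS, f'bad block at offset {offset}'

if __name__ == '__main__':
    check_uf2()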