#!/usr/bin/env python
#
# Copyright 2020 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility for packing one or more binary files, each placed at a given flash address, into a single
UF2 image. Every input file is split into 512-byte UF2 blocks which carry the target address, the
chip (family) ID and an MD5 checksum of the block's payload.
"""

from __future__ import division

import argparse
import hashlib
import json
import os
import struct
from functools import partial

from future.utils import iteritems

try:
    from itertools import izip as zip
except ImportError:
    # Python 3
    pass


def round_up_int_div(n, d):
    # equivalent to math.ceil(n / d)
    return (n + d - 1) // d


class UF2Writer(object):

    # The UF2 format is described here: https://github.com/microsoft/uf2
    UF2_BLOCK_SIZE = 512
    UF2_DATA_SIZE = 476  # size of the data area in each block; the max CHUNK_SIZE is this value reduced by the optional parts (currently only MD5_PART)
    UF2_MD5_PART_SIZE = 24
    UF2_FIRST_MAGIC = 0x0A324655
    UF2_SECOND_MAGIC = 0x9E5D5157
    UF2_FINAL_MAGIC = 0x0AB16F30
    UF2_FLAG_FAMILYID_PRESENT = 0x00002000
    UF2_FLAG_MD5_PRESENT = 0x00004000

    def __init__(self, chip_id, output_file, chunk_size):
        self.chip_id = chip_id
        self.CHUNK_SIZE = self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE if chunk_size is None else chunk_size
        self.f = open(output_file, 'wb')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.f:
            self.f.close()

    @staticmethod
    def _to_uint32(num):
        return struct.pack('<I', num)

    def _write_block(self, addr, chunk, len_chunk, block_no, blocks):
        assert len_chunk > 0
        assert len_chunk <= self.CHUNK_SIZE
        assert block_no < blocks
        # 32-byte block header: magic numbers, flags, target address, payload length,
        # sequential block number, total number of blocks and the family (chip) ID
        block = self._to_uint32(self.UF2_FIRST_MAGIC)
        block += self._to_uint32(self.UF2_SECOND_MAGIC)
        block += self._to_uint32(self.UF2_FLAG_FAMILYID_PRESENT | self.UF2_FLAG_MD5_PRESENT)
        block += self._to_uint32(addr)
        block += self._to_uint32(len_chunk)
        block += self._to_uint32(block_no)
        block += self._to_uint32(blocks)
        block += self._to_uint32(self.chip_id)
        block += chunk

        # optional MD5 part appended after the payload: target address, payload length
        # and the MD5 digest of the payload
        md5_part = self._to_uint32(addr)
        md5_part += self._to_uint32(len_chunk)
        md5_part += hashlib.md5(chunk).digest()
        assert len(md5_part) == self.UF2_MD5_PART_SIZE

        block += md5_part
        # pad the unused rest of the data area and close the block with the final magic number
        block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
        block += self._to_uint32(self.UF2_FINAL_MAGIC)
        assert len(block) == self.UF2_BLOCK_SIZE
        self.f.write(block)

    def add_file(self, addr, f_path):
        blocks = round_up_int_div(os.path.getsize(f_path), self.CHUNK_SIZE)
        with open(f_path, 'rb') as fin:
            a = addr
            # read the file in CHUNK_SIZE pieces; iter() with the b'' sentinel stops at EOF
            for i, chunk in enumerate(iter(partial(fin.read, self.CHUNK_SIZE), b'')):
                len_chunk = len(chunk)
                self._write_block(a, chunk, len_chunk, i, blocks)
                a += len_chunk


def action_write(args):
    with UF2Writer(args['chip_id'], args['output_file'], args['chunk_size']) as writer:
        for addr, f in args['files']:
            print('Adding {} at {:#x}'.format(f, addr))
            writer.add_file(addr, f)
        print('"{}" has been written.'.format(args['output_file']))


def main():
    parser = argparse.ArgumentParser()

    def four_byte_aligned(integer):
        return integer & 3 == 0

    def parse_chunk_size(string):
        num = int(string, 0)
        if not four_byte_aligned(num):
            raise argparse.ArgumentTypeError('Chunk size should be a 4-byte aligned number')
        return num

    def parse_chip_id(string):
        num = int(string, 16)
        if num < 0 or num > 0xFFFFFFFF:
            raise argparse.ArgumentTypeError('Chip ID should be a 4-byte unsigned integer')
        return num

    # Provision to add "info" command
    subparsers = parser.add_subparsers(dest='command')
    write_parser = subparsers.add_parser('write')
    write_parser.add_argument('-o', '--output-file',
                              help='Filename for storing the output UF2 image',
                              required=True)
    write_parser.add_argument('--chip-id',
                              required=True,
                              type=parse_chip_id,
                              help='Hexadecimal chip identifier')
    write_parser.add_argument('--chunk-size',
                              required=False,
                              type=parse_chunk_size,
                              default=None,
                              help='Size of the data payload stored in each 512 byte UF2 block. A common value is '
                                   '256. By default, the largest possible value will be used.')
    write_parser.add_argument('--json',
                              help='Optional file for loading "flash_files" dictionary with <address> <file> items')
    write_parser.add_argument('--bin',
                              help='Use only a subset of binaries from the JSON file, e.g. "partition_table '
                                   'bootloader app"',
                              nargs='*')
    write_parser.add_argument('files',
                              metavar='<address> <file>', help='Add <file> at <address>',
                              nargs='*')

    args = parser.parse_args()

    def check_file(file_name):
        if not os.path.isfile(file_name):
            raise RuntimeError('{} is not a regular file!'.format(file_name))
        return file_name

    def parse_addr(string):
        num = int(string, 0)
        if not four_byte_aligned(num):
            raise RuntimeError('{} is not a 4-byte aligned valid address'.format(string))
        return num

    files = []
    if args.files:
        files += [(parse_addr(addr), check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]

    if args.json:
        json_dir = os.path.dirname(os.path.abspath(args.json))

        def process_json_file(path):
            '''
            The input path is relative to json_dir. This function makes it relative to the current working
            directory.
            '''
            return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))

        with open(args.json) as f:
            json_content = json.load(f)

        if args.bin:
            try:
                bin_selection = [json_content[b] for b in args.bin]
                flash_dic = dict((x['offset'], x['file']) for x in bin_selection)
            except KeyError:
                print('Invalid binary was selected.')
                valid = [k if all(x in v for x in ('offset', 'file')) else None for k, v in iteritems(json_content)]
                print('Valid ones:', ' '.join(x for x in valid if x))
                exit(1)
        else:
            flash_dic = json_content['flash_files']

        files += [(parse_addr(addr), process_json_file(f_name)) for addr, f_name in iteritems(flash_dic)]

    files = sorted([(addr, f_name) for addr, f_name in iteritems(dict(files))],
                   key=lambda x: x[0])  # remove possible duplicates and sort based on the address

    cmd_args = {'output_file': args.output_file,
                'files': files,
                'chip_id': args.chip_id,
                'chunk_size': args.chunk_size,
                }

    {'write': action_write
     }[args.command](cmd_args)


if __name__ == '__main__':
    main()
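
# Example invocation (illustrative only: the script file name "mkuf2.py", the chip ID value and
# the binary names below are placeholders, not defined by this script; the chip ID has to be the
# UF2 family ID expected by the target's UF2 bootloader):
#
#   python mkuf2.py write --chip-id 0xABCD1234 -o flash.uf2 \
#       0x1000 bootloader.bin 0x8000 partition-table.bin 0x10000 app.bin
#
# Alternatively, the <address> <file> pairs can be taken from a JSON file containing a
# "flash_files" dictionary (for example the flasher_args.json generated by the ESP-IDF build):
#
#   python mkuf2.py write --chip-id 0xABCD1234 -o flash.uf2 --json build/flasher_args.json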