micropython: add micropython component

KY-zhang-X
2022-09-29 12:10:37 +08:00
parent 1514f1cb9b
commit dd76146324
2679 changed files with 354110 additions and 0 deletions


@@ -0,0 +1 @@
*.tar.gz binary


@@ -0,0 +1,8 @@
tinytest/.gitignore
tinytest/.travis.yml
tinytest/Makefile
tinytest/Makefile.arm-cortex-m3-qemu
tinytest/Makefile.avr
tinytest/TODO
tinytest/portable_demo.c
tinytest/tinytest_demo.c


@@ -0,0 +1,107 @@
#!/bin/bash
#
# Build firmware for ports.
#
# Requirements:
# - All toolchains must be in the PATH (arm-none-eabi-gcc, xtensa-lx106-elf)
# - IDF_PATH_V42 must be set
# - IDF_PATH_V44 must be set
# - MICROPY_AUTOBUILD_MICROPYTHON_REPO must be set to location of micropython repository
# - MICROPY_AUTOBUILD_MAKE must be set to the make command to use, eg "make -j2"
#
# Optional settings:
# - MICROPY_AUTOBUILD_REMOTE_MACHINE can be set to a remote ssh machine to copy files to
# - MICROPY_AUTOBUILD_REMOTE_DIR can be set to destination directory on remote machine
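#
# Example invocation (illustrative only; the script name, paths and job count
# below are assumptions, not taken from this repository):
#   IDF_PATH_V42=~/esp-idf-v4.2 IDF_PATH_V44=~/esp-idf-v4.4 \
#   MICROPY_AUTOBUILD_MICROPYTHON_REPO=~/micropython \
#   MICROPY_AUTOBUILD_MAKE="make -j2" ./autobuild.sh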
if [ ! -d "$IDF_PATH_V42" ]; then
echo "must set IDF_PATH_V42"
exit 1
fi
if [ ! -d "$IDF_PATH_V44" ]; then
echo "must set IDF_PATH_V44"
exit 1
fi
if [ ! -d "$MICROPY_AUTOBUILD_MICROPYTHON_REPO" ]; then
echo "must set MICROPY_AUTOBUILD_MICROPYTHON_REPO"
exit 1
fi
if [ -z "$MICROPY_AUTOBUILD_MAKE" ]; then
echo "must set MICROPY_AUTOBUILD_MAKE"
exit 1
fi
########################################
# Initialisation
# get directory of this script for access to other build scripts
AUTODIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# source additional functions
source ${AUTODIR}/build-boards.sh
# make local directory to put firmware
LOCAL_FIRMWARE=/tmp/autobuild-firmware-$$
mkdir -p ${LOCAL_FIRMWARE}
# get latest MicroPython
git -C ${MICROPY_AUTOBUILD_MICROPYTHON_REPO} pull
git -C ${MICROPY_AUTOBUILD_MICROPYTHON_REPO} submodule update --init
git -C ${MICROPY_AUTOBUILD_MICROPYTHON_REPO}/lib/pico-sdk submodule update --init
########################################
# Build all firmware
pushd ${MICROPY_AUTOBUILD_MICROPYTHON_REPO}
# build cross compiler
make -C mpy-cross
# make the firmware tag
FW_DATE=$(date '+%Y%m%d')
FW_GIT="$(git describe --dirty || echo unknown)"
FW_TAG="-$FW_DATE-unstable-$FW_GIT"
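# e.g. FW_TAG might expand to "-20220929-unstable-v1.19.1-100-g0123abc" (illustrative values)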
# build new firmware
cd ports/cc3200
${AUTODIR}/build-cc3200-latest.sh ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../esp8266
${AUTODIR}/build-esp8266-latest.sh ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../esp32
(source ${IDF_PATH_V42}/export.sh && build_esp32_boards ${FW_TAG} ${LOCAL_FIRMWARE})
(source ${IDF_PATH_V44}/export.sh && build_esp32_boards ${FW_TAG} ${LOCAL_FIRMWARE})
cd ../mimxrt
build_mimxrt_boards ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../renesas-ra
build_renesas_ra_boards ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../rp2
build_rp2_boards ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../samd
build_samd_boards ${FW_TAG} ${LOCAL_FIRMWARE}
cd ../stm32
build_stm32_boards ${FW_TAG} ${LOCAL_FIRMWARE}
${AUTODIR}/build-stm32-extra.sh ${FW_TAG} ${LOCAL_FIRMWARE}
popd
########################################
# Copy firmware to remote machine
if [ -z "$MICROPY_AUTOBUILD_REMOTE_MACHINE" -o -z "$MICROPY_AUTOBUILD_REMOTE_DIR" ]; then
echo "No remote given, leaving firmware in ${LOCAL_FIRMWARE}"
exit 0
fi
# copy new firmware to remote machine
scp ${LOCAL_FIRMWARE}/* ${MICROPY_AUTOBUILD_REMOTE_MACHINE}:${MICROPY_AUTOBUILD_REMOTE_DIR}/
# remove old firmware
${AUTODIR}/remove_old_firmware.py ${MICROPY_AUTOBUILD_REMOTE_MACHINE} ${MICROPY_AUTOBUILD_REMOTE_DIR}
########################################
# Clean up
/bin/rm -v ${LOCAL_FIRMWARE}/*
/bin/rmdir ${LOCAL_FIRMWARE}


@@ -0,0 +1,120 @@
#!/bin/bash
#
# The functions in this file can be run independently to build boards.
# For example:
#
# $ source build-boards.sh
# $ MICROPY_AUTOBUILD_MAKE=make build_rp2_boards -latest /tmp
function build_board {
# check/get parameters
if [ $# -lt 4 ]; then
echo "usage: $0 <board-json-file> <fw-tag> <dest-dir> <exts...>"
return 1
fi
board_json=$1
fw_tag=$2
dest_dir=$3
shift
shift
shift
board=$(echo $board_json | awk -F '/' '{ print $2 }')
descr=$(cat $board_json | python3 -c "import json,sys; print(json.load(sys.stdin).get('id', '$board'))")
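    # e.g. (illustrative) boards/PYBV11/board.json gives board=PYBV11, and descr
    # falls back to the board name unless the json provides an "id" field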
build_dir=/tmp/micropython-build-$board
echo "building $descr $board"
$MICROPY_AUTOBUILD_MAKE BOARD=$board BUILD=$build_dir && (
for ext in $@; do
dest=$dest_dir/$descr$fw_tag.$ext
if [ -r $build_dir/firmware.$ext ]; then
mv $build_dir/firmware.$ext $dest
elif [ -r $build_dir/micropython.$ext ]; then
# esp32 has micropython.elf, etc
mv $build_dir/micropython.$ext $dest
fi
done
)
rm -rf $build_dir
}
function build_boards {
# check/get parameters
if [ $# -lt 4 ]; then
echo "usage: $0 <check-file> <fw-tag> <dest-dir> <exts...>"
return 1
fi
check_file=$1
shift
# check we are in the correct directory
if [ ! -r $check_file ]; then
echo "must be in directory containing $check_file"
return 1
fi
# build all boards that have a board.json file
for board_json in $(find boards/ -name board.json | sort); do
build_board $board_json $@
done
}
function build_esp32_boards {
# check/get parameters
if [ $# != 2 ]; then
echo "usage: $0 <fw-tag> <dest-dir>"
return 1
fi
fw_tag=$1
dest_dir=$2
# check we are in the correct directory
if [ ! -r modesp32.c ]; then
echo "must be in esp32 directory"
return 1
fi
# build the boards, based on the IDF version
for board_json in $(find boards/ -name board.json | sort); do
mcu=$(cat $board_json | python3 -c "import json,sys; print(json.load(sys.stdin).get('mcu', 'unknown'))")
if idf.py --version | grep -q v4.2; then
if [ $mcu = esp32 ]; then
# build standard esp32-based boards with IDF v4.2
if echo $board_json | grep -q GENERIC; then
# traditionally, GENERIC and GENERIC_SPIRAM boards used manifest_release.py
MICROPY_AUTOBUILD_MAKE="$MICROPY_AUTOBUILD_MAKE FROZEN_MANIFEST=$(pwd)/boards/manifest_release.py" build_board $board_json $fw_tag $dest_dir bin elf map
else
build_board $board_json $fw_tag $dest_dir bin elf map
fi
fi
else
if [ $mcu != esp32 ]; then
# build esp32-s2/s3/c3 based boards with IDF v4.4+
build_board $board_json $fw_tag $dest_dir bin elf map uf2
fi
fi
done
}
function build_mimxrt_boards {
build_boards modmimxrt.c $1 $2 bin hex
}
function build_renesas_ra_boards {
build_boards ra_it.c $1 $2 hex
}
function build_rp2_boards {
build_boards modrp2.c $1 $2 uf2
}
function build_samd_boards {
build_boards samd_soc.c $1 $2 uf2
}
function build_stm32_boards {
build_boards modpyb.c $1 $2 dfu hex
}


@@ -0,0 +1,32 @@
#!/bin/bash
# function for building firmware
function do_build() {
descr=$1
board=$2
shift
shift
echo "building $descr $board"
build_dir=/tmp/cc3200-build-$board
$MICROPY_AUTOBUILD_MAKE $@ BTARGET=application BOARD=$board BUILD=$build_dir || exit 1
zip $dest_dir/$descr$fw_tag.zip $build_dir/mcuimg.bin
rm -rf $build_dir
}
# check/get parameters
if [ $# != 2 ]; then
echo "usage: $0 <fw-tag> <dest-dir>"
exit 1
fi
fw_tag=$1
dest_dir=$2
# check we are in the correct directory
if [ ! -r application.mk ]; then
echo "must be in cc3200 directory"
exit 1
fi
# build the versions
do_build wipy WIPY


@@ -0,0 +1,58 @@
#!/usr/bin/env python3
import glob
import json
import os
import sys
def main(repo_path, output_path):
boards_index = []
board_ids = set()
for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")):
# Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11").
board_dir = os.path.dirname(board_json)
# Relative path to the port (e.g. "ports/stm32")
port_dir = os.path.dirname(os.path.dirname(board_dir))
with open(board_json, "r") as f:
blob = json.load(f)
# Use "id" if specified, otherwise default to board dir (e.g. "PYBV11").
        # Boards may override the ID to preserve their historical build names.
blob["id"] = blob.get("id", os.path.basename(board_dir))
# Check for duplicate board IDs.
if blob["id"] in board_ids:
print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr)
board_ids.add(blob["id"])
# Add in default fields.
blob["port"] = os.path.basename(port_dir)
blob["build"] = os.path.basename(board_dir)
boards_index.append(blob)
# Create the board markdown, which is the concatenation of the
# default "board.md" file (if exists), as well as any flashing
# instructions.
board_markdown = os.path.join(board_dir, "board.md")
with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f:
if os.path.exists(board_markdown):
with open(board_markdown, "r") as fin:
f.write(fin.read())
if blob["deploy"]:
f.write("\n\n## Installation instructions\n")
for deploy in blob["deploy"]:
with open(os.path.join(board_dir, deploy), "r") as fin:
f.write(fin.read())
# Write the full index for the website to load.
with open(os.path.join(output_path, "index.json"), "w") as f:
json.dump(boards_index, f, indent=4, sort_keys=True)
f.write("\n")
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2])
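# Illustrative invocation (the script name and paths here are assumptions):
#   python3 build-board-index.py ~/micropython /tmp/board-index
# which writes /tmp/board-index/index.json plus one <id>.md file per board.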


@@ -0,0 +1,61 @@
#!/bin/bash
PYTHON3=python3
yaota8266=$HOME/yaota8266
# for debugging
#exec &> /tmp/esp-log-$$.txt
# function for building firmware
function do_build() {
descr=$1
board=$2
shift
shift
echo "building $descr $board"
build_dir=/tmp/esp8266-build-$board
$MICROPY_AUTOBUILD_MAKE $@ BOARD=$board BUILD=$build_dir || exit 1
mv $build_dir/firmware-combined.bin $dest_dir/$descr$fw_tag.bin
mv $build_dir/firmware.elf $dest_dir/$descr$fw_tag.elf
mv $build_dir/firmware.map $dest_dir/$descr$fw_tag.map
rm -rf $build_dir
}
function do_build_ota() {
descr=$1
board=$2
shift
shift
echo "building $descr $board"
build_dir=/tmp/esp8266-build-$board
$MICROPY_AUTOBUILD_MAKE $@ BOARD=$board BUILD=$build_dir || exit 1
cat $yaota8266/yaota8266.bin $build_dir/firmware-ota.bin > $dest_dir/$descr$fw_tag.bin
pushd $yaota8266/ota-client
$PYTHON3 ota_client.py sign $build_dir/firmware-ota.bin
popd
mv $build_dir/firmware-ota.bin.ota $dest_dir/$descr$fw_tag.ota
mv $build_dir/firmware.elf $dest_dir/$descr$fw_tag.elf
mv $build_dir/firmware.map $dest_dir/$descr$fw_tag.map
rm -rf $build_dir
}
# check/get parameters
if [ $# != 2 ]; then
echo "usage: $0 <fw-tag> <dest-dir>"
exit 1
fi
fw_tag=$1
dest_dir=$2
# check we are in the correct directory
if [ ! -r boards/esp8266_common.ld ]; then
echo "must be in esp8266 directory"
exit 1
fi
# build the versions
do_build esp8266 GENERIC
do_build esp8266-512k GENERIC_512K
do_build esp8266-1m GENERIC_1M
do_build_ota esp8266-ota GENERIC_1M ota


@@ -0,0 +1,47 @@
#!/bin/bash
# Build additional variants of pyboard firmware (base variants are built by build-boards.sh).
# function for building firmware
function do_build() {
descr=$1
board=$2
shift
shift
echo "building $descr $board"
build_dir=/tmp/stm-build-$board
$MICROPY_AUTOBUILD_MAKE $@ BOARD=$board BUILD=$build_dir || exit 1
mv $build_dir/firmware.dfu $dest_dir/$descr$fw_tag.dfu
mv $build_dir/firmware.hex $dest_dir/$descr$fw_tag.hex
rm -rf $build_dir
}
# check/get parameters
if [ $# != 2 ]; then
echo "usage: $0 <fw-tag> <dest-dir>"
exit 1
fi
fw_tag=$1
dest_dir=$2
# check we are in the correct directory
if [ ! -r modpyb.c ]; then
echo "must be in stm directory"
exit 1
fi
# build the versions
do_build pybv3 PYBV3
do_build pybv3-network PYBV3 MICROPY_PY_NETWORK_WIZNET5K=5200 MICROPY_PY_CC3K=1
do_build pybv10-dp PYBV10 MICROPY_FLOAT_IMPL=double
do_build pybv10-thread PYBV10 CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pybv10-dp-thread PYBV10 MICROPY_FLOAT_IMPL=double CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pybv10-network PYBV10 MICROPY_PY_NETWORK_WIZNET5K=5200 MICROPY_PY_CC3K=1
do_build pybv11-dp PYBV11 MICROPY_FLOAT_IMPL=double
do_build pybv11-thread PYBV11 CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pybv11-dp-thread PYBV11 MICROPY_FLOAT_IMPL=double CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pybv11-network PYBV11 MICROPY_PY_NETWORK_WIZNET5K=5200 MICROPY_PY_CC3K=1
do_build pyblitev10-dp PYBLITEV10 MICROPY_FLOAT_IMPL=double
do_build pyblitev10-thread PYBLITEV10 CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pyblitev10-dp-thread PYBLITEV10 MICROPY_FLOAT_IMPL=double CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
do_build pyblitev10-network PYBLITEV10 MICROPY_PY_NETWORK_WIZNET5K=5200 MICROPY_PY_CC3K=1


@@ -0,0 +1,72 @@
#!/usr/bin/env python3
import re, subprocess, sys
DEBUG = False
DRY_RUN = False
NUM_KEEP_PER_BOARD = 4
def main():
ssh_machine = sys.argv[1]
ssh_firmware_dir = sys.argv[2]
# SSH to get list of existing files.
p = subprocess.run(
["ssh", ssh_machine, "find", ssh_firmware_dir, "-name", "\\*-unstable-v\\*"],
capture_output=True,
)
if p.returncode != 0:
print(p.stderr)
return
all_files = p.stdout.split(b"\n")
# Parse all files to organise into boards/date/version.
boards = {}
for file in all_files:
m = re.match(
rb"([a-z/.]+)/([A-Za-z0-9_-]+)-(20[0-9]{6})-unstable-(v[0-9.-]+-g[0-9a-f]+).",
file,
)
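        # e.g. this matches a remote path such as (illustrative):
        #   b"/srv/firmware/PYBV11-20220929-unstable-v1.19.1-100-g0123abc.dfu"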
if not m:
continue
dir, board, date, version = m.groups()
if board not in boards:
boards[board] = {}
if (date, version) not in boards[board]:
boards[board][(date, version)] = []
boards[board][(date, version)].append(file)
# Collect files to remove based on date and version.
remove = []
for board in boards.values():
filelist = [(date, version, files) for (date, version), files in board.items()]
filelist.sort(reverse=True)
keep = []
for date, version, files in filelist:
if keep and version == keep[-1]:
remove.extend(files)
elif len(keep) >= NUM_KEEP_PER_BOARD:
remove.extend(files)
else:
keep.append(version)
if DEBUG:
all_files.sort(reverse=True)
for file in all_files:
print(file, file in remove)
print(len(remove), "/", len(all_files))
# Do removal of files.
for file in remove:
file = str(file, "ascii")
print("remove:", file)
if not DRY_RUN:
p = subprocess.run(["ssh", ssh_machine, "/bin/rm", file], capture_output=True)
if p.returncode != 0:
print(p.stderr)
if __name__ == "__main__":
main()


@@ -0,0 +1,262 @@
#!/usr/bin/env python3
"""
This is a middle-processor for MicroPython source files. It takes the output
of the C preprocessor, has the option to change it, then feeds this into the
C compiler.
It currently has the ability to reorder static hash tables so they are actually
hashed, resulting in faster lookup times at runtime.
To use, configure the Python variables below, and add the following line to the
Makefile:
CFLAGS += -no-integrated-cpp -B$(shell pwd)/../tools
"""
import sys
import os
import re
################################################################################
# these are the configuration variables
# TODO somehow make them externally configurable
# this is the path to the true C compiler
cc1_path = '/usr/lib/gcc/x86_64-unknown-linux-gnu/5.3.0/cc1'
#cc1_path = '/usr/lib/gcc/arm-none-eabi/5.3.0/cc1'
# this must be the same as MICROPY_QSTR_BYTES_IN_HASH
bytes_in_qstr_hash = 2
# this must be 1 or more (can be a decimal)
# larger values use more code space but yield faster lookups
table_size_mult = 1
# these control output during processing
print_stats = True
print_debug = False
# end configuration variables
################################################################################
# precompile regexs
re_preproc_line = re.compile(r'# [0-9]+ ')
re_map_entry = re.compile(r'\{.+?\(MP_QSTR_([A-Za-z0-9_]+)\).+\},')
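# A typical entry matched by re_map_entry looks like (illustrative):
#   { MP_ROM_QSTR(MP_QSTR_append), MP_ROM_PTR(&list_append_obj) },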
re_mp_obj_dict_t = re.compile(r'(?P<head>(static )?const mp_obj_dict_t (?P<id>[a-z0-9_]+) = \{ \.base = \{&mp_type_dict\}, \.map = \{ \.all_keys_are_qstrs = 1, \.is_fixed = 1, \.is_ordered = )1(?P<tail>, \.used = .+ };)$')
re_mp_map_t = re.compile(r'(?P<head>(static )?const mp_map_t (?P<id>[a-z0-9_]+) = \{ \.all_keys_are_qstrs = 1, \.is_fixed = 1, \.is_ordered = )1(?P<tail>, \.used = .+ };)$')
re_mp_rom_map_elem_t = re.compile(r'static const mp_rom_map_elem_t [a-z_0-9]+\[\] = {$')
# this must match the equivalent function in qstr.c
def compute_hash(qstr):
hash = 5381
for char in qstr:
hash = (hash * 33) ^ ord(char)
    # Make sure that a valid hash is never zero; zero means "hash not computed"
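    # With bytes_in_qstr_hash = 2 the result is therefore always in 1..0xFFFF.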
return (hash & ((1 << (8 * bytes_in_qstr_hash)) - 1)) or 1
# this algo must match the equivalent in map.c
def hash_insert(map, key, value):
hash = compute_hash(key)
pos = hash % len(map)
start_pos = pos
if print_debug:
print(' insert %s: start at %u/%u -- ' % (key, pos, len(map)), end='')
while True:
if map[pos] is None:
# found empty slot, so key is not in table
if print_debug:
print('put at %u' % pos)
map[pos] = (key, value)
return
else:
# not yet found, keep searching
if map[pos][0] == key:
raise AssertionError("duplicate key '%s'" % (key,))
pos = (pos + 1) % len(map)
assert pos != start_pos
def hash_find(map, key):
hash = compute_hash(key)
pos = hash % len(map)
start_pos = pos
attempts = 0
while True:
attempts += 1
if map[pos] is None:
return attempts, None
elif map[pos][0] == key:
return attempts, map[pos][1]
else:
pos = (pos + 1) % len(map)
if pos == start_pos:
return attempts, None
def process_map_table(file, line, output):
output.append(line)
# consume all lines that are entries of the table and concat them
# (we do it this way because there can be multiple entries on one line)
table_contents = []
while True:
line = file.readline()
if len(line) == 0:
print('unexpected end of input')
sys.exit(1)
line = line.strip()
if len(line) == 0:
# empty line
continue
if re_preproc_line.match(line):
# preprocessor line number comment
continue
if line == '};':
# end of table (we assume it appears on a single line)
break
table_contents.append(line)
# make combined string of entries
entries_str = ''.join(table_contents)
# split into individual entries
entries = []
while entries_str:
# look for single entry, by matching nested braces
match = None
if entries_str[0] == '{':
nested_braces = 0
for i in range(len(entries_str)):
if entries_str[i] == '{':
nested_braces += 1
elif entries_str[i] == '}':
nested_braces -= 1
if nested_braces == 0:
match = re_map_entry.match(entries_str[:i + 2])
break
if not match:
print('unknown line in table:', entries_str)
sys.exit(1)
# extract single entry
line = match.group(0)
qstr = match.group(1)
entries_str = entries_str[len(line):].lstrip()
# add the qstr and the whole line to list of all entries
entries.append((qstr, line))
# sort entries so hash table construction is deterministic
entries.sort()
# create hash table
map = [None] * int(len(entries) * table_size_mult)
for qstr, line in entries:
# We assume that qstr does not have any escape sequences in it.
# This is reasonably safe, since keys in a module or class dict
# should be standard identifiers.
# TODO verify this and raise an error if escape sequence found
hash_insert(map, qstr, line)
# compute statistics
total_attempts = 0
for qstr, _ in entries:
attempts, line = hash_find(map, qstr)
assert line is not None
if print_debug:
print(' %s lookup took %u attempts' % (qstr, attempts))
total_attempts += attempts
if len(entries):
stats = len(map), len(entries) / len(map), total_attempts / len(entries)
else:
stats = 0, 0, 0
if print_debug:
print(' table stats: size=%d, load=%.2f, avg_lookups=%.1f' % stats)
# output hash table
for row in map:
if row is None:
output.append('{ 0, 0 },\n')
else:
output.append(row[1] + '\n')
output.append('};\n')
# skip to next non-blank line
while True:
line = file.readline()
if len(line) == 0:
print('unexpected end of input')
sys.exit(1)
line = line.strip()
if len(line) == 0:
continue
break
# transform the is_ordered param from 1 to 0
match = re_mp_obj_dict_t.match(line)
if match is None:
match = re_mp_map_t.match(line)
if match is None:
print('expecting mp_obj_dict_t or mp_map_t definition')
print(output[0])
print(line)
sys.exit(1)
line = match.group('head') + '0' + match.group('tail') + '\n'
output.append(line)
return (match.group('id'),) + stats
def process_file(filename):
output = []
file_changed = False
with open(filename, 'rt') as f:
while True:
line = f.readline()
if not line:
break
if re_mp_rom_map_elem_t.match(line):
file_changed = True
stats = process_map_table(f, line, output)
if print_stats:
print(' [%s: size=%d, load=%.2f, avg_lookups=%.1f]' % stats)
else:
output.append(line)
if file_changed:
if print_debug:
print(' modifying static maps in', output[0].strip())
with open(filename, 'wt') as f:
for line in output:
f.write(line)
def main():
# run actual C compiler
# need to quote args that have special characters in them
def quote(s):
if s.find('<') != -1 or s.find('>') != -1:
return "'" + s + "'"
else:
return s
ret = os.system(cc1_path + ' ' + ' '.join(quote(s) for s in sys.argv[1:]))
if ret != 0:
ret = (ret & 0x7f) or 127 # make it in range 0-127, but non-zero
sys.exit(ret)
if sys.argv[1] == '-E':
# CPP has been run, now do our processing stage
for i, arg in enumerate(sys.argv):
if arg == '-o':
return process_file(sys.argv[i + 1])
print('%s: could not find "-o" option' % (sys.argv[0],))
sys.exit(1)
elif sys.argv[1] == '-fpreprocessed':
# compiler has been run, nothing more to do
return
else:
# unknown processing stage
print('%s: unknown first option "%s"' % (sys.argv[0], sys.argv[1]))
sys.exit(1)
if __name__ == '__main__':
main()


@@ -0,0 +1,699 @@
#!/bin/bash
if which nproc > /dev/null; then
MAKEOPTS="-j$(nproc)"
else
MAKEOPTS="-j$(sysctl -n hw.ncpu)"
fi
########################################################################################
# general helper functions
function ci_gcc_arm_setup {
sudo apt-get install gcc-arm-none-eabi libnewlib-arm-none-eabi
arm-none-eabi-gcc --version
}
########################################################################################
# code formatting
function ci_code_formatting_setup {
sudo apt-add-repository --yes --update ppa:pybricks/ppa
sudo apt-get install uncrustify
pip3 install black
uncrustify --version
black --version
}
function ci_code_formatting_run {
tools/codeformat.py -v
}
########################################################################################
# commit formatting
function ci_commit_formatting_run {
git remote add upstream https://github.com/micropython/micropython.git
git fetch --depth=100 upstream master
    # For a PR, upstream/master..HEAD ends with a merge commit into master; exclude that one.
tools/verifygitlog.py -v upstream/master..HEAD --no-merges
}
########################################################################################
# code size
function ci_code_size_setup {
sudo apt-get update
sudo apt-get install gcc-multilib
gcc --version
ci_gcc_arm_setup
}
function ci_code_size_build {
# starts off at either the ref/pull/N/merge FETCH_HEAD, or the current branch HEAD
git checkout -b pull_request # save the current location
git remote add upstream https://github.com/micropython/micropython.git
git fetch --depth=100 upstream master
# build reference, save to size0
# ignore any errors with this build, in case master is failing
git checkout `git merge-base --fork-point upstream/master pull_request`
git show -s
tools/metrics.py clean bm
tools/metrics.py build bm | tee ~/size0 || true
# build PR/branch, save to size1
git checkout pull_request
git log upstream/master..HEAD
tools/metrics.py clean bm
tools/metrics.py build bm | tee ~/size1
}
########################################################################################
# .mpy file format
function ci_mpy_format_setup {
sudo pip3 install pyelftools
}
function ci_mpy_format_test {
# Test mpy-tool.py dump feature on bytecode
python2 ./tools/mpy-tool.py -xd ports/minimal/frozentest.mpy
python3 ./tools/mpy-tool.py -xd ports/minimal/frozentest.mpy
# Test mpy-tool.py dump feature on native code
make -C examples/natmod/features1
./tools/mpy-tool.py -xd examples/natmod/features1/features1.mpy
}
########################################################################################
# ports/cc3200
function ci_cc3200_setup {
ci_gcc_arm_setup
}
function ci_cc3200_build {
make ${MAKEOPTS} -C ports/cc3200 BTARGET=application BTYPE=release
make ${MAKEOPTS} -C ports/cc3200 BTARGET=bootloader BTYPE=release
}
########################################################################################
# ports/esp32
function ci_esp32_setup_helper {
pip3 install pyelftools
git clone https://github.com/espressif/esp-idf.git
git -C esp-idf checkout $1
git -C esp-idf submodule update --init \
components/bt/host/nimble/nimble \
components/esp_wifi \
components/esptool_py/esptool \
components/lwip/lwip \
components/mbedtls/mbedtls
if [ -d esp-idf/components/bt/controller/esp32 ]; then
git -C esp-idf submodule update --init \
components/bt/controller/lib_esp32 \
components/bt/controller/lib_esp32c3_family
else
git -C esp-idf submodule update --init \
components/bt/controller/lib
fi
./esp-idf/install.sh
}
function ci_esp32_idf402_setup {
ci_esp32_setup_helper v4.0.2
}
function ci_esp32_idf44_setup {
ci_esp32_setup_helper v4.4
}
function ci_esp32_build {
source esp-idf/export.sh
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/esp32 submodules
make ${MAKEOPTS} -C ports/esp32 \
USER_C_MODULES=../../../examples/usercmodule/micropython.cmake \
FROZEN_MANIFEST=$(pwd)/ports/esp32/boards/manifest_test.py
if [ -d $IDF_PATH/components/esp32c3 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_C3
fi
if [ -d $IDF_PATH/components/esp32s2 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_S2
fi
if [ -d $IDF_PATH/components/esp32s3 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_S3
fi
# Test building native .mpy with xtensawin architecture.
ci_native_mpy_modules_build xtensawin
}
########################################################################################
# ports/esp8266
function ci_esp8266_setup {
sudo pip install pyserial esptool==3.3.1
wget https://github.com/jepler/esp-open-sdk/releases/download/2018-06-10/xtensa-lx106-elf-standalone.tar.gz
zcat xtensa-lx106-elf-standalone.tar.gz | tar x
# Remove this esptool.py so pip version is used instead
rm xtensa-lx106-elf/bin/esptool.py
}
function ci_esp8266_path {
echo $(pwd)/xtensa-lx106-elf/bin
}
function ci_esp8266_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/esp8266 submodules
make ${MAKEOPTS} -C ports/esp8266
make ${MAKEOPTS} -C ports/esp8266 BOARD=GENERIC_512K
make ${MAKEOPTS} -C ports/esp8266 BOARD=GENERIC_1M
}
########################################################################################
# ports/javascript
function ci_javascript_setup {
git clone https://github.com/emscripten-core/emsdk.git
(cd emsdk && ./emsdk install latest && ./emsdk activate latest)
}
function ci_javascript_build {
source emsdk/emsdk_env.sh
make ${MAKEOPTS} -C ports/javascript
}
function ci_javascript_run_tests {
# This port is very slow at running, so only run a few of the tests.
(cd tests && MICROPY_MICROPYTHON=../ports/javascript/node_run.sh ./run-tests.py -j1 basics/builtin_*.py)
}
########################################################################################
# ports/mimxrt
function ci_mimxrt_setup {
ci_gcc_arm_setup
}
function ci_mimxrt_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/mimxrt submodules
make ${MAKEOPTS} -C ports/mimxrt BOARD=MIMXRT1020_EVK
make ${MAKEOPTS} -C ports/mimxrt BOARD=TEENSY40
}
########################################################################################
# ports/nrf
function ci_nrf_setup {
ci_gcc_arm_setup
}
function ci_nrf_build {
ports/nrf/drivers/bluetooth/download_ble_stack.sh s140_nrf52_6_1_1
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/nrf submodules
make ${MAKEOPTS} -C ports/nrf BOARD=pca10040
make ${MAKEOPTS} -C ports/nrf BOARD=microbit
make ${MAKEOPTS} -C ports/nrf BOARD=pca10056 SD=s140
make ${MAKEOPTS} -C ports/nrf BOARD=pca10090
}
########################################################################################
# ports/powerpc
function ci_powerpc_setup {
sudo apt-get update
sudo apt-get install gcc-powerpc64le-linux-gnu libc6-dev-ppc64el-cross
}
function ci_powerpc_build {
make ${MAKEOPTS} -C ports/powerpc UART=potato
make ${MAKEOPTS} -C ports/powerpc UART=lpc_serial
}
########################################################################################
# ports/qemu-arm
function ci_qemu_arm_setup {
ci_gcc_arm_setup
sudo apt-get update
sudo apt-get install qemu-system
qemu-system-arm --version
}
function ci_qemu_arm_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/qemu-arm CFLAGS_EXTRA=-DMP_ENDIANNESS_BIG=1
make ${MAKEOPTS} -C ports/qemu-arm clean
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test test
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test clean
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test BOARD=sabrelite test
}
########################################################################################
# ports/renesas-ra
function ci_renesas_ra_setup {
ci_gcc_arm_setup
}
function ci_renesas_ra_board_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/renesas-ra submodules
make ${MAKEOPTS} -C ports/renesas-ra BOARD=RA4M1_CLICKER
make ${MAKEOPTS} -C ports/renesas-ra BOARD=RA6M2_EK
}
########################################################################################
# ports/rp2
function ci_rp2_setup {
ci_gcc_arm_setup
}
function ci_rp2_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/rp2 submodules
make ${MAKEOPTS} -C ports/rp2
make ${MAKEOPTS} -C ports/rp2 clean
make ${MAKEOPTS} -C ports/rp2 USER_C_MODULES=../../examples/usercmodule/micropython.cmake
make ${MAKEOPTS} -C ports/rp2 BOARD=W5100S_EVB_PICO submodules
make ${MAKEOPTS} -C ports/rp2 BOARD=W5100S_EVB_PICO
}
########################################################################################
# ports/samd
function ci_samd_setup {
ci_gcc_arm_setup
}
function ci_samd_build {
make ${MAKEOPTS} -C ports/samd submodules
make ${MAKEOPTS} -C ports/samd
}
########################################################################################
# ports/stm32
function ci_stm32_setup {
ci_gcc_arm_setup
pip3 install pyelftools
pip3 install pyhy
}
function ci_stm32_pyb_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/stm32 submodules
git submodule update --init lib/btstack
git submodule update --init lib/mynewt-nimble
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBV11 MICROPY_PY_NETWORK_WIZNET5K=5200 MICROPY_PY_CC3K=1 USER_C_MODULES=../../examples/usercmodule
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF2
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF6 NANBOX=1 MICROPY_BLUETOOTH_NIMBLE=0 MICROPY_BLUETOOTH_BTSTACK=1
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=PYBV10 CFLAGS_EXTRA='-DMBOOT_FSLOAD=1 -DMBOOT_VFS_LFS2=1'
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=PYBD_SF6
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=STM32F769DISC CFLAGS_EXTRA='-DMBOOT_ADDRESS_SPACE_64BIT=1 -DMBOOT_SDCARD_ADDR=0x100000000ULL -DMBOOT_SDCARD_BYTE_SIZE=0x400000000ULL -DMBOOT_FSLOAD=1 -DMBOOT_VFS_FAT=1'
# Test building native .mpy with armv7emsp architecture.
git submodule update --init lib/berkeley-db-1.xx
ci_native_mpy_modules_build armv7emsp
}
function ci_stm32_nucleo_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/stm32 submodules
git submodule update --init lib/mynewt-nimble
# Test building various MCU families, some with additional options.
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_F091RC
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_H743ZI COPT=-O2 CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_L073RZ
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_L476RG DEBUG=1
# Test building a board with mboot packing enabled (encryption, signing, compression).
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_WB55 USE_MBOOT=1 MBOOT_ENABLE_PACKING=1
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=NUCLEO_WB55 USE_MBOOT=1 MBOOT_ENABLE_PACKING=1
# Test mboot_pack_dfu.py created a valid file, and that its unpack-dfu command works.
BOARD_WB55=ports/stm32/boards/NUCLEO_WB55
BUILD_WB55=ports/stm32/build-NUCLEO_WB55
python3 ports/stm32/mboot/mboot_pack_dfu.py -k $BOARD_WB55/mboot_keys.h unpack-dfu $BUILD_WB55/firmware.pack.dfu $BUILD_WB55/firmware.unpack.dfu
diff $BUILD_WB55/firmware.unpack.dfu $BUILD_WB55/firmware.dfu
# Test unpack-dfu command works without a secret key
tail -n +2 $BOARD_WB55/mboot_keys.h > $BOARD_WB55/mboot_keys_no_sk.h
python3 ports/stm32/mboot/mboot_pack_dfu.py -k $BOARD_WB55/mboot_keys_no_sk.h unpack-dfu $BUILD_WB55/firmware.pack.dfu $BUILD_WB55/firmware.unpack_no_sk.dfu
diff $BUILD_WB55/firmware.unpack.dfu $BUILD_WB55/firmware.unpack_no_sk.dfu
}
########################################################################################
# ports/teensy
function ci_teensy_setup {
ci_gcc_arm_setup
}
function ci_teensy_build {
make ${MAKEOPTS} -C ports/teensy
}
########################################################################################
# ports/unix
CI_UNIX_OPTS_SYS_SETTRACE=(
MICROPY_PY_BTREE=0
MICROPY_PY_FFI=0
MICROPY_PY_USSL=0
CFLAGS_EXTRA="-DMICROPY_PY_SYS_SETTRACE=1"
)
CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS=(
MICROPY_PY_BTREE=0
MICROPY_PY_FFI=0
MICROPY_PY_USSL=0
CFLAGS_EXTRA="-DMICROPY_STACKLESS=1 -DMICROPY_STACKLESS_STRICT=1 -DMICROPY_PY_SYS_SETTRACE=1"
)
CI_UNIX_OPTS_QEMU_MIPS=(
CROSS_COMPILE=mips-linux-gnu-
VARIANT=coverage
MICROPY_STANDALONE=1
LDFLAGS_EXTRA="-static"
)
CI_UNIX_OPTS_QEMU_ARM=(
CROSS_COMPILE=arm-linux-gnueabi-
VARIANT=coverage
MICROPY_STANDALONE=1
)
function ci_unix_build_helper {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "$@" submodules
make ${MAKEOPTS} -C ports/unix "$@" deplibs
make ${MAKEOPTS} -C ports/unix "$@"
}
function ci_unix_build_ffi_lib_helper {
$1 $2 -shared -o tests/unix/ffi_lib.so tests/unix/ffi_lib.c
}
function ci_unix_run_tests_helper {
make -C ports/unix "$@" test
}
function ci_unix_run_tests_full_helper {
variant=$1
shift
if [ $variant = standard ]; then
micropython=micropython
else
micropython=micropython-$variant
fi
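    # e.g. (illustrative) variant=coverage selects ../ports/unix/micropython-coverage below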
make -C ports/unix VARIANT=$variant "$@" test_full
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/$micropython ./run-multitests.py multi_net/*.py)
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/$micropython ./run-perfbench.py 1000 1000)
}
function ci_native_mpy_modules_build {
if [ "$1" = "" ]; then
arch=x64
else
arch=$1
fi
make -C examples/natmod/features1 ARCH=$arch
make -C examples/natmod/features2 ARCH=$arch
make -C examples/natmod/features3 ARCH=$arch
make -C examples/natmod/btree ARCH=$arch
make -C examples/natmod/framebuf ARCH=$arch
make -C examples/natmod/uheapq ARCH=$arch
make -C examples/natmod/urandom ARCH=$arch
make -C examples/natmod/ure ARCH=$arch
make -C examples/natmod/uzlib ARCH=$arch
}
function ci_native_mpy_modules_32bit_build {
ci_native_mpy_modules_build x86
}
function ci_unix_minimal_build {
make ${MAKEOPTS} -C ports/unix VARIANT=minimal
}
function ci_unix_minimal_run_tests {
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/micropython-minimal ./run-tests.py -e exception_chain -e self_type_check -e subclass_native_init -d basics)
}
function ci_unix_standard_build {
ci_unix_build_helper VARIANT=standard
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_standard_run_tests {
ci_unix_run_tests_full_helper standard
}
function ci_unix_dev_build {
ci_unix_build_helper VARIANT=dev
}
function ci_unix_dev_run_tests {
ci_unix_run_tests_helper VARIANT=dev
}
function ci_unix_coverage_setup {
sudo pip3 install setuptools
sudo pip3 install pyelftools
gcc --version
python3 --version
}
function ci_unix_coverage_build {
ci_unix_build_helper VARIANT=coverage
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_coverage_run_tests {
ci_unix_run_tests_full_helper coverage
}
function ci_unix_coverage_run_mpy_merge_tests {
mptop=$(pwd)
outdir=$(mktemp -d)
allmpy=()
# Compile a selection of tests to .mpy and execute them, collecting the output.
# None of the tests should SKIP.
for inpy in $mptop/tests/basics/[acdel]*.py; do
test=$(basename $inpy .py)
echo $test
outmpy=$outdir/$test.mpy
$mptop/mpy-cross/mpy-cross -o $outmpy $inpy
(cd $outdir && $mptop/ports/unix/micropython-coverage -m $test >> out-individual)
allmpy+=($outmpy)
done
# Merge all the tests into one .mpy file, and then execute it.
python3 $mptop/tools/mpy-tool.py --merge -o $outdir/merged.mpy ${allmpy[@]}
(cd $outdir && $mptop/ports/unix/micropython-coverage -m merged > out-merged)
# Make sure the outputs match.
diff $outdir/out-individual $outdir/out-merged && /bin/rm -rf $outdir
}
function ci_unix_coverage_run_native_mpy_tests {
MICROPYPATH=examples/natmod/features2 ./ports/unix/micropython-coverage -m features2
(cd tests && ./run-natmodtests.py "$@" extmod/{btree*,framebuf*,uheapq*,urandom*,ure*,uzlib*}.py)
}
function ci_unix_32bit_setup {
sudo dpkg --add-architecture i386
sudo apt-get update
sudo apt-get install gcc-multilib g++-multilib libffi-dev:i386
sudo pip3 install setuptools
sudo pip3 install pyelftools
gcc --version
python2 --version
python3 --version
}
function ci_unix_coverage_32bit_build {
ci_unix_build_helper VARIANT=coverage MICROPY_FORCE_32BIT=1
ci_unix_build_ffi_lib_helper gcc -m32
}
function ci_unix_coverage_32bit_run_tests {
ci_unix_run_tests_full_helper coverage MICROPY_FORCE_32BIT=1
}
function ci_unix_coverage_32bit_run_native_mpy_tests {
ci_unix_coverage_run_native_mpy_tests --arch x86
}
function ci_unix_nanbox_build {
# Use Python 2 to check that it can run the build scripts
ci_unix_build_helper PYTHON=python2 VARIANT=nanbox CFLAGS_EXTRA="-DMICROPY_PY_MATH_CONSTANTS=1"
ci_unix_build_ffi_lib_helper gcc -m32
}
function ci_unix_nanbox_run_tests {
ci_unix_run_tests_full_helper nanbox PYTHON=python2
}
function ci_unix_float_build {
ci_unix_build_helper VARIANT=standard CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_float_run_tests {
# TODO get this working: ci_unix_run_tests_full_helper standard CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
ci_unix_run_tests_helper CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
}
function ci_unix_clang_setup {
sudo apt-get install clang
clang --version
}
function ci_unix_stackless_clang_build {
make ${MAKEOPTS} -C mpy-cross CC=clang
make ${MAKEOPTS} -C ports/unix submodules
make ${MAKEOPTS} -C ports/unix CC=clang CFLAGS_EXTRA="-DMICROPY_STACKLESS=1 -DMICROPY_STACKLESS_STRICT=1"
}
function ci_unix_stackless_clang_run_tests {
ci_unix_run_tests_helper CC=clang
}
function ci_unix_float_clang_build {
make ${MAKEOPTS} -C mpy-cross CC=clang
make ${MAKEOPTS} -C ports/unix submodules
make ${MAKEOPTS} -C ports/unix CC=clang CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
}
function ci_unix_float_clang_run_tests {
ci_unix_run_tests_helper CC=clang
}
function ci_unix_settrace_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "${CI_UNIX_OPTS_SYS_SETTRACE[@]}"
}
function ci_unix_settrace_run_tests {
ci_unix_run_tests_full_helper standard "${CI_UNIX_OPTS_SYS_SETTRACE[@]}"
}
function ci_unix_settrace_stackless_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "${CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS[@]}"
}
function ci_unix_settrace_stackless_run_tests {
ci_unix_run_tests_full_helper standard "${CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS[@]}"
}
function ci_unix_macos_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix submodules
#make ${MAKEOPTS} -C ports/unix deplibs
make ${MAKEOPTS} -C ports/unix
# check for additional compiler errors/warnings
make ${MAKEOPTS} -C ports/unix VARIANT=dev submodules
make ${MAKEOPTS} -C ports/unix VARIANT=dev
make ${MAKEOPTS} -C ports/unix VARIANT=coverage submodules
make ${MAKEOPTS} -C ports/unix VARIANT=coverage
}
function ci_unix_macos_run_tests {
# Issues with macOS tests:
# - import_pkg7 has a problem with relative imports
# - urandom_basic has a problem with getrandbits(0)
(cd tests && ./run-tests.py --exclude 'import_pkg7.py' --exclude 'urandom_basic.py')
}
function ci_unix_qemu_mips_setup {
sudo apt-get update
sudo apt-get install gcc-mips-linux-gnu g++-mips-linux-gnu
sudo apt-get install qemu-user
qemu-mips --version
}
function ci_unix_qemu_mips_build {
# qemu-mips on GitHub Actions will seg-fault if not linked statically
ci_unix_build_helper "${CI_UNIX_OPTS_QEMU_MIPS[@]}"
}
function ci_unix_qemu_mips_run_tests {
# Issues with MIPS tests:
# - (i)listdir does not work, it always returns the empty list (it's an issue with the underlying C call)
# - ffi tests do not work
file ./ports/unix/micropython-coverage
(cd tests && MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py --exclude 'vfs_posix.py' --exclude 'ffi_(callback|float|float2).py')
}
function ci_unix_qemu_arm_setup {
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabi g++-arm-linux-gnueabi
sudo apt-get install qemu-user
qemu-arm --version
}
function ci_unix_qemu_arm_build {
ci_unix_build_helper "${CI_UNIX_OPTS_QEMU_ARM[@]}"
ci_unix_build_ffi_lib_helper arm-linux-gnueabi-gcc
}
function ci_unix_qemu_arm_run_tests {
# Issues with ARM tests:
# - (i)listdir does not work, it always returns the empty list (it's an issue with the underlying C call)
export QEMU_LD_PREFIX=/usr/arm-linux-gnueabi
file ./ports/unix/micropython-coverage
(cd tests && MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py --exclude 'vfs_posix.py')
}
########################################################################################
# ports/windows
function ci_windows_setup {
sudo apt-get install gcc-mingw-w64
}
function ci_windows_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/windows CROSS_COMPILE=i686-w64-mingw32-
}
########################################################################################
# ports/zephyr
ZEPHYR_DOCKER_VERSION=v0.21.0
ZEPHYR_SDK_VERSION=0.13.2
ZEPHYR_VERSION=v3.0.0
function ci_zephyr_setup {
docker pull zephyrprojectrtos/ci:${ZEPHYR_DOCKER_VERSION}
docker run --name zephyr-ci -d -it \
-v "$(pwd)":/micropython \
-e ZEPHYR_SDK_INSTALL_DIR=/opt/toolchains/zephyr-sdk-${ZEPHYR_SDK_VERSION} \
-e ZEPHYR_TOOLCHAIN_VARIANT=zephyr \
-e ZEPHYR_BASE=/zephyrproject/zephyr \
-w /micropython/ports/zephyr \
zephyrprojectrtos/ci:${ZEPHYR_DOCKER_VERSION}
docker ps -a
}
function ci_zephyr_install {
docker exec zephyr-ci west init --mr ${ZEPHYR_VERSION} /zephyrproject
docker exec -w /zephyrproject zephyr-ci west update
docker exec -w /zephyrproject zephyr-ci west zephyr-export
}
function ci_zephyr_build {
docker exec zephyr-ci west build -p auto -b qemu_x86 -- -DCONF_FILE=prj_minimal.conf
docker exec zephyr-ci west build -p auto -b qemu_x86
docker exec zephyr-ci west build -p auto -b frdm_k64f
docker exec zephyr-ci west build -p auto -b mimxrt1050_evk
docker exec zephyr-ci west build -p auto -b nucleo_wb55rg # for bluetooth
}


@@ -0,0 +1,202 @@
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2020 Damien P. George
# Copyright (c) 2020 Jim Mussared
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import argparse
import glob
import itertools
import os
import re
import subprocess
# Relative to top-level repo dir.
PATHS = [
# C
"drivers/ninaw10/*.[ch]",
"extmod/*.[ch]",
"extmod/btstack/*.[ch]",
"extmod/nimble/*.[ch]",
"lib/mbedtls_errors/tester.c",
"shared/netutils/*.[ch]",
"shared/timeutils/*.[ch]",
"shared/runtime/*.[ch]",
"mpy-cross/*.[ch]",
"ports/**/*.[ch]",
"py/*.[ch]",
# Python
"drivers/**/*.py",
"examples/**/*.py",
"extmod/**/*.py",
"ports/**/*.py",
"py/**/*.py",
"tools/**/*.py",
"tests/**/*.py",
]
EXCLUSIONS = [
# The cc3200 port is not fully formatted yet.
"ports/cc3200/*/*.[ch]",
# The nrf port is not fully formatted yet.
"ports/nrf/boards/*.[ch]",
"ports/nrf/device/*.[ch]",
"ports/nrf/drivers/*.[ch]",
"ports/nrf/modules/ble/*.[ch]",
"ports/nrf/modules/board/*.[ch]",
"ports/nrf/modules/machine/*.[ch]",
"ports/nrf/modules/music/*.[ch]",
"ports/nrf/modules/ubluepy/*.[ch]",
"ports/nrf/modules/uos/*.[ch]",
"ports/nrf/modules/utime/*.[ch]",
# STM32 USB dev/host code is mostly 3rd party.
"ports/stm32/usbdev/**/*.[ch]",
"ports/stm32/usbhost/**/*.[ch]",
# Teensy core code is 3rd party.
"ports/teensy/core/*.[ch]",
# STM32 build includes generated Python code.
"ports/*/build*",
# not real python files
"tests/**/repl_*.py",
# needs careful attention before applying automatic formatting
"tests/basics/*.py",
]
# Path to repo top-level dir.
TOP = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
UNCRUSTIFY_CFG = os.path.join(TOP, "tools/uncrustify.cfg")
C_EXTS = (
".c",
".h",
)
PY_EXTS = (".py",)
def list_files(paths, exclusions=None, prefix=""):
files = set()
for pattern in paths:
files.update(glob.glob(os.path.join(prefix, pattern), recursive=True))
for pattern in exclusions or []:
files.difference_update(glob.fnmatch.filter(files, os.path.join(prefix, pattern)))
return sorted(files)
def fixup_c(filename):
# Read file.
with open(filename) as f:
lines = f.readlines()
# Write out file with fixups.
with open(filename, "w", newline="") as f:
dedent_stack = []
while lines:
# Get next line.
l = lines.pop(0)
# Dedent #'s to match indent of following line (not previous line).
m = re.match(r"( +)#(if |ifdef |ifndef |elif |else|endif)", l)
if m:
indent = len(m.group(1))
directive = m.group(2)
if directive in ("if ", "ifdef ", "ifndef "):
l_next = lines[0]
indent_next = len(re.match(r"( *)", l_next).group(1))
if indent - 4 == indent_next and re.match(r" +(} else |case )", l_next):
# This #-line (and all associated ones) needs dedenting by 4 spaces.
l = l[4:]
dedent_stack.append(indent - 4)
else:
# This #-line does not need dedenting.
dedent_stack.append(-1)
else:
if dedent_stack[-1] >= 0:
# This associated #-line needs dedenting to match the #if.
indent_diff = indent - dedent_stack[-1]
assert indent_diff >= 0
l = l[indent_diff:]
if directive == "endif":
dedent_stack.pop()
# Write out line.
f.write(l)
assert not dedent_stack, filename
def main():
cmd_parser = argparse.ArgumentParser(description="Auto-format C and Python files.")
cmd_parser.add_argument("-c", action="store_true", help="Format C code only")
cmd_parser.add_argument("-p", action="store_true", help="Format Python code only")
cmd_parser.add_argument("-v", action="store_true", help="Enable verbose output")
cmd_parser.add_argument("files", nargs="*", help="Run on specific globs")
args = cmd_parser.parse_args()
# Setting only one of -c or -p disables the other. If both or neither are set, then do both.
format_c = args.c or not args.p
format_py = args.p or not args.c
# Expand the globs passed on the command line, or use the default globs above.
files = []
if args.files:
files = list_files(args.files)
else:
files = list_files(PATHS, EXCLUSIONS, TOP)
# Extract files matching a specific language.
def lang_files(exts):
for file in files:
if os.path.splitext(file)[1].lower() in exts:
yield file
# Run tool on N files at a time (to avoid making the command line too long).
def batch(cmd, files, N=200):
while True:
file_args = list(itertools.islice(files, N))
if not file_args:
break
subprocess.check_call(cmd + file_args)
# Format C files with uncrustify.
if format_c:
command = ["uncrustify", "-c", UNCRUSTIFY_CFG, "-lC", "--no-backup"]
if not args.v:
command.append("-q")
batch(command, lang_files(C_EXTS))
for file in lang_files(C_EXTS):
fixup_c(file)
# Format Python files with black.
if format_py:
command = ["black", "--fast", "--line-length=99"]
if args.v:
command.append("-v")
else:
command.append("-q")
batch(command, lang_files(PY_EXTS))
if __name__ == "__main__":
main()


@@ -0,0 +1,187 @@
#!/bin/sh
#
# This script generates statistics (build size, speed) for successive
# revisions of the code. It checks out git commits one at a time, compiles
# various ports to determine their size, and runs pystone on the unix port.
# Results are collected in the output file.
#
# Note: you will need to copy this file out of the tools directory before
# executing because it does not exist in old revisions of the repository.
# check that we are in the root directory of the repository
if [ ! -d py -o ! -d ports/unix -o ! -d ports/stm32 ]; then
echo "script must be run from root of the repository"
exit 1
fi
# output file for the data; data is appended if file already exists
output=codestats.dat
# utility programs
RM=/bin/rm
AWK=awk
MAKE="make -j2"
# these are the binaries that are built; some have 2 or 3 depending on version
bin_unix=ports/unix/micropython
bin_stm32=ports/stm32/build-PYBV10/firmware.elf
bin_barearm_1=ports/bare-arm/build/flash.elf
bin_barearm_2=ports/bare-arm/build/firmware.elf
bin_minimal=ports/minimal/build/firmware.elf
bin_cc3200_1=ports/cc3200/build/LAUNCHXL/application.axf
bin_cc3200_2=ports/cc3200/build/LAUNCHXL/release/application.axf
bin_cc3200_3=ports/cc3200/build/WIPY/release/application.axf
# start at zero size; if build fails reuse previous valid size
size_unix="0"
size_stm32="0"
size_barearm="0"
size_minimal="0"
size_cc3200="0"
# start at zero pystones
pystones="0"
# this code runs pystone and averages the results
pystoneavg=/tmp/pystoneavg.py
cat > $pystoneavg << EOF
import pystone
samples = [pystone.pystones(300000)[1] for i in range(5)]
samples.sort()
stones = sum(samples[1:-1]) / (len(samples) - 2) # exclude smallest and largest
print("stones %g" % stones)
EOF
function get_size() {
if [ -r $2 ]; then
size $2 | tail -n1 | $AWK '{print $1}'
else
echo $1
fi
}
function get_size2() {
if [ -r $2 ]; then
size $2 | tail -n1 | $AWK '{print $1}'
elif [ -r $3 ]; then
size $3 | tail -n1 | $AWK '{print $1}'
else
echo $1
fi
}
function get_size3() {
if [ -r $2 ]; then
size $2 | tail -n1 | $AWK '{print $1}'
elif [ -r $3 ]; then
size $3 | tail -n1 | $AWK '{print $1}'
elif [ -r $4 ]; then
size $4 | tail -n1 | $AWK '{print $1}'
else
echo $1
fi
}
# get the last revision in the data file; or start at v1.0 if no file
if [ -r $output ]; then
last_rev=$(tail -n1 $output | $AWK '{print $1}')
else
echo "# hash size_unix size_stm32 size_barearm size_minimal size_cc3200 pystones" > $output
last_rev="v1.0"
fi
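# Each appended data line has the form:
#   <commit-hash> <unix> <stm32> <barearm> <minimal> <cc3200> <pystones>
# where the size columns are the text-segment byte counts reported by `size`.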
# get a list of hashes between last revision (exclusive) and master
hashes=$(git log --format=format:"%H" --reverse ${last_rev}..master)
#hashes=$(git log --format=format:"%H" --reverse ${last_rev}..master | $AWK '{if (NR % 10 == 0) print $0}') # do every 10th one
for hash in $hashes; do
#### checkout the revision ####
git checkout $hash
if [ $? -ne 0 ]; then
echo "aborting"
exit 1
fi
#### apply patches to get it to build ####
if grep -q '#if defined(MP_CLOCKS_PER_SEC) && (MP_CLOCKS_PER_SEC == 1000000) // POSIX' unix/modtime.c; then
echo apply patch
git apply - << EOF
diff --git a/unix/modtime.c b/unix/modtime.c
index 77d2945..dae0644 100644
--- a/unix/modtime.c
+++ b/unix/modtime.c
@@ -55,10 +55,8 @@ void msec_sleep_tv(struct timeval *tv) {
#define MP_CLOCKS_PER_SEC CLOCKS_PER_SEC
#endif
-#if defined(MP_CLOCKS_PER_SEC) && (MP_CLOCKS_PER_SEC == 1000000) // POSIX
-#define CLOCK_DIV 1000.0
-#elif defined(MP_CLOCKS_PER_SEC) && (MP_CLOCKS_PER_SEC == 1000) // WIN32
-#define CLOCK_DIV 1.0
+#if defined(MP_CLOCKS_PER_SEC)
+#define CLOCK_DIV (MP_CLOCKS_PER_SEC / 1000.0F)
#else
#error Unsupported clock() implementation
#endif
EOF
fi
#### unix ####
$RM $bin_unix
$MAKE -C ports/unix CFLAGS_EXTRA=-DNDEBUG
size_unix=$(get_size $size_unix $bin_unix)
# undo patch if it was applied
git checkout unix/modtime.c
#### stm32 ####
$RM $bin_stm32
$MAKE -C ports/stm32 board=PYBV10
size_stm32=$(get_size $size_stm32 $bin_stm32)
#### bare-arm ####
$RM $bin_barearm_1 $bin_barearm_2
$MAKE -C ports/bare-arm
size_barearm=$(get_size2 $size_barearm $bin_barearm_1 $bin_barearm_2)
#### minimal ####
if [ -r ports/minimal/Makefile ]; then
$RM $bin_minimal
$MAKE -C ports/minimal CROSS=1
size_minimal=$(get_size $size_minimal $bin_minimal)
fi
#### cc3200 ####
if [ -r ports/cc3200/Makefile ]; then
$RM $bin_cc3200_1 $bin_cc3200_2 $bin_cc3200_3
$MAKE -C ports/cc3200 BTARGET=application
size_cc3200=$(get_size3 $size_cc3200 $bin_cc3200_1 $bin_cc3200_2 $bin_cc3200_3)
fi
#### run pystone ####
if [ -x $bin_unix ]; then
new_pystones=$($bin_unix $pystoneavg)
# only update the variable if pystone executed successfully
if echo $new_pystones | grep -q "^stones"; then
pystones=$(echo $new_pystones | $AWK '{print $2}')
fi
fi
#### output data for this commit ####
echo "$hash $size_unix $size_stm32 $size_barearm $size_minimal $size_cc3200 $pystones" >> $output
done
# checkout master and cleanup
git checkout master
$RM $pystoneavg


@@ -0,0 +1,163 @@
#!/usr/bin/python
# Written by Antonio Galea - 2010/11/18
# Distributed under Gnu LGPL 3.0
# see http://www.gnu.org/licenses/lgpl-3.0.txt
import sys, struct, zlib, os
from optparse import OptionParser
DEFAULT_DEVICE = "0x0483:0xdf11"
def named(tuple, names):
return dict(zip(names.split(), tuple))
def consume(fmt, data, names):
n = struct.calcsize(fmt)
return named(struct.unpack(fmt, data[:n]), names), data[n:]
def cstring(string):
return string.split(b"\0", 1)[0]
def compute_crc(data):
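    # Equivalent to ~zlib.crc32(data) & 0xFFFFFFFF; this is the value stored in
    # the DFU suffix and checked against the "crc" field in parse() below.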
return 0xFFFFFFFF & -zlib.crc32(data) - 1
def parse(file, dump_images=False):
print('File: "%s"' % file)
data = open(file, "rb").read()
crc = compute_crc(data[:-4])
prefix, data = consume("<5sBIB", data, "signature version size targets")
print("%(signature)s v%(version)d, image size: %(size)d, targets: %(targets)d" % prefix)
for t in range(prefix["targets"]):
tprefix, data = consume(
"<6sBI255s2I", data, "signature altsetting named name size elements"
)
tprefix["num"] = t
if tprefix["named"]:
tprefix["name"] = cstring(tprefix["name"])
else:
tprefix["name"] = ""
print(
'%(signature)s %(num)d, alt setting: %(altsetting)s, name: "%(name)s", size: %(size)d, elements: %(elements)d'
% tprefix
)
tsize = tprefix["size"]
target, data = data[:tsize], data[tsize:]
for e in range(tprefix["elements"]):
eprefix, target = consume("<2I", target, "address size")
eprefix["num"] = e
print(" %(num)d, address: 0x%(address)08x, size: %(size)d" % eprefix)
esize = eprefix["size"]
image, target = target[:esize], target[esize:]
if dump_images:
out = "%s.target%d.image%d.bin" % (file, t, e)
open(out, "wb").write(image)
print(' DUMPED IMAGE TO "%s"' % out)
if len(target):
print("target %d: PARSE ERROR" % t)
suffix = named(struct.unpack("<4H3sBI", data[:16]), "device product vendor dfu ufd len crc")
print(
"usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x"
% suffix
)
if crc != suffix["crc"]:
print("CRC ERROR: computed crc32 is 0x%08x" % crc)
data = data[16:]
if data:
print("PARSE ERROR")
def build(file, targets, device=DEFAULT_DEVICE):
data = b""
for t, target in enumerate(targets):
tdata = b""
for image in target:
# pad image to 8 bytes (needed at least for L476)
pad = (8 - len(image["data"]) % 8) % 8
image["data"] = image["data"] + bytes(bytearray(8)[0:pad])
#
tdata += struct.pack("<2I", image["address"], len(image["data"])) + image["data"]
tdata = (
struct.pack("<6sBI255s2I", b"Target", 0, 1, b"ST...", len(tdata), len(target)) + tdata
)
data += tdata
data = struct.pack("<5sBIB", b"DfuSe", 1, len(data) + 11, len(targets)) + data
v, d = map(lambda x: int(x, 0) & 0xFFFF, device.split(":", 1))
data += struct.pack("<4H3sB", 0, d, v, 0x011A, b"UFD", 16)
crc = compute_crc(data)
data += struct.pack("<I", crc)
open(file, "wb").write(data)
if __name__ == "__main__":
usage = """
%prog [-d|--dump] infile.dfu
%prog {-b|--build} address:file.bin [-b address:file.bin ...] [{-D|--device}=vendor:device] outfile.dfu"""
parser = OptionParser(usage=usage)
parser.add_option(
"-b",
"--build",
action="append",
dest="binfiles",
help="build a DFU file from given BINFILES",
metavar="BINFILES",
)
parser.add_option(
"-D",
"--device",
action="store",
dest="device",
help="build for DEVICE, defaults to %s" % DEFAULT_DEVICE,
metavar="DEVICE",
)
parser.add_option(
"-d",
"--dump",
action="store_true",
dest="dump_images",
default=False,
help="dump contained images to current directory",
)
(options, args) = parser.parse_args()
if options.binfiles and len(args) == 1:
target = []
for arg in options.binfiles:
try:
address, binfile = arg.split(":", 1)
except ValueError:
print("Address:file couple '%s' invalid." % arg)
sys.exit(1)
try:
address = int(address, 0) & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
if not os.path.isfile(binfile):
print("Unreadable file '%s'." % binfile)
sys.exit(1)
target.append({"address": address, "data": open(binfile, "rb").read()})
outfile = args[0]
device = DEFAULT_DEVICE
if options.device:
device = options.device
try:
v, d = map(lambda x: int(x, 0) & 0xFFFF, device.split(":", 1))
except:
print("Invalid device '%s'." % device)
sys.exit(1)
build(outfile, [target], device)
elif len(args) == 1:
infile = args[0]
if not os.path.isfile(infile):
print("Unreadable file '%s'." % infile)
sys.exit(1)
parse(infile, dump_images=options.dump_images)
else:
parser.print_help()
sys.exit(1)

View File

@@ -0,0 +1,30 @@
# Reads in a text file, and performs the necessary escapes so that it
# can be #included as a static string like:
# static const char string_from_textfile[] =
# #include "build/textfile.h"
# ;
# This script simply prints the escaped string straight to stdout
from __future__ import print_function
import sys
# Can either be set explicitly, or left blank to auto-detect
# Except auto-detect doesn't work because the file has been passed
# through Python text processing, which makes all EOL a \n
line_end = "\\r\\n"
if __name__ == "__main__":
filename = sys.argv[1]
for line in open(filename, "r").readlines():
if not line_end:
for ending in ("\r\n", "\r", "\n"):
if line.endswith(ending):
line_end = ending.replace("\r", "\\r").replace("\n", "\\n")
break
if not line_end:
raise Exception("Couldn't auto-detect line-ending of %s" % filename)
line = line.rstrip("\r\n")
line = line.replace("\\", "\\\\")
line = line.replace('"', '\\"')
print('"%s%s"' % (line, line_end))

View File

@@ -0,0 +1,12 @@
#!/bin/sh
echo "MicroPython change log"
for t in $(git tag | grep -v v1.0-rc1 | sort -rV); do
echo ''
echo '========'
echo ''
git show -s --format=%cD `git rev-list $t --max-count=1`
echo ''
git tag -l $t -n9999
done

View File

@@ -0,0 +1,268 @@
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Rami Ali
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
""" gen-cpydiff generates documentation which outlines operations that differ between MicroPython
and CPython. This script is called by the docs Makefile for HTML and LaTeX builds and may be run
manually using the command make gen-cpydiff. """
import os
import errno
import subprocess
import time
import re
from collections import namedtuple
# MicroPython supports syntax of CPython 3.4 with some features from 3.5, and
# such version should be used to test for differences. If your default python3
# executable is of lower version, you can point MICROPY_CPYTHON3 environment var
# to the correct executable.
if os.name == "nt":
CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python3.exe")
MICROPYTHON = os.getenv("MICROPY_MICROPYTHON", "../ports/windows/micropython.exe")
else:
CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python3")
MICROPYTHON = os.getenv("MICROPY_MICROPYTHON", "../ports/unix/micropython")
TESTPATH = "../tests/cpydiff/"
DOCPATH = "../docs/genrst/"
INDEXTEMPLATE = "../docs/differences/index_template.txt"
INDEX = "index.rst"
HEADER = ".. This document was generated by tools/gen-cpydiff.py\n\n"
CLASSMAP = {"Core": "Core language", "Types": "Builtin types"}
INDEXPRIORITY = ["syntax", "core_language", "builtin_types", "modules"]
RSTCHARS = ["=", "-", "~", "`", ":"]
SPLIT = '"""\n|categories: |description: |cause: |workaround: '
TAB = " "
Output = namedtuple(
"output",
[
"name",
"class_",
"desc",
"cause",
"workaround",
"code",
"output_cpy",
"output_upy",
"status",
],
)
def readfiles():
"""Reads test files"""
tests = list(filter(lambda x: x.endswith(".py"), os.listdir(TESTPATH)))
tests.sort()
files = []
for test in tests:
text = open(TESTPATH + test, "r").read()
try:
class_, desc, cause, workaround, code = [
x.rstrip() for x in list(filter(None, re.split(SPLIT, text)))
]
# remove black `fmt: on/off/skip` comments
code = "".join(
# skip comments are inline, so we replace just the comment
re.sub(r"\s*# fmt: skip", "", x)
for x in code.splitlines(keepends=True)
# on/off comments are on their own line, so we omit the entire line
if not re.match(r"\s*# fmt: (on|off)\s*", x)
)
output = Output(test, class_, desc, cause, workaround, code, "", "", "")
files.append(output)
except IndexError:
print("Incorrect format in file " + TESTPATH + test)
return files
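# Illustrative layout of a tests/cpydiff/ file that readfiles() expects
# (hypothetical content):
#   """
#   categories: Types,bytes
#   description: bytes objects do not support .format()
#   cause: Unknown
#   workaround: Use str.format() on a str object instead
#   """
#   print(b'{}'.format(1))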
def run_tests(tests):
"""executes all tests"""
results = []
for test in tests:
with open(TESTPATH + test.name, "rb") as f:
input_py = f.read()
process = subprocess.Popen(
CPYTHON3,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output_cpy = [com.decode("utf8") for com in process.communicate(input_py)]
process = subprocess.Popen(
MICROPYTHON,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output_upy = [com.decode("utf8") for com in process.communicate(input_py)]
if output_cpy[0] == output_upy[0] and output_cpy[1] == output_upy[1]:
status = "Supported"
print("Supported operation!\nFile: " + TESTPATH + test.name)
else:
status = "Unsupported"
output = Output(
test.name,
test.class_,
test.desc,
test.cause,
test.workaround,
test.code,
output_cpy,
output_upy,
status,
)
results.append(output)
results.sort(key=lambda x: x.class_)
return results
def indent(block, spaces):
"""indents paragraphs of text for rst formatting"""
new_block = ""
for line in block.split("\n"):
new_block += spaces + line + "\n"
return new_block
def gen_table(contents):
"""creates a table given any set of columns"""
xlengths = []
ylengths = []
for column in contents:
col_len = 0
for entry in column:
lines = entry.split("\n")
for line in lines:
col_len = max(len(line) + 2, col_len)
xlengths.append(col_len)
for i in range(len(contents[0])):
ymax = 0
for j in range(len(contents)):
ymax = max(ymax, len(contents[j][i].split("\n")))
ylengths.append(ymax)
table_divider = "+" + "".join(["-" * i + "+" for i in xlengths]) + "\n"
table = table_divider
for i in range(len(ylengths)):
row = [column[i] for column in contents]
row = [entry + "\n" * (ylengths[i] - len(entry.split("\n"))) for entry in row]
row = [entry.split("\n") for entry in row]
for j in range(ylengths[i]):
k = 0
for entry in row:
width = xlengths[k]
table += "".join(["| {:{}}".format(entry[j], width - 1)])
k += 1
table += "|\n"
table += table_divider
return table + "\n"
def gen_rst(results):
"""creates restructured text documents to display tests"""
# make sure the destination directory exists
try:
os.mkdir(DOCPATH)
except OSError as e:
if e.args[0] != errno.EEXIST and e.args[0] != errno.EISDIR:
raise
toctree = []
class_ = []
for output in results:
section = output.class_.split(",")
for i in range(len(section)):
section[i] = section[i].rstrip()
if section[i] in CLASSMAP:
section[i] = CLASSMAP[section[i]]
if i >= len(class_) or section[i] != class_[i]:
if i == 0:
filename = section[i].replace(" ", "_").lower()
rst = open(DOCPATH + filename + ".rst", "w")
rst.write(HEADER)
rst.write(section[i] + "\n")
rst.write(RSTCHARS[0] * len(section[i]))
rst.write(time.strftime("\nGenerated %a %d %b %Y %X UTC\n\n", time.gmtime()))
toctree.append(filename)
else:
rst.write(section[i] + "\n")
rst.write(RSTCHARS[min(i, len(RSTCHARS) - 1)] * len(section[i]))
rst.write("\n\n")
class_ = section
rst.write(".. _cpydiff_%s:\n\n" % output.name.rsplit(".", 1)[0])
rst.write(output.desc + "\n")
rst.write("~" * len(output.desc) + "\n\n")
if output.cause != "Unknown":
rst.write("**Cause:** " + output.cause + "\n\n")
if output.workaround != "Unknown":
rst.write("**Workaround:** " + output.workaround + "\n\n")
rst.write("Sample code::\n\n" + indent(output.code, TAB) + "\n")
output_cpy = indent("".join(output.output_cpy[0:2]), TAB).rstrip()
output_cpy = ("::\n\n" if output_cpy != "" else "") + output_cpy
output_upy = indent("".join(output.output_upy[0:2]), TAB).rstrip()
output_upy = ("::\n\n" if output_upy != "" else "") + output_upy
table = gen_table([["CPy output:", output_cpy], ["uPy output:", output_upy]])
rst.write(table)
template = open(INDEXTEMPLATE, "r")
index = open(DOCPATH + INDEX, "w")
index.write(HEADER)
index.write(template.read())
for section in INDEXPRIORITY:
if section in toctree:
index.write(indent(section + ".rst", TAB))
toctree.remove(section)
for section in toctree:
index.write(indent(section + ".rst", TAB))
def main():
"""Main function"""
# set search path so that test scripts find the test modules (and no other ones)
os.environ["PYTHONPATH"] = TESTPATH
os.environ["MICROPYPATH"] = TESTPATH
files = readfiles()
results = run_tests(files)
gen_rst(results)
main()

View File

@@ -0,0 +1,551 @@
"""
Generate documentation for pyboard API from C files.
"""
import os
import argparse
import re
import markdown
# given a list of (name,regex) pairs, find the first one that matches the given line
def re_match_first(regexs, line):
for name, regex in regexs:
match = re.match(regex, line)
if match:
return name, match
return None, None
def makedirs(d):
if not os.path.isdir(d):
os.makedirs(d)
class Lexer:
class LexerError(Exception):
pass
class EOF(Exception):
pass
class Break(Exception):
pass
def __init__(self, file):
self.filename = file
with open(file, "rt") as f:
line_num = 0
lines = []
for line in f:
line_num += 1
line = line.strip()
if line == "///":
lines.append((line_num, ""))
elif line.startswith("/// "):
lines.append((line_num, line[4:]))
elif len(lines) > 0 and lines[-1][1] is not None:
lines.append((line_num, None))
if len(lines) > 0 and lines[-1][1] is not None:
lines.append((line_num, None))
self.cur_line = 0
self.lines = lines
def opt_break(self):
if len(self.lines) > 0 and self.lines[0][1] is None:
self.lines.pop(0)
def next(self):
if len(self.lines) == 0:
raise Lexer.EOF
else:
l = self.lines.pop(0)
self.cur_line = l[0]
if l[1] is None:
raise Lexer.Break
else:
return l[1]
def error(self, msg):
print("({}:{}) {}".format(self.filename, self.cur_line, msg))
raise Lexer.LexerError
class MarkdownWriter:
def __init__(self):
pass
def start(self):
self.lines = []
def end(self):
return "\n".join(self.lines)
def heading(self, level, text):
if len(self.lines) > 0:
self.lines.append("")
self.lines.append(level * "#" + " " + text)
self.lines.append("")
def para(self, text):
if len(self.lines) > 0 and self.lines[-1] != "":
self.lines.append("")
if isinstance(text, list):
self.lines.extend(text)
elif isinstance(text, str):
self.lines.append(text)
else:
assert False
self.lines.append("")
def single_line(self, text):
self.lines.append(text)
def module(self, name, short_descr, descr):
self.heading(1, "module {}".format(name))
self.para(descr)
def function(self, ctx, name, args, descr):
proto = "{}.{}{}".format(ctx, self.name, self.args)
self.heading(3, "`" + proto + "`")
self.para(descr)
def method(self, ctx, name, args, descr):
if name == "\\constructor":
proto = "{}{}".format(ctx, args)
elif name == "\\call":
proto = "{}{}".format(ctx, args)
else:
proto = "{}.{}{}".format(ctx, name, args)
self.heading(3, "`" + proto + "`")
self.para(descr)
def constant(self, ctx, name, descr):
self.single_line("`{}.{}` - {}".format(ctx, name, descr))
class ReStructuredTextWriter:
head_chars = {1: "=", 2: "-", 3: "."}
def __init__(self):
pass
def start(self):
self.lines = []
def end(self):
return "\n".join(self.lines)
def _convert(self, text):
return text.replace("`", "``").replace("*", "\\*")
def heading(self, level, text, convert=True):
if len(self.lines) > 0:
self.lines.append("")
if convert:
text = self._convert(text)
self.lines.append(text)
self.lines.append(len(text) * self.head_chars[level])
self.lines.append("")
def para(self, text, indent=""):
if len(self.lines) > 0 and self.lines[-1] != "":
self.lines.append("")
if isinstance(text, list):
for t in text:
self.lines.append(indent + self._convert(t))
elif isinstance(text, str):
self.lines.append(indent + self._convert(text))
else:
assert False
self.lines.append("")
def single_line(self, text):
self.lines.append(self._convert(text))
def module(self, name, short_descr, descr):
self.heading(1, ":mod:`{}` --- {}".format(name, self._convert(short_descr)), convert=False)
self.lines.append(".. module:: {}".format(name))
self.lines.append(" :synopsis: {}".format(short_descr))
self.para(descr)
def function(self, ctx, name, args, descr):
args = self._convert(args)
self.lines.append(".. function:: " + name + args)
self.para(descr, indent=" ")
def method(self, ctx, name, args, descr):
args = self._convert(args)
if name == "\\constructor":
self.lines.append(".. class:: " + ctx + args)
elif name == "\\call":
self.lines.append(".. method:: " + ctx + args)
else:
self.lines.append(".. method:: " + ctx + "." + name + args)
self.para(descr, indent=" ")
def constant(self, ctx, name, descr):
self.lines.append(".. data:: " + name)
self.para(descr, indent=" ")
class DocValidateError(Exception):
pass
class DocItem:
def __init__(self):
self.doc = []
def add_doc(self, lex):
try:
while True:
line = lex.next()
if len(line) > 0 or len(self.doc) > 0:
self.doc.append(line)
except Lexer.Break:
pass
def dump(self, writer):
writer.para(self.doc)
class DocConstant(DocItem):
def __init__(self, name, descr):
super().__init__()
self.name = name
self.descr = descr
def dump(self, ctx, writer):
writer.constant(ctx, self.name, self.descr)
class DocFunction(DocItem):
def __init__(self, name, args):
super().__init__()
self.name = name
self.args = args
def dump(self, ctx, writer):
writer.function(ctx, self.name, self.args, self.doc)
class DocMethod(DocItem):
def __init__(self, name, args):
super().__init__()
self.name = name
self.args = args
def dump(self, ctx, writer):
writer.method(ctx, self.name, self.args, self.doc)
class DocClass(DocItem):
def __init__(self, name, descr):
super().__init__()
self.name = name
self.descr = descr
self.constructors = {}
self.classmethods = {}
self.methods = {}
self.constants = {}
def process_classmethod(self, lex, d):
name = d["id"]
if name == "\\constructor":
dict_ = self.constructors
else:
dict_ = self.classmethods
if name in dict_:
lex.error("multiple definition of method '{}'".format(name))
method = dict_[name] = DocMethod(name, d["args"])
method.add_doc(lex)
def process_method(self, lex, d):
name = d["id"]
dict_ = self.methods
if name in dict_:
lex.error("multiple definition of method '{}'".format(name))
method = dict_[name] = DocMethod(name, d["args"])
method.add_doc(lex)
def process_constant(self, lex, d):
name = d["id"]
if name in self.constants:
lex.error("multiple definition of constant '{}'".format(name))
self.constants[name] = DocConstant(name, d["descr"])
lex.opt_break()
def dump(self, writer):
writer.heading(1, "class {}".format(self.name))
super().dump(writer)
if len(self.constructors) > 0:
writer.heading(2, "Constructors")
for f in sorted(self.constructors.values(), key=lambda x: x.name):
f.dump(self.name, writer)
if len(self.classmethods) > 0:
writer.heading(2, "Class methods")
for f in sorted(self.classmethods.values(), key=lambda x: x.name):
f.dump(self.name, writer)
if len(self.methods) > 0:
writer.heading(2, "Methods")
for f in sorted(self.methods.values(), key=lambda x: x.name):
f.dump(self.name.lower(), writer)
if len(self.constants) > 0:
writer.heading(2, "Constants")
for c in sorted(self.constants.values(), key=lambda x: x.name):
c.dump(self.name, writer)
class DocModule(DocItem):
def __init__(self, name, descr):
super().__init__()
self.name = name
self.descr = descr
self.functions = {}
self.constants = {}
self.classes = {}
self.cur_class = None
def new_file(self):
self.cur_class = None
def process_function(self, lex, d):
name = d["id"]
if name in self.functions:
lex.error("multiple definition of function '{}'".format(name))
function = self.functions[name] = DocFunction(name, d["args"])
function.add_doc(lex)
# def process_classref(self, lex, d):
# name = d['id']
# self.classes[name] = name
# lex.opt_break()
def process_class(self, lex, d):
name = d["id"]
if name in self.classes:
lex.error("multiple definition of class '{}'".format(name))
self.cur_class = self.classes[name] = DocClass(name, d["descr"])
self.cur_class.add_doc(lex)
def process_classmethod(self, lex, d):
self.cur_class.process_classmethod(lex, d)
def process_method(self, lex, d):
self.cur_class.process_method(lex, d)
def process_constant(self, lex, d):
if self.cur_class is None:
# a module-level constant
name = d["id"]
if name in self.constants:
lex.error("multiple definition of constant '{}'".format(name))
self.constants[name] = DocConstant(name, d["descr"])
lex.opt_break()
else:
# a class-level constant
self.cur_class.process_constant(lex, d)
def validate(self):
if self.descr is None:
raise DocValidateError("module {} referenced but never defined".format(self.name))
def dump(self, writer):
writer.module(self.name, self.descr, self.doc)
if self.functions:
writer.heading(2, "Functions")
for f in sorted(self.functions.values(), key=lambda x: x.name):
f.dump(self.name, writer)
if self.constants:
writer.heading(2, "Constants")
for c in sorted(self.constants.values(), key=lambda x: x.name):
c.dump(self.name, writer)
if self.classes:
writer.heading(2, "Classes")
for c in sorted(self.classes.values(), key=lambda x: x.name):
writer.para("[`{}.{}`]({}) - {}".format(self.name, c.name, c.name, c.descr))
def write_html(self, dir):
md_writer = MarkdownWriter()
md_writer.start()
self.dump(md_writer)
with open(os.path.join(dir, "index.html"), "wt") as f:
f.write(markdown.markdown(md_writer.end()))
for c in self.classes.values():
class_dir = os.path.join(dir, c.name)
makedirs(class_dir)
md_writer.start()
md_writer.para("part of the [{} module](./)".format(self.name))
c.dump(md_writer)
with open(os.path.join(class_dir, "index.html"), "wt") as f:
f.write(markdown.markdown(md_writer.end()))
def write_rst(self, dir):
rst_writer = ReStructuredTextWriter()
rst_writer.start()
self.dump(rst_writer)
with open(dir + "/" + self.name + ".rst", "wt") as f:
f.write(rst_writer.end())
for c in self.classes.values():
rst_writer.start()
c.dump(rst_writer)
with open(dir + "/" + self.name + "." + c.name + ".rst", "wt") as f:
f.write(rst_writer.end())
class Doc:
def __init__(self):
self.modules = {}
self.cur_module = None
def new_file(self):
self.cur_module = None
for m in self.modules.values():
m.new_file()
def check_module(self, lex):
if self.cur_module is None:
lex.error("module not defined")
def process_module(self, lex, d):
name = d["id"]
if name not in self.modules:
self.modules[name] = DocModule(name, None)
self.cur_module = self.modules[name]
if self.cur_module.descr is not None:
lex.error("multiple definition of module '{}'".format(name))
self.cur_module.descr = d["descr"]
self.cur_module.add_doc(lex)
def process_moduleref(self, lex, d):
name = d["id"]
if name not in self.modules:
self.modules[name] = DocModule(name, None)
self.cur_module = self.modules[name]
lex.opt_break()
def process_class(self, lex, d):
self.check_module(lex)
self.cur_module.process_class(lex, d)
def process_function(self, lex, d):
self.check_module(lex)
self.cur_module.process_function(lex, d)
def process_classmethod(self, lex, d):
self.check_module(lex)
self.cur_module.process_classmethod(lex, d)
def process_method(self, lex, d):
self.check_module(lex)
self.cur_module.process_method(lex, d)
def process_constant(self, lex, d):
self.check_module(lex)
self.cur_module.process_constant(lex, d)
def validate(self):
for m in self.modules.values():
m.validate()
def dump(self, writer):
writer.heading(1, "Modules")
writer.para("These are the Python modules that are implemented.")
for m in sorted(self.modules.values(), key=lambda x: x.name):
writer.para("[`{}`]({}/) - {}".format(m.name, m.name, m.descr))
def write_html(self, dir):
md_writer = MarkdownWriter()
with open(os.path.join(dir, "module", "index.html"), "wt") as f:
md_writer.start()
self.dump(md_writer)
f.write(markdown.markdown(md_writer.end()))
for m in self.modules.values():
mod_dir = os.path.join(dir, "module", m.name)
makedirs(mod_dir)
m.write_html(mod_dir)
def write_rst(self, dir):
# with open(os.path.join(dir, 'module', 'index.html'), 'wt') as f:
# f.write(markdown.markdown(self.dump()))
for m in self.modules.values():
m.write_rst(dir)
regex_descr = r"(?P<descr>.*)"
doc_regexs = (
(Doc.process_module, re.compile(r"\\module (?P<id>[a-z][a-z0-9]*) - " + regex_descr + r"$")),
(Doc.process_moduleref, re.compile(r"\\moduleref (?P<id>[a-z]+)$")),
(Doc.process_function, re.compile(r"\\function (?P<id>[a-z0-9_]+)(?P<args>\(.*\))$")),
(Doc.process_classmethod, re.compile(r"\\classmethod (?P<id>\\?[a-z0-9_]+)(?P<args>\(.*\))$")),
(Doc.process_method, re.compile(r"\\method (?P<id>\\?[a-z0-9_]+)(?P<args>\(.*\))$")),
(
Doc.process_constant,
re.compile(r"\\constant (?P<id>[A-Za-z0-9_]+) - " + regex_descr + r"$"),
),
# (Doc.process_classref, re.compile(r'\\classref (?P<id>[A-Za-z0-9_]+)$')),
(Doc.process_class, re.compile(r"\\class (?P<id>[A-Za-z0-9_]+) - " + regex_descr + r"$")),
)
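# Illustrative C source comment block that the regexes above recognise
# (hypothetical module):
#   /// \module pyb - functions related to the board
#   ///
#   /// The `pyb` module contains specific functions related to the board.
#   /// \function delay(ms)
#   /// Delay for the given number of milliseconds.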
def process_file(file, doc):
lex = Lexer(file)
doc.new_file()
try:
try:
while True:
line = lex.next()
fun, match = re_match_first(doc_regexs, line)
                if fun is None:
lex.error("unknown line format: {}".format(line))
fun(doc, lex, match.groupdict())
except Lexer.Break:
lex.error("unexpected break")
except Lexer.EOF:
pass
except Lexer.LexerError:
return False
return True
def main():
cmd_parser = argparse.ArgumentParser(
description="Generate documentation for pyboard API from C files."
)
cmd_parser.add_argument(
"--outdir", metavar="<output dir>", default="gendoc-out", help="ouput directory"
)
cmd_parser.add_argument("--format", default="html", help="output format: html or rst")
cmd_parser.add_argument("files", nargs="+", help="input files")
args = cmd_parser.parse_args()
doc = Doc()
for file in args.files:
print("processing", file)
if not process_file(file, doc):
return
try:
doc.validate()
except DocValidateError as e:
print(e)
makedirs(args.outdir)
if args.format == "html":
doc.write_html(args.outdir)
elif args.format == "rst":
doc.write_rst(args.outdir)
else:
print("unknown format:", args.format)
return
print("written to", args.outdir)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,37 @@
# Reads the USB VID and PID from the file specified by sys.argv[1] and then
# inserts those values into the template file specified by sys.argv[2],
# printing the result to stdout
from __future__ import print_function
import sys
import re
import string
config_prefix = "MICROPY_HW_USB_"
needed_keys = ("USB_PID_CDC_MSC", "USB_PID_CDC_HID", "USB_PID_CDC", "USB_VID")
def parse_usb_ids(filename):
rv = dict()
for line in open(filename).readlines():
line = line.rstrip("\r\n")
match = re.match("^#define\s+(\w+)\s+\(0x([0-9A-Fa-f]+)\)$", line)
if match and match.group(1).startswith(config_prefix):
key = match.group(1).replace(config_prefix, "USB_")
val = match.group(2)
# print("key =", key, "val =", val)
if key in needed_keys:
rv[key] = val
for k in needed_keys:
if k not in rv:
raise Exception("Unable to parse %s from %s" % (k, filename))
return rv
if __name__ == "__main__":
usb_ids_file = sys.argv[1]
template_file = sys.argv[2]
replacements = parse_usb_ids(usb_ids_file)
for line in open(template_file, "r").readlines():
print(string.Template(line).safe_substitute(replacements), end="")
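# Illustrative inputs (hypothetical): a config header line such as
#   #define MICROPY_HW_USB_VID (0xf055)
# yields the key USB_VID with value f055, which is then substituted into
# template placeholders like ${USB_VID} via string.Template.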

View File

@@ -0,0 +1,435 @@
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2019 Damien P. George
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import sys
import os
import subprocess
###########################################################################
# Public functions to be used in the manifest
def include(manifest, **kwargs):
"""Include another manifest.
The manifest argument can be a string (filename) or an iterable of
strings.
Relative paths are resolved with respect to the current manifest file.
Optional kwargs can be provided which will be available to the
included script via the `options` variable.
e.g. include("path.py", extra_features=True)
in path.py:
options.defaults(standard_features=True)
# freeze minimal modules.
if options.standard_features:
# freeze standard modules.
if options.extra_features:
# freeze extra modules.
"""
if not isinstance(manifest, str):
for m in manifest:
include(m)
else:
manifest = convert_path(manifest)
with open(manifest) as f:
# Make paths relative to this manifest file while processing it.
# Applies to includes and input files.
prev_cwd = os.getcwd()
os.chdir(os.path.dirname(manifest))
exec(f.read(), globals(), {"options": IncludeOptions(**kwargs)})
os.chdir(prev_cwd)
def freeze(path, script=None, opt=0):
"""Freeze the input, automatically determining its type. A .py script
will be compiled to a .mpy first then frozen, and a .mpy file will be
frozen directly.
`path` must be a directory, which is the base directory to search for
files from. When importing the resulting frozen modules, the name of
the module will start after `path`, ie `path` is excluded from the
module name.
If `path` is relative, it is resolved to the current manifest.py.
Use $(MPY_DIR), $(MPY_LIB_DIR), $(PORT_DIR), $(BOARD_DIR) if you need
to access specific paths.
If `script` is None all files in `path` will be frozen.
If `script` is an iterable then freeze() is called on all items of the
iterable (with the same `path` and `opt` passed through).
If `script` is a string then it specifies the file or directory to
freeze, and can include extra directories before the file or last
directory. The file or directory will be searched for in `path`. If
`script` is a directory then all files in that directory will be frozen.
`opt` is the optimisation level to pass to mpy-cross when compiling .py
to .mpy.
"""
freeze_internal(KIND_AUTO, path, script, opt)
def freeze_as_str(path):
"""Freeze the given `path` and all .py scripts within it as a string,
which will be compiled upon import.
"""
freeze_internal(KIND_AS_STR, path, None, 0)
def freeze_as_mpy(path, script=None, opt=0):
"""Freeze the input (see above) by first compiling the .py scripts to
.mpy files, then freezing the resulting .mpy files.
"""
freeze_internal(KIND_AS_MPY, path, script, opt)
def freeze_mpy(path, script=None, opt=0):
"""Freeze the input (see above), which must be .mpy files that are
frozen directly.
"""
freeze_internal(KIND_MPY, path, script, opt)
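# Illustrative manifest.py using the helpers above (paths are hypothetical):
#   include("$(PORT_DIR)/boards/manifest.py")
#   freeze("$(MPY_DIR)/drivers/onewire", "onewire.py")
#   freeze_as_str("scripts")
#   freeze_as_mpy("modules", opt=3)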
###########################################################################
# Internal implementation
KIND_AUTO = 0
KIND_AS_STR = 1
KIND_AS_MPY = 2
KIND_MPY = 3
VARS = {}
manifest_list = []
class IncludeOptions:
def __init__(self, **kwargs):
self._kwargs = kwargs
self._defaults = {}
def defaults(self, **kwargs):
self._defaults = kwargs
def __getattr__(self, name):
return self._kwargs.get(name, self._defaults.get(name, None))
class FreezeError(Exception):
pass
def system(cmd):
try:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
return 0, output
except subprocess.CalledProcessError as er:
return -1, er.output
def convert_path(path):
    # Perform variable substitution.
for name, value in VARS.items():
path = path.replace("$({})".format(name), value)
# Convert to absolute path (so that future operations don't rely on
# still being chdir'ed).
return os.path.abspath(path)
def get_timestamp(path, default=None):
try:
stat = os.stat(path)
return stat.st_mtime
except OSError:
if default is None:
raise FreezeError("cannot stat {}".format(path))
return default
def get_timestamp_newest(path):
ts_newest = 0
for dirpath, dirnames, filenames in os.walk(path, followlinks=True):
for f in filenames:
ts_newest = max(ts_newest, get_timestamp(os.path.join(dirpath, f)))
return ts_newest
def mkdir(filename):
path = os.path.dirname(filename)
if not os.path.isdir(path):
os.makedirs(path)
def freeze_internal(kind, path, script, opt):
path = convert_path(path)
if not os.path.isdir(path):
raise FreezeError("freeze path must be a directory: {}".format(path))
if script is None and kind == KIND_AS_STR:
manifest_list.append((KIND_AS_STR, path, script, opt))
elif script is None or isinstance(script, str) and script.find(".") == -1:
# Recursively search `path` for files to freeze, optionally restricted
# to a subdirectory specified by `script`
if script is None:
subdir = ""
else:
subdir = "/" + script
for dirpath, dirnames, filenames in os.walk(path + subdir, followlinks=True):
for f in filenames:
freeze_internal(kind, path, (dirpath + "/" + f)[len(path) + 1 :], opt)
elif not isinstance(script, str):
# `script` is an iterable of items to freeze
for s in script:
freeze_internal(kind, path, s, opt)
else:
# `script` should specify an individual file to be frozen
extension_kind = {KIND_AS_MPY: ".py", KIND_MPY: ".mpy"}
if kind == KIND_AUTO:
for k, ext in extension_kind.items():
if script.endswith(ext):
kind = k
break
else:
print("warn: unsupported file type, skipped freeze: {}".format(script))
return
wanted_extension = extension_kind[kind]
if not script.endswith(wanted_extension):
raise FreezeError("expecting a {} file, got {}".format(wanted_extension, script))
manifest_list.append((kind, path, script, opt))
# Formerly make-frozen.py.
# This generates:
# - MP_FROZEN_STR_NAMES macro
# - mp_frozen_str_sizes
# - mp_frozen_str_content
def generate_frozen_str_content(paths):
def module_name(f):
return f
modules = []
output = [b"#include <stdint.h>\n"]
for path in paths:
root = path.rstrip("/")
root_len = len(root)
for dirpath, dirnames, filenames in os.walk(root):
for f in filenames:
fullpath = dirpath + "/" + f
st = os.stat(fullpath)
modules.append((path, fullpath[root_len + 1 :], st))
output.append(b"#define MP_FROZEN_STR_NAMES \\\n")
for _path, f, st in modules:
m = module_name(f)
output.append(b'"%s\\0" \\\n' % m.encode())
output.append(b"\n")
output.append(b"const uint32_t mp_frozen_str_sizes[] = { ")
for _path, f, st in modules:
output.append(b"%d, " % st.st_size)
output.append(b"0 };\n")
output.append(b"const char mp_frozen_str_content[] = {\n")
for path, f, st in modules:
data = open(path + "/" + f, "rb").read()
# We need to properly escape the script data to create a C string.
# When C parses hex characters of the form \x00 it keeps parsing the hex
# data until it encounters a non-hex character. Thus one must create
# strings of the form "data\x01" "abc" to properly encode this kind of
# data. We could just encode all characters as hex digits but it's nice
# to be able to read the resulting C code as ASCII when possible.
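        # For example, the bytes b"\x01abc" are emitted as "\x01" "abc" so the
        # hex escape cannot swallow the following ASCII characters.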
data = bytearray(data) # so Python2 extracts each byte as an integer
esc_dict = {ord("\n"): b"\\n", ord("\r"): b"\\r", ord('"'): b'\\"', ord("\\"): b"\\\\"}
output.append(b'"')
break_str = False
for c in data:
try:
output.append(esc_dict[c])
except KeyError:
if 32 <= c <= 126:
if break_str:
output.append(b'" "')
break_str = False
output.append(chr(c).encode())
else:
output.append(b"\\x%02x" % c)
break_str = True
output.append(b'\\0"\n')
output.append(b'"\\0"\n};\n\n')
return b"".join(output)
def main():
# Parse arguments
import argparse
cmd_parser = argparse.ArgumentParser(
description="A tool to generate frozen content in MicroPython firmware images."
)
cmd_parser.add_argument("-o", "--output", help="output path")
cmd_parser.add_argument("-b", "--build-dir", help="output path")
cmd_parser.add_argument(
"-f", "--mpy-cross-flags", default="", help="flags to pass to mpy-cross"
)
cmd_parser.add_argument("-v", "--var", action="append", help="variables to substitute")
cmd_parser.add_argument("--mpy-tool-flags", default="", help="flags to pass to mpy-tool")
cmd_parser.add_argument("files", nargs="+", help="input manifest list")
args = cmd_parser.parse_args()
# Extract variables for substitution.
for var in args.var:
name, value = var.split("=", 1)
if os.path.exists(value):
value = os.path.abspath(value)
VARS[name] = value
if "MPY_DIR" not in VARS or "PORT_DIR" not in VARS:
print("MPY_DIR and PORT_DIR variables must be specified")
sys.exit(1)
# Get paths to tools
MPY_CROSS = VARS["MPY_DIR"] + "/mpy-cross/mpy-cross"
if sys.platform == "win32":
MPY_CROSS += ".exe"
MPY_CROSS = os.getenv("MICROPY_MPYCROSS", MPY_CROSS)
MPY_TOOL = VARS["MPY_DIR"] + "/tools/mpy-tool.py"
# Ensure mpy-cross is built
if not os.path.exists(MPY_CROSS):
print("mpy-cross not found at {}, please build it first".format(MPY_CROSS))
sys.exit(1)
# Include top-level inputs, to generate the manifest
for input_manifest in args.files:
try:
if input_manifest.endswith(".py"):
include(input_manifest)
else:
exec(input_manifest)
except FreezeError as er:
print('freeze error executing "{}": {}'.format(input_manifest, er.args[0]))
sys.exit(1)
# Process the manifest
str_paths = []
mpy_files = []
ts_newest = 0
for kind, path, script, opt in manifest_list:
if kind == KIND_AS_STR:
str_paths.append(path)
ts_outfile = get_timestamp_newest(path)
elif kind == KIND_AS_MPY:
infile = "{}/{}".format(path, script)
outfile = "{}/frozen_mpy/{}.mpy".format(args.build_dir, script[:-3])
ts_infile = get_timestamp(infile)
ts_outfile = get_timestamp(outfile, 0)
if ts_infile >= ts_outfile:
print("MPY", script)
mkdir(outfile)
res, out = system(
[MPY_CROSS]
+ args.mpy_cross_flags.split()
+ ["-o", outfile, "-s", script, "-O{}".format(opt), infile]
)
if res != 0:
print("error compiling {}:".format(infile))
sys.stdout.buffer.write(out)
raise SystemExit(1)
ts_outfile = get_timestamp(outfile)
mpy_files.append(outfile)
else:
assert kind == KIND_MPY
infile = "{}/{}".format(path, script)
mpy_files.append(infile)
ts_outfile = get_timestamp(infile)
ts_newest = max(ts_newest, ts_outfile)
# Check if output file needs generating
if ts_newest < get_timestamp(args.output, 0):
# No files are newer than output file so it does not need updating
return
# Freeze paths as strings
output_str = generate_frozen_str_content(str_paths)
# Freeze .mpy files
if mpy_files:
res, output_mpy = system(
[
sys.executable,
MPY_TOOL,
"-f",
"-q",
args.build_dir + "/genhdr/qstrdefs.preprocessed.h",
]
+ args.mpy_tool_flags.split()
+ mpy_files
)
if res != 0:
print("error freezing mpy {}:".format(mpy_files))
print(output_mpy.decode())
sys.exit(1)
else:
output_mpy = (
b'#include "py/emitglue.h"\n'
b"extern const qstr_pool_t mp_qstr_const_pool;\n"
b"const qstr_pool_t mp_qstr_frozen_const_pool = {\n"
b" (qstr_pool_t*)&mp_qstr_const_pool, MP_QSTRnumber_of, 0, 0\n"
b"};\n"
b'const char mp_frozen_names[] = { MP_FROZEN_STR_NAMES "\\0"};\n'
b"const mp_raw_code_t *const mp_frozen_mpy_content[] = {NULL};\n"
)
# Generate output
print("GEN", args.output)
mkdir(args.output)
with open(args.output, "wb") as f:
f.write(b"//\n// Content for MICROPY_MODULE_FROZEN_STR\n//\n")
f.write(output_str)
f.write(b"//\n// Content for MICROPY_MODULE_FROZEN_MPY\n//\n")
f.write(output_mpy)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,231 @@
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2020 Damien P. George
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
This script is used to compute metrics, like code size, of the various ports.
Typical usage is:
$ ./tools/metrics.py build | tee size0
<wait for build to complete>
$ git switch new-feature-branch
$ ./tools/metrics.py build | tee size1
<wait for build to complete>
$ ./tools/metrics.py diff size0 size1
Other commands:
$ ./tools/metrics.py sizes # print all firmware sizes
$ ./tools/metrics.py clean # clean all ports
"""
import collections, sys, re, subprocess
MAKE_FLAGS = ["-j3", "CFLAGS_EXTRA=-DNDEBUG"]
class PortData:
def __init__(self, name, dir, output, make_flags=None):
self.name = name
self.dir = dir
self.output = output
self.make_flags = make_flags
self.needs_mpy_cross = dir not in ("bare-arm", "minimal")
port_data = {
"b": PortData("bare-arm", "bare-arm", "build/firmware.elf"),
"m": PortData("minimal x86", "minimal", "build/firmware.elf"),
"u": PortData("unix x64", "unix", "micropython"),
"n": PortData("unix nanbox", "unix", "micropython-nanbox", "VARIANT=nanbox"),
"s": PortData("stm32", "stm32", "build-PYBV10/firmware.elf", "BOARD=PYBV10"),
"c": PortData("cc3200", "cc3200", "build/WIPY/release/application.axf", "BTARGET=application"),
"8": PortData("esp8266", "esp8266", "build-GENERIC/firmware.elf"),
"3": PortData("esp32", "esp32", "build-GENERIC/micropython.elf"),
"r": PortData("nrf", "nrf", "build-pca10040/firmware.elf"),
"p": PortData("rp2", "rp2", "build-PICO/firmware.elf"),
"d": PortData("samd", "samd", "build-ADAFRUIT_ITSYBITSY_M4_EXPRESS/firmware.elf"),
}
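# Port selector letters can be combined on the command line, e.g. (illustrative)
# "./tools/metrics.py build us" builds just the unix x64 and stm32 ports.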
def syscmd(*args):
sys.stdout.flush()
a2 = []
for a in args:
if isinstance(a, str):
a2.append(a)
elif a:
a2.extend(a)
subprocess.check_call(a2)
def parse_port_list(args):
if not args:
return list(port_data.values())
else:
ports = []
for arg in args:
for port_char in arg:
try:
ports.append(port_data[port_char])
except KeyError:
print("unknown port:", port_char)
sys.exit(1)
return ports
def read_build_log(filename):
data = collections.OrderedDict()
lines = []
found_sizes = False
with open(filename) as f:
for line in f:
line = line.strip()
if line.strip() == "COMPUTING SIZES":
found_sizes = True
elif found_sizes:
lines.append(line)
is_size_line = False
for line in lines:
if is_size_line:
fields = line.split()
data[fields[-1]] = [int(f) for f in fields[:-2]]
is_size_line = False
else:
is_size_line = line.startswith("text\t ")
return data
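# Typical (illustrative) `size` output that read_build_log() parses; columns
# are tab-separated and the filename identifies the port:
#   text    data    bss     dec     hex     filename
#   345678  1024    12288   358990  57a4e   ports/stm32/build-PYBV10/firmware.elf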
def do_diff(args):
"""Compute the difference between firmware sizes."""
# Parse arguments.
error_threshold = None
if len(args) >= 2 and args[0] == "--error-threshold":
args.pop(0)
error_threshold = int(args.pop(0))
if len(args) != 2:
print("usage: %s diff [--error-threshold <x>] <out1> <out2>" % sys.argv[0])
sys.exit(1)
data1 = read_build_log(args[0])
data2 = read_build_log(args[1])
max_delta = None
for key, value1 in data1.items():
value2 = data2[key]
for port in port_data.values():
if key == "ports/{}/{}".format(port.dir, port.output):
name = port.name
break
data = [v2 - v1 for v1, v2 in zip(value1, value2)]
warn = ""
board = re.search(r"/build-([A-Za-z0-9_]+)/", key)
if board:
board = board.group(1)
else:
board = ""
if name == "cc3200":
delta = data[0]
percent = 100 * delta / value1[0]
if data[1] != 0:
warn += " %+u(data)" % data[1]
else:
delta = data[3]
percent = 100 * delta / value1[3]
if data[1] != 0:
warn += " %+u(data)" % data[1]
if data[2] != 0:
warn += " %+u(bss)" % data[2]
if warn:
warn = "[incl%s]" % warn
print("%11s: %+5u %+.3f%% %s%s" % (name, delta, percent, board, warn))
max_delta = delta if max_delta is None else max(max_delta, delta)
if error_threshold is not None and max_delta is not None:
if max_delta > error_threshold:
sys.exit(1)
def do_clean(args):
"""Clean ports."""
ports = parse_port_list(args)
print("CLEANING")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), port.make_flags, "clean")
def do_build(args):
"""Build ports and print firmware sizes."""
ports = parse_port_list(args)
if any(port.needs_mpy_cross for port in ports):
print("BUILDING MPY-CROSS")
syscmd("make", "-C", "mpy-cross", MAKE_FLAGS)
print("BUILDING PORTS")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), MAKE_FLAGS, port.make_flags)
do_sizes(args)
def do_sizes(args):
"""Compute and print sizes of firmware."""
ports = parse_port_list(args)
print("COMPUTING SIZES")
for port in ports:
syscmd("size", "ports/{}/{}".format(port.dir, port.output))
def main():
# Get command to execute
if len(sys.argv) == 1:
print("Available commands:")
for cmd in globals():
if cmd.startswith("do_"):
print(" {:9} {}".format(cmd[3:], globals()[cmd].__doc__))
sys.exit(1)
cmd = sys.argv.pop(1)
# Dispatch to desired command
try:
cmd = globals()["do_{}".format(cmd)]
except KeyError:
print("{}: unknown command '{}'".format(sys.argv[0], cmd))
sys.exit(1)
cmd(sys.argv[1:])
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2021-2022 Damien P. George
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,75 @@
# mpremote -- MicroPython remote control
This CLI tool provides an integrated set of utilities to remotely interact with
and automate a MicroPython device over a serial connection.
The simplest way to use this tool is:
mpremote
This will automatically connect to the device and provide an interactive REPL.
The full list of supported commands is:
mpremote connect <device> -- connect to given device
device may be: list, auto, id:x, port:x
or any valid device name/path
mpremote disconnect -- disconnect current device
mpremote mount <local-dir> -- mount local directory on device
mpremote eval <string> -- evaluate and print the string
mpremote exec <string> -- execute the string
mpremote run <file> -- run the given local script
mpremote fs <command> <args...> -- execute filesystem commands on the device
command may be: cat, ls, cp, rm, mkdir, rmdir
use ":" as a prefix to specify a file on the device
mpremote repl -- enter REPL
options:
--capture <file>
--inject-code <string>
--inject-file <file>
mpremote help -- print list of commands and exit
Multiple commands can be specified and they will be run sequentially. Connection
and disconnection will be done automatically at the start and end of the execution
of the tool, if such commands are not explicitly given. Automatic connection will
search for the first available serial device. If no action is specified then the
REPL will be entered.
Shortcuts can be defined using the macro system. Built-in shortcuts are:
- a0, a1, a2, a3: connect to `/dev/ttyACM?`
- u0, u1, u2, u3: connect to `/dev/ttyUSB?`
- c0, c1, c2, c3: connect to `COM?`
- cat, ls, cp, rm, mkdir, rmdir, df: filesystem commands
- reset: reset the device
- bootloader: make the device enter its bootloader
Any user configuration, including user-defined shortcuts, can be placed in
.config/mpremote/config.py. For example:
# Custom macro commands
commands = {
"c33": "connect id:334D335C3138",
"bl": "bootloader",
"double x=4": {
"command": "eval x*2",
"help": "multiply by two"
}
}
Examples:
mpremote
mpremote a1
mpremote connect /dev/ttyUSB0 repl
mpremote ls
mpremote a1 ls
mpremote exec "import micropython; micropython.mem_info()"
mpremote eval 1/2 eval 3/4
mpremote mount .
mpremote mount . exec "import local_script"
mpremote ls
mpremote cat boot.py
mpremote cp :main.py .
mpremote cp main.py :
mpremote cp -r dir/ :

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env python3
import sys
from mpremote import main
sys.exit(main.main())

View File

@@ -0,0 +1 @@
__version__ = "0.3.0"

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env python3
import sys
from mpremote import main
sys.exit(main.main())

View File

@@ -0,0 +1,176 @@
import sys, time
try:
import select, termios
except ImportError:
termios = None
select = None
import msvcrt, signal
class ConsolePosix:
def __init__(self):
self.infd = sys.stdin.fileno()
self.infile = sys.stdin.buffer
self.outfile = sys.stdout.buffer
if hasattr(self.infile, "raw"):
self.infile = self.infile.raw
if hasattr(self.outfile, "raw"):
self.outfile = self.outfile.raw
self.orig_attr = termios.tcgetattr(self.infd)
def enter(self):
# attr is: [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
attr = termios.tcgetattr(self.infd)
attr[0] &= ~(
termios.BRKINT | termios.ICRNL | termios.INPCK | termios.ISTRIP | termios.IXON
)
attr[1] = 0
attr[2] = attr[2] & ~(termios.CSIZE | termios.PARENB) | termios.CS8
attr[3] = 0
attr[6][termios.VMIN] = 1
attr[6][termios.VTIME] = 0
termios.tcsetattr(self.infd, termios.TCSANOW, attr)
def exit(self):
termios.tcsetattr(self.infd, termios.TCSANOW, self.orig_attr)
def waitchar(self, pyb_serial):
# TODO pyb_serial might not have fd
select.select([self.infd, pyb_serial.fd], [], [])
def readchar(self):
res = select.select([self.infd], [], [], 0)
if res[0]:
return self.infile.read(1)
else:
return None
def write(self, buf):
self.outfile.write(buf)
class ConsoleWindows:
KEY_MAP = {
b"H": b"A", # UP
b"P": b"B", # DOWN
b"M": b"C", # RIGHT
b"K": b"D", # LEFT
b"G": b"H", # POS1
b"O": b"F", # END
b"Q": b"6~", # PGDN
b"I": b"5~", # PGUP
b"s": b"1;5D", # CTRL-LEFT,
b"t": b"1;5C", # CTRL-RIGHT,
b"\x8d": b"1;5A", # CTRL-UP,
b"\x91": b"1;5B", # CTRL-DOWN,
b"w": b"1;5H", # CTRL-POS1
b"u": b"1;5F", # CTRL-END
b"\x98": b"1;3A", # ALT-UP,
b"\xa0": b"1;3B", # ALT-DOWN,
b"\x9d": b"1;3C", # ALT-RIGHT,
b"\x9b": b"1;3D", # ALT-LEFT,
b"\x97": b"1;3H", # ALT-POS1,
b"\x9f": b"1;3F", # ALT-END,
b"S": b"3~", # DEL,
b"\x93": b"3;5~", # CTRL-DEL
b"R": b"2~", # INS
b"\x92": b"2;5~", # CTRL-INS
b"\x94": b"Z", # Ctrl-Tab = BACKTAB,
}
def __init__(self):
self.ctrl_c = 0
def _sigint_handler(self, signo, frame):
self.ctrl_c += 1
def enter(self):
signal.signal(signal.SIGINT, self._sigint_handler)
def exit(self):
signal.signal(signal.SIGINT, signal.SIG_DFL)
def inWaiting(self):
return 1 if self.ctrl_c or msvcrt.kbhit() else 0
def waitchar(self, pyb_serial):
while not (self.inWaiting() or pyb_serial.inWaiting()):
time.sleep(0.01)
def readchar(self):
if self.ctrl_c:
self.ctrl_c -= 1
return b"\x03"
if msvcrt.kbhit():
ch = msvcrt.getch()
while ch in b"\x00\xe0": # arrow or function key prefix?
if not msvcrt.kbhit():
return None
ch = msvcrt.getch() # second call returns the actual key code
try:
ch = b"\x1b[" + self.KEY_MAP[ch]
except KeyError:
return None
return ch
def write(self, buf):
buf = buf.decode() if isinstance(buf, bytes) else buf
sys.stdout.write(buf)
sys.stdout.flush()
# for b in buf:
# if isinstance(b, bytes):
# msvcrt.putch(b)
# else:
# msvcrt.putwch(b)
if termios:
Console = ConsolePosix
VT_ENABLED = True
else:
Console = ConsoleWindows
# Windows VT mode ( >= win10 only)
# https://bugs.python.org/msg291732
import ctypes, os
from ctypes import wintypes
kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
ERROR_INVALID_PARAMETER = 0x0057
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
def _check_bool(result, func, args):
if not result:
raise ctypes.WinError(ctypes.get_last_error())
return args
LPDWORD = ctypes.POINTER(wintypes.DWORD)
kernel32.GetConsoleMode.errcheck = _check_bool
kernel32.GetConsoleMode.argtypes = (wintypes.HANDLE, LPDWORD)
kernel32.SetConsoleMode.errcheck = _check_bool
kernel32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def set_conout_mode(new_mode, mask=0xFFFFFFFF):
# don't assume StandardOutput is a console.
# open CONOUT$ instead
fdout = os.open("CONOUT$", os.O_RDWR)
try:
hout = msvcrt.get_osfhandle(fdout)
old_mode = wintypes.DWORD()
kernel32.GetConsoleMode(hout, ctypes.byref(old_mode))
mode = (new_mode & mask) | (old_mode.value & ~mask)
kernel32.SetConsoleMode(hout, mode)
return old_mode.value
finally:
os.close(fdout)
# def enable_vt_mode():
mode = mask = ENABLE_VIRTUAL_TERMINAL_PROCESSING
try:
set_conout_mode(mode, mask)
VT_ENABLED = True
except WindowsError as e:
VT_ENABLED = False

View File

@@ -0,0 +1,581 @@
"""
MicroPython Remote - Interaction and automation tool for MicroPython
MIT license; Copyright (c) 2019-2022 Damien P. George
This program provides a set of utilities to interact with and automate a
MicroPython device over a serial connection. Commands supported are:
mpremote -- auto-detect, connect and enter REPL
mpremote <device-shortcut> -- connect to given device
mpremote connect <device> -- connect to given device
mpremote disconnect -- disconnect current device
mpremote mount <local-dir> -- mount local directory on device
mpremote eval <string> -- evaluate and print the string
mpremote exec <string> -- execute the string
mpremote run <script> -- run the given local script
mpremote fs <command> <args...> -- execute filesystem commands on the device
mpremote repl -- enter REPL
"""
import os, sys
from collections.abc import Mapping
from textwrap import dedent
import serial.tools.list_ports
from . import pyboardextended as pyboard
from .console import Console, ConsolePosix
_PROG = "mpremote"
_COMMANDS = {
"connect": (
False,
False,
1,
"""\
connect to given device
device may be: list, auto, id:x, port:x
or any valid device name/path""",
),
"disconnect": (False, False, 0, "disconnect current device"),
"resume": (False, False, 0, "resume a previous mpremote session (will not auto soft-reset)"),
"soft-reset": (False, True, 0, "perform a soft-reset of the device"),
"mount": (
True,
False,
1,
"""\
mount local directory on device
options:
--unsafe-links, -l
follow symbolic links pointing outside of local directory""",
),
"umount": (True, False, 0, "unmount the local directory"),
"repl": (
False,
True,
0,
"""\
enter REPL
options:
--capture <file>
--inject-code <string>
--inject-file <file>""",
),
"eval": (True, True, 1, "evaluate and print the string"),
"exec": (True, True, 1, "execute the string"),
"run": (True, True, 1, "run the given local script"),
"fs": (True, True, 1, "execute filesystem commands on the device"),
"help": (False, False, 0, "print help and exit"),
"version": (False, False, 0, "print version and exit"),
}
_BUILTIN_COMMAND_EXPANSIONS = {
# Device connection shortcuts.
"devs": {
"command": "connect list",
"help": "list available serial ports",
},
# Filesystem shortcuts.
"cat": "fs cat",
"ls": "fs ls",
"cp": "fs cp",
"rm": "fs rm",
"mkdir": "fs mkdir",
"rmdir": "fs rmdir",
"df": [
"exec",
"import uos\nprint('mount \\tsize \\tused \\tavail \\tuse%')\nfor _m in [''] + uos.listdir('/'):\n _s = uos.stat('/' + _m)\n if not _s[0] & 1 << 14: continue\n _s = uos.statvfs(_m)\n if _s[0]:\n _size = _s[0] * _s[2]; _free = _s[0] * _s[3]; print(_m, _size, _size - _free, _free, int(100 * (_size - _free) / _size), sep='\\t')",
],
# Other shortcuts.
"reset t_ms=100": {
"command": [
"exec",
"--no-follow",
"import utime, machine; utime.sleep_ms(t_ms); machine.reset()",
],
"help": "reset the device after delay",
},
"bootloader t_ms=100": {
"command": [
"exec",
"--no-follow",
"import utime, machine; utime.sleep_ms(t_ms); machine.bootloader()",
],
"help": "make the device enter its bootloader",
},
"setrtc": [
"exec",
"import machine; machine.RTC().datetime((2020, 1, 1, 0, 10, 0, 0, 0))",
],
"--help": "help",
"--version": "version",
}
for port_num in range(4):
for prefix, port in [("a", "/dev/ttyACM"), ("u", "/dev/ttyUSB"), ("c", "COM")]:
_BUILTIN_COMMAND_EXPANSIONS["{}{}".format(prefix, port_num)] = {
"command": "connect {}{}".format(port, port_num),
"help": 'connect to serial port "{}{}"'.format(port, port_num),
}
def load_user_config():
# Create empty config object.
config = __build_class__(lambda: None, "Config")()
config.commands = {}
# Get config file name.
path = os.getenv("XDG_CONFIG_HOME")
if path is None:
path = os.getenv("HOME")
if path is None:
return config
path = os.path.join(path, ".config")
path = os.path.join(path, _PROG)
config_file = os.path.join(path, "config.py")
# Check if config file exists.
if not os.path.exists(config_file):
return config
# Exec the config file in its directory.
with open(config_file) as f:
config_data = f.read()
prev_cwd = os.getcwd()
os.chdir(path)
exec(config_data, config.__dict__)
os.chdir(prev_cwd)
return config
def prepare_command_expansions(config):
global _command_expansions
_command_expansions = {}
for command_set in (_BUILTIN_COMMAND_EXPANSIONS, config.commands):
for cmd, sub in command_set.items():
cmd = cmd.split()
if len(cmd) == 1:
args = ()
else:
args = tuple(c.split("=") for c in cmd[1:])
help_message = ""
if isinstance(sub, Mapping):
help_message = sub.get("help", "")
sub = sub["command"]
if isinstance(sub, str):
sub = sub.split()
_command_expansions[cmd[0]] = (args, sub, help_message)
def do_command_expansion(args):
def usage_error(cmd, exp_args, msg):
print(f"Command {cmd} {msg}; signature is:")
print(" ", cmd, " ".join("=".join(a) for a in exp_args))
sys.exit(1)
last_arg_idx = len(args)
pre = []
while args and args[0] in _command_expansions:
cmd = args.pop(0)
exp_args, exp_sub, _ = _command_expansions[cmd]
for exp_arg in exp_args:
exp_arg_name = exp_arg[0]
if args and "=" not in args[0]:
# Argument given without a name.
value = args.pop(0)
elif args and args[0].startswith(exp_arg_name + "="):
# Argument given with correct name.
value = args.pop(0).split("=", 1)[1]
else:
# No argument given, or argument given with a different name.
if len(exp_arg) == 1:
# Required argument (it has no default).
usage_error(cmd, exp_args, f"missing argument {exp_arg_name}")
else:
# Optional argument with a default.
value = exp_arg[1]
pre.append(f"{exp_arg_name}={value}")
args[0:0] = exp_sub
last_arg_idx = len(exp_sub)
if last_arg_idx < len(args) and "=" in args[last_arg_idx]:
# Extra unknown arguments given.
arg = args[last_arg_idx].split("=", 1)[0]
usage_error(cmd, exp_args, f"given unexpected argument {arg}")
sys.exit(1)
# Insert expansion with optional setting of arguments.
if pre:
args[0:0] = ["exec", ";".join(pre)]
def do_connect(args):
dev = args.pop(0)
try:
if dev == "list":
# List attached devices.
for p in sorted(serial.tools.list_ports.comports()):
print(
"{} {} {:04x}:{:04x} {} {}".format(
p.device,
p.serial_number,
p.vid if isinstance(p.vid, int) else 0,
p.pid if isinstance(p.pid, int) else 0,
p.manufacturer,
p.product,
)
)
return None
elif dev == "auto":
# Auto-detect and auto-connect to the first available device.
for p in sorted(serial.tools.list_ports.comports()):
try:
return pyboard.PyboardExtended(p.device, baudrate=115200)
except pyboard.PyboardError as er:
if not er.args[0].startswith("failed to access"):
raise er
raise pyboard.PyboardError("no device found")
elif dev.startswith("id:"):
# Search for a device with the given serial number.
serial_number = dev[len("id:") :]
dev = None
for p in serial.tools.list_ports.comports():
if p.serial_number == serial_number:
return pyboard.PyboardExtended(p.device, baudrate=115200)
raise pyboard.PyboardError("no device with serial number {}".format(serial_number))
else:
# Connect to the given device.
if dev.startswith("port:"):
dev = dev[len("port:") :]
return pyboard.PyboardExtended(dev, baudrate=115200)
except pyboard.PyboardError as er:
msg = er.args[0]
if msg.startswith("failed to access"):
msg += " (it may be in use by another program)"
print(msg)
sys.exit(1)
def do_disconnect(pyb):
try:
if pyb.mounted:
if not pyb.in_raw_repl:
pyb.enter_raw_repl(soft_reset=False)
pyb.umount_local()
if pyb.in_raw_repl:
pyb.exit_raw_repl()
except OSError:
# Ignore any OSError exceptions when shutting down, eg:
# - pyboard.filesystem_command will close the connection if it had an error
# - umounting will fail if serial port disappeared
pass
pyb.close()
def show_progress_bar(size, total_size):
if not sys.stdout.isatty():
return
verbose_size = 2048
bar_length = 20
if total_size < verbose_size:
return
elif size >= total_size:
# Clear progress bar when copy completes
print("\r" + " " * (20 + bar_length) + "\r", end="")
else:
progress = size / total_size
bar = round(progress * bar_length)
print(
"\r ... copying {:3.0f}% [{}{}]".format(
progress * 100, "#" * bar, "-" * (bar_length - bar)
),
end="",
)
def do_filesystem(pyb, args):
def _list_recursive(files, path):
if os.path.isdir(path):
for entry in os.listdir(path):
_list_recursive(files, "/".join((path, entry)))
else:
files.append(os.path.split(path))
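# `fs cp -r <local paths> :` recursively copies local files and directories to
# the device, creating any missing remote directories first; all other fs
# sub-commands are passed straight through to pyboard.filesystem_command.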
if args[0] == "cp" and args[1] == "-r":
args.pop(0)
args.pop(0)
assert args[-1] == ":"
args.pop()
src_files = []
for path in args:
_list_recursive(src_files, path)
known_dirs = {""}
pyb.exec_("import uos")
for dir, file in src_files:
dir_parts = dir.split("/")
for i in range(len(dir_parts)):
d = "/".join(dir_parts[: i + 1])
if d not in known_dirs:
pyb.exec_("try:\n uos.mkdir('%s')\nexcept OSError as e:\n print(e)" % d)
known_dirs.add(d)
pyboard.filesystem_command(
pyb,
["cp", "/".join((dir, file)), ":" + dir + "/"],
progress_callback=show_progress_bar,
)
else:
pyboard.filesystem_command(pyb, args, progress_callback=show_progress_bar)
args.clear()
def do_repl_main_loop(pyb, console_in, console_out_write, *, code_to_inject, file_to_inject):
while True:
console_in.waitchar(pyb.serial)
c = console_in.readchar()
if c:
if c == b"\x1d": # ctrl-], quit
break
elif c == b"\x04": # ctrl-D
# special handling needed for ctrl-D if filesystem is mounted
pyb.write_ctrl_d(console_out_write)
elif c == b"\x0a" and code_to_inject is not None: # ctrl-j, inject code
pyb.serial.write(code_to_inject)
elif c == b"\x0b" and file_to_inject is not None: # ctrl-k, inject script
console_out_write(bytes("Injecting %s\r\n" % file_to_inject, "utf8"))
pyb.enter_raw_repl(soft_reset=False)
with open(file_to_inject, "rb") as f:
pyfile = f.read()
try:
pyb.exec_raw_no_follow(pyfile)
except pyboard.PyboardError as er:
console_out_write(b"Error:\r\n")
console_out_write(er)
pyb.exit_raw_repl()
else:
pyb.serial.write(c)
try:
n = pyb.serial.inWaiting()
except OSError as er:
if er.args[0] == 5: # IO error, device disappeared
print("device disconnected")
break
if n > 0:
c = pyb.serial.read(1)
if c is not None:
# pass character through to the console
oc = ord(c)
if oc in (8, 9, 10, 13, 27) or 32 <= oc <= 126:
console_out_write(c)
else:
console_out_write(b"[%02x]" % ord(c))
def do_repl(pyb, args):
capture_file = None
code_to_inject = None
file_to_inject = None
while len(args):
if args[0] == "--capture":
args.pop(0)
capture_file = args.pop(0)
elif args[0] == "--inject-code":
args.pop(0)
code_to_inject = bytes(args.pop(0).replace("\\n", "\r\n"), "utf8")
elif args[0] == "--inject-file":
args.pop(0)
file_to_inject = args.pop(0)
else:
break
print("Connected to MicroPython at %s" % pyb.device_name)
print("Use Ctrl-] to exit this shell")
if capture_file is not None:
print('Capturing session to file "%s"' % capture_file)
capture_file = open(capture_file, "wb")
if code_to_inject is not None:
print("Use Ctrl-J to inject", code_to_inject)
if file_to_inject is not None:
print('Use Ctrl-K to inject file "%s"' % file_to_inject)
console = Console()
console.enter()
def console_out_write(b):
console.write(b)
if capture_file is not None:
capture_file.write(b)
capture_file.flush()
try:
do_repl_main_loop(
pyb,
console,
console_out_write,
code_to_inject=code_to_inject,
file_to_inject=file_to_inject,
)
finally:
console.exit()
if capture_file is not None:
capture_file.close()
def execbuffer(pyb, buf, follow):
ret_val = 0
try:
pyb.exec_raw_no_follow(buf)
if follow:
ret, ret_err = pyb.follow(timeout=None, data_consumer=pyboard.stdout_write_bytes)
if ret_err:
pyboard.stdout_write_bytes(ret_err)
ret_val = 1
except pyboard.PyboardError as er:
print(er)
ret_val = 1
except KeyboardInterrupt:
ret_val = 1
return ret_val
def print_help():
def print_commands_help(cmds, help_idx):
max_command_len = max(len(cmd) for cmd in cmds.keys())
for cmd in sorted(cmds.keys()):
help_message_lines = dedent(cmds[cmd][help_idx]).split("\n")
help_message = help_message_lines[0]
for line in help_message_lines[1:]:
help_message = "{}\n{}{}".format(help_message, " " * (max_command_len + 4), line)
print(" ", cmd, " " * (max_command_len - len(cmd) + 2), help_message, sep="")
print(_PROG, "-- MicroPython remote control")
print("See https://docs.micropython.org/en/latest/reference/mpremote.html")
print("\nList of commands:")
print_commands_help(_COMMANDS, 3)
print("\nList of shortcuts:")
print_commands_help(_command_expansions, 2)
def print_version():
from . import __version__
print(f"{_PROG} {__version__}")
def main():
config = load_user_config()
prepare_command_expansions(config)
args = sys.argv[1:]
pyb = None
auto_soft_reset = True
did_action = False
try:
while args:
do_command_expansion(args)
cmd = args.pop(0)
try:
need_raw_repl, is_action, num_args_min, _ = _COMMANDS[cmd]
except KeyError:
print(f"{_PROG}: '{cmd}' is not a command")
return 1
if len(args) < num_args_min:
print(f"{_PROG}: '{cmd}' neads at least {num_args_min} argument(s)")
return 1
if cmd == "connect":
if pyb is not None:
do_disconnect(pyb)
pyb = do_connect(args)
if pyb is None:
did_action = True
continue
elif cmd == "help":
print_help()
sys.exit(0)
elif cmd == "version":
print_version()
sys.exit(0)
elif cmd == "resume":
auto_soft_reset = False
continue
# The following commands need a connection, and either a raw or friendly REPL.
if pyb is None:
pyb = do_connect(["auto"])
if need_raw_repl:
if not pyb.in_raw_repl:
pyb.enter_raw_repl(soft_reset=auto_soft_reset)
auto_soft_reset = False
else:
if pyb.in_raw_repl:
pyb.exit_raw_repl()
if is_action:
did_action = True
if cmd == "disconnect":
do_disconnect(pyb)
pyb = None
auto_soft_reset = True
elif cmd == "soft-reset":
pyb.enter_raw_repl(soft_reset=True)
auto_soft_reset = False
elif cmd == "mount":
unsafe_links = False
if args[0] == "--unsafe-links" or args[0] == "-l":
args.pop(0)
unsafe_links = True
path = args.pop(0)
pyb.mount_local(path, unsafe_links=unsafe_links)
print(f"Local directory {path} is mounted at /remote")
elif cmd == "umount":
pyb.umount_local()
elif cmd in ("exec", "eval", "run"):
follow = True
if args[0] == "--no-follow":
args.pop(0)
follow = False
if cmd == "exec":
buf = args.pop(0)
elif cmd == "eval":
buf = "print(" + args.pop(0) + ")"
else:
filename = args.pop(0)
try:
with open(filename, "rb") as f:
buf = f.read()
except OSError:
print(f"{_PROG}: could not read file '{filename}'")
return 1
ret = execbuffer(pyb, buf, follow)
if ret:
return ret
elif cmd == "fs":
do_filesystem(pyb, args)
elif cmd == "repl":
do_repl(pyb, args)
if not did_action:
if pyb is None:
pyb = do_connect(["auto"])
if pyb.in_raw_repl:
pyb.exit_raw_repl()
do_repl(pyb, args)
finally:
if pyb is not None:
do_disconnect(pyb)

View File

@@ -0,0 +1,718 @@
import io, os, re, serial, struct, time
from errno import EPERM
from .console import VT_ENABLED
try:
from .pyboard import Pyboard, PyboardError, stdout_write_bytes, filesystem_command
except ImportError:
import sys
sys.path.append(os.path.dirname(__file__) + "/../..")
from pyboard import Pyboard, PyboardError, stdout_write_bytes, filesystem_command
fs_hook_cmds = {
"CMD_STAT": 1,
"CMD_ILISTDIR_START": 2,
"CMD_ILISTDIR_NEXT": 3,
"CMD_OPEN": 4,
"CMD_CLOSE": 5,
"CMD_READ": 6,
"CMD_WRITE": 7,
"CMD_SEEK": 8,
"CMD_REMOVE": 9,
"CMD_RENAME": 10,
"CMD_MKDIR": 11,
"CMD_RMDIR": 12,
}
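# These numeric command codes are substituted into fs_hook_code below (see the
# "basic compression" step) and must stay in sync with the host-side dispatch
# table PyboardCommand.cmd_table.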
fs_hook_code = """\
import uos, uio, ustruct, micropython
SEEK_SET = 0
class RemoteCommand:
def __init__(self):
import uselect, usys
self.buf4 = bytearray(4)
self.fout = usys.stdout.buffer
self.fin = usys.stdin.buffer
self.poller = uselect.poll()
self.poller.register(self.fin, uselect.POLLIN)
def poll_in(self):
for _ in self.poller.ipoll(1000):
return
self.end()
raise Exception('timeout waiting for remote')
def rd(self, n):
buf = bytearray(n)
self.rd_into(buf, n)
return buf
def rd_into(self, buf, n):
# implement reading with a timeout in case other side disappears
if n == 0:
return
self.poll_in()
r = self.fin.readinto(buf, n)
if r < n:
mv = memoryview(buf)
while r < n:
self.poll_in()
r += self.fin.readinto(mv[r:], n - r)
def begin(self, type):
micropython.kbd_intr(-1)
buf4 = self.buf4
buf4[0] = 0x18
buf4[1] = type
self.fout.write(buf4, 2)
# Wait for sync byte 0x18, but don't get stuck forever
for i in range(30):
self.poller.poll(1000)
self.fin.readinto(buf4, 1)
if buf4[0] == 0x18:
break
def end(self):
micropython.kbd_intr(3)
def rd_s8(self):
self.rd_into(self.buf4, 1)
n = self.buf4[0]
if n & 0x80:
n -= 0x100
return n
def rd_s32(self):
buf4 = self.buf4
self.rd_into(buf4, 4)
n = buf4[0] | buf4[1] << 8 | buf4[2] << 16 | buf4[3] << 24
if buf4[3] & 0x80:
n -= 0x100000000
return n
def rd_u32(self):
buf4 = self.buf4
self.rd_into(buf4, 4)
return buf4[0] | buf4[1] << 8 | buf4[2] << 16 | buf4[3] << 24
def rd_bytes(self, buf):
# TODO if n is large (eg >256) then we may miss bytes on stdin
n = self.rd_s32()
if buf is None:
ret = buf = bytearray(n)
else:
ret = n
self.rd_into(buf, n)
return ret
def rd_str(self):
n = self.rd_s32()
if n == 0:
return ''
else:
return str(self.rd(n), 'utf8')
def wr_s8(self, i):
self.buf4[0] = i
self.fout.write(self.buf4, 1)
def wr_s32(self, i):
ustruct.pack_into('<i', self.buf4, 0, i)
self.fout.write(self.buf4)
def wr_bytes(self, b):
self.wr_s32(len(b))
self.fout.write(b)
# str and bytes act the same in MicroPython
wr_str = wr_bytes
class RemoteFile(uio.IOBase):
def __init__(self, cmd, fd, is_text):
self.cmd = cmd
self.fd = fd
self.is_text = is_text
def __enter__(self):
return self
def __exit__(self, a, b, c):
self.close()
def ioctl(self, request, arg):
if request == 1: # FLUSH
self.flush()
elif request == 2: # SEEK
# This assumes a 32-bit bare-metal machine.
import machine
machine.mem32[arg] = self.seek(machine.mem32[arg], machine.mem32[arg + 4])
elif request == 4: # CLOSE
self.close()
return 0
def flush(self):
pass
def close(self):
if self.fd is None:
return
c = self.cmd
c.begin(CMD_CLOSE)
c.wr_s8(self.fd)
c.end()
self.fd = None
def read(self, n=-1):
c = self.cmd
c.begin(CMD_READ)
c.wr_s8(self.fd)
c.wr_s32(n)
data = c.rd_bytes(None)
c.end()
if self.is_text:
data = str(data, 'utf8')
else:
data = bytes(data)
return data
def readinto(self, buf):
c = self.cmd
c.begin(CMD_READ)
c.wr_s8(self.fd)
c.wr_s32(len(buf))
n = c.rd_bytes(buf)
c.end()
return n
def readline(self):
l = ''
while 1:
c = self.read(1)
l += c
if c == '\\n' or c == '':
return l
def readlines(self):
ls = []
while 1:
l = self.readline()
if not l:
return ls
ls.append(l)
def write(self, buf):
c = self.cmd
c.begin(CMD_WRITE)
c.wr_s8(self.fd)
c.wr_bytes(buf)
n = c.rd_s32()
c.end()
return n
def seek(self, n, whence=SEEK_SET):
c = self.cmd
c.begin(CMD_SEEK)
c.wr_s8(self.fd)
c.wr_s32(n)
c.wr_s8(whence)
n = c.rd_s32()
c.end()
if n < 0:
raise OSError(n)
return n
class RemoteFS:
def __init__(self, cmd):
self.cmd = cmd
def mount(self, readonly, mkfs):
pass
def umount(self):
pass
def chdir(self, path):
if not path.startswith("/"):
path = self.path + path
if not path.endswith("/"):
path += "/"
if path != "/":
self.stat(path)
self.path = path
def getcwd(self):
return self.path
def remove(self, path):
c = self.cmd
c.begin(CMD_REMOVE)
c.wr_str(self.path + path)
res = c.rd_s32()
c.end()
if res < 0:
raise OSError(-res)
def rename(self, old, new):
c = self.cmd
c.begin(CMD_RENAME)
c.wr_str(self.path + old)
c.wr_str(self.path + new)
res = c.rd_s32()
c.end()
if res < 0:
raise OSError(-res)
def mkdir(self, path):
c = self.cmd
c.begin(CMD_MKDIR)
c.wr_str(self.path + path)
res = c.rd_s32()
c.end()
if res < 0:
raise OSError(-res)
def rmdir(self, path):
c = self.cmd
c.begin(CMD_RMDIR)
c.wr_str(self.path + path)
res = c.rd_s32()
c.end()
if res < 0:
raise OSError(-res)
def stat(self, path):
c = self.cmd
c.begin(CMD_STAT)
c.wr_str(self.path + path)
res = c.rd_s8()
if res < 0:
c.end()
raise OSError(-res)
mode = c.rd_u32()
size = c.rd_u32()
atime = c.rd_u32()
mtime = c.rd_u32()
ctime = c.rd_u32()
c.end()
return mode, 0, 0, 0, 0, 0, size, atime, mtime, ctime
def ilistdir(self, path):
c = self.cmd
c.begin(CMD_ILISTDIR_START)
c.wr_str(self.path + path)
res = c.rd_s8()
c.end()
if res < 0:
raise OSError(-res)
def next():
while True:
c.begin(CMD_ILISTDIR_NEXT)
name = c.rd_str()
if name:
type = c.rd_u32()
c.end()
yield (name, type, 0)
else:
c.end()
break
return next()
def open(self, path, mode):
c = self.cmd
c.begin(CMD_OPEN)
c.wr_str(self.path + path)
c.wr_str(mode)
fd = c.rd_s8()
c.end()
if fd < 0:
raise OSError(-fd)
return RemoteFile(c, fd, mode.find('b') == -1)
def __mount():
uos.mount(RemoteFS(RemoteCommand()), '/remote')
uos.chdir('/remote')
"""
# Apply basic compression on hook code.
for key, value in fs_hook_cmds.items():
fs_hook_code = re.sub(key, str(value), fs_hook_code)
fs_hook_code = re.sub(" *#.*$", "", fs_hook_code, flags=re.MULTILINE)
fs_hook_code = re.sub("\n\n+", "\n", fs_hook_code)
fs_hook_code = re.sub(" ", " ", fs_hook_code)
fs_hook_code = re.sub("rd_", "r", fs_hook_code)
fs_hook_code = re.sub("wr_", "w", fs_hook_code)
fs_hook_code = re.sub("buf4", "b4", fs_hook_code)
class PyboardCommand:
def __init__(self, fin, fout, path, unsafe_links=False):
self.fin = fin
self.fout = fout
self.root = path + "/"
self.data_ilistdir = ["", []]
self.data_files = []
self.unsafe_links = unsafe_links
def rd_s8(self):
return struct.unpack("<b", self.fin.read(1))[0]
def rd_s32(self):
return struct.unpack("<i", self.fin.read(4))[0]
def rd_bytes(self):
n = self.rd_s32()
return self.fin.read(n)
def rd_str(self):
n = self.rd_s32()
if n == 0:
return ""
else:
return str(self.fin.read(n), "utf8")
def wr_s8(self, i):
self.fout.write(struct.pack("<b", i))
def wr_s32(self, i):
self.fout.write(struct.pack("<i", i))
def wr_u32(self, i):
self.fout.write(struct.pack("<I", i))
def wr_bytes(self, b):
self.wr_s32(len(b))
self.fout.write(b)
def wr_str(self, s):
b = bytes(s, "utf8")
self.wr_s32(len(b))
self.fout.write(b)
def log_cmd(self, msg):
print(f"[{msg}]", end="\r\n")
def path_check(self, path):
if not self.unsafe_links:
parent = os.path.realpath(self.root)
child = os.path.realpath(path)
else:
parent = os.path.abspath(self.root)
child = os.path.abspath(path)
if parent != os.path.commonpath([parent, child]):
raise OSError(EPERM, "") # File is outside mounted dir
def do_stat(self):
path = self.root + self.rd_str()
# self.log_cmd(f"stat {path}")
try:
self.path_check(path)
stat = os.stat(path)
except OSError as er:
self.wr_s8(-abs(er.errno))
else:
self.wr_s8(0)
# Note: st_ino would need to be 64-bit if added here
self.wr_u32(stat.st_mode)
self.wr_u32(stat.st_size)
self.wr_u32(int(stat.st_atime))
self.wr_u32(int(stat.st_mtime))
self.wr_u32(int(stat.st_ctime))
def do_ilistdir_start(self):
path = self.root + self.rd_str()
try:
self.path_check(path)
self.wr_s8(0)
except OSError as er:
self.wr_s8(-abs(er.errno))
else:
self.data_ilistdir[0] = path
self.data_ilistdir[1] = os.listdir(path)
def do_ilistdir_next(self):
if self.data_ilistdir[1]:
entry = self.data_ilistdir[1].pop(0)
try:
stat = os.lstat(self.data_ilistdir[0] + "/" + entry)
mode = stat.st_mode & 0xC000
except OSError as er:
mode = 0
self.wr_str(entry)
self.wr_u32(mode)
else:
self.wr_str("")
def do_open(self):
path = self.root + self.rd_str()
mode = self.rd_str()
# self.log_cmd(f"open {path} {mode}")
try:
self.path_check(path)
f = open(path, mode)
except OSError as er:
self.wr_s8(-abs(er.errno))
else:
is_text = mode.find("b") == -1
try:
fd = self.data_files.index(None)
self.data_files[fd] = (f, is_text)
except ValueError:
fd = len(self.data_files)
self.data_files.append((f, is_text))
self.wr_s8(fd)
def do_close(self):
fd = self.rd_s8()
# self.log_cmd(f"close {fd}")
self.data_files[fd][0].close()
self.data_files[fd] = None
def do_read(self):
fd = self.rd_s8()
n = self.rd_s32()
buf = self.data_files[fd][0].read(n)
if self.data_files[fd][1]:
buf = bytes(buf, "utf8")
self.wr_bytes(buf)
# self.log_cmd(f"read {fd} {n} -> {len(buf)}")
def do_seek(self):
fd = self.rd_s8()
n = self.rd_s32()
whence = self.rd_s8()
# self.log_cmd(f"seek {fd} {n}")
try:
n = self.data_files[fd][0].seek(n, whence)
except io.UnsupportedOperation:
n = -1
self.wr_s32(n)
def do_write(self):
fd = self.rd_s8()
buf = self.rd_bytes()
if self.data_files[fd][1]:
buf = str(buf, "utf8")
n = self.data_files[fd][0].write(buf)
self.wr_s32(n)
# self.log_cmd(f"write {fd} {len(buf)} -> {n}")
def do_remove(self):
path = self.root + self.rd_str()
# self.log_cmd(f"remove {path}")
try:
self.path_check(path)
os.remove(path)
ret = 0
except OSError as er:
ret = -abs(er.errno)
self.wr_s32(ret)
def do_rename(self):
old = self.root + self.rd_str()
new = self.root + self.rd_str()
# self.log_cmd(f"rename {old} {new}")
try:
self.path_check(old)
self.path_check(new)
os.rename(old, new)
ret = 0
except OSError as er:
ret = -abs(er.errno)
self.wr_s32(ret)
def do_mkdir(self):
path = self.root + self.rd_str()
# self.log_cmd(f"mkdir {path}")
try:
self.path_check(path)
os.mkdir(path)
ret = 0
except OSError as er:
ret = -abs(er.errno)
self.wr_s32(ret)
def do_rmdir(self):
path = self.root + self.rd_str()
# self.log_cmd(f"rmdir {path}")
try:
self.path_check(path)
os.rmdir(path)
ret = 0
except OSError as er:
ret = -abs(er.errno)
self.wr_s32(ret)
cmd_table = {
fs_hook_cmds["CMD_STAT"]: do_stat,
fs_hook_cmds["CMD_ILISTDIR_START"]: do_ilistdir_start,
fs_hook_cmds["CMD_ILISTDIR_NEXT"]: do_ilistdir_next,
fs_hook_cmds["CMD_OPEN"]: do_open,
fs_hook_cmds["CMD_CLOSE"]: do_close,
fs_hook_cmds["CMD_READ"]: do_read,
fs_hook_cmds["CMD_WRITE"]: do_write,
fs_hook_cmds["CMD_SEEK"]: do_seek,
fs_hook_cmds["CMD_REMOVE"]: do_remove,
fs_hook_cmds["CMD_RENAME"]: do_rename,
fs_hook_cmds["CMD_MKDIR"]: do_mkdir,
fs_hook_cmds["CMD_RMDIR"]: do_rmdir,
}
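# cmd_table above is indexed by the command byte that the device sends after
# the 0x18 escape byte; SerialIntercept._check_input (below) dispatches
# through it while the remote filesystem is mounted.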
class SerialIntercept:
def __init__(self, serial, cmd):
self.orig_serial = serial
self.cmd = cmd
self.buf = b""
self.orig_serial.timeout = 5.0
def _check_input(self, blocking):
if blocking or self.orig_serial.inWaiting() > 0:
c = self.orig_serial.read(1)
if c == b"\x18":
# a special command
c = self.orig_serial.read(1)[0]
self.orig_serial.write(b"\x18") # Acknowledge command
PyboardCommand.cmd_table[c](self.cmd)
elif not VT_ENABLED and c == b"\x1b":
# ESC code, ignore these on windows
esctype = self.orig_serial.read(1)
if esctype == b"[": # CSI
while not (0x40 < self.orig_serial.read(1)[0] < 0x7E):
# Looking for "final byte" of escape sequence
pass
else:
self.buf += c
@property
def fd(self):
return self.orig_serial.fd
def close(self):
self.orig_serial.close()
def inWaiting(self):
self._check_input(False)
return len(self.buf)
def read(self, n):
while len(self.buf) < n:
self._check_input(True)
out = self.buf[:n]
self.buf = self.buf[n:]
return out
def write(self, buf):
self.orig_serial.write(buf)
class PyboardExtended(Pyboard):
def __init__(self, dev, *args, **kwargs):
super().__init__(dev, *args, **kwargs)
self.device_name = dev
self.mounted = False
def mount_local(self, path, unsafe_links=False):
fout = self.serial
if self.eval('"RemoteFS" in globals()') == b"False":
self.exec_(fs_hook_code)
self.exec_("__mount()")
self.mounted = True
self.cmd = PyboardCommand(self.serial, fout, path, unsafe_links=unsafe_links)
self.serial = SerialIntercept(self.serial, self.cmd)
def write_ctrl_d(self, out_callback):
self.serial.write(b"\x04")
if not self.mounted:
return
# Read response from the device until it is quiet (with a timeout).
INITIAL_TIMEOUT = 0.5
BANNER_TIMEOUT = 2
QUIET_TIMEOUT = 0.1
FULL_TIMEOUT = 5
t_start = t_last_activity = time.monotonic()
data_all = b""
soft_reboot_started = False
soft_reboot_banner = False
while True:
t = time.monotonic()
n = self.serial.inWaiting()
if n > 0:
data = self.serial.read(n)
out_callback(data)
data_all += data
t_last_activity = t
else:
if len(data_all) == 0:
if t - t_start > INITIAL_TIMEOUT:
return
else:
if t - t_start > FULL_TIMEOUT:
if soft_reboot_started:
break
return
next_data_timeout = QUIET_TIMEOUT
if not soft_reboot_started and data_all.find(b"MPY: soft reboot") != -1:
soft_reboot_started = True
if soft_reboot_started and not soft_reboot_banner:
# Once soft reboot has been initiated, give some more time for the startup
# banner to be shown
if data_all.find(b"\nMicroPython ") != -1:
soft_reboot_banner = True
elif data_all.find(b"\nraw REPL; CTRL-B to exit\r\n") != -1:
soft_reboot_banner = True
else:
next_data_timeout = BANNER_TIMEOUT
if t - t_last_activity > next_data_timeout:
break
if not soft_reboot_started:
return
if not soft_reboot_banner:
out_callback(b"Warning: Could not remount local filesystem\r\n")
return
# Determine type of prompt
if data_all.endswith(b">"):
in_friendly_repl = False
prompt = b">"
else:
in_friendly_repl = True
prompt = data_all.rsplit(b"\r\n", 1)[-1]
# Clear state while board remounts, it will be re-set once mounted.
self.mounted = False
self.serial = self.serial.orig_serial
# Provide a message about the remount.
out_callback(bytes(f"\r\nRemount local directory {self.cmd.root} at /remote\r\n", "utf8"))
# Enter raw REPL and re-mount the remote filesystem.
self.serial.write(b"\x01")
self.exec_(fs_hook_code)
self.exec_("__mount()")
self.mounted = True
# Exit raw REPL if needed, and wait for the friendly REPL prompt.
if in_friendly_repl:
self.exit_raw_repl()
self.read_until(len(prompt), prompt)
out_callback(prompt)
self.serial = SerialIntercept(self.serial, self.cmd)
def umount_local(self):
if self.mounted:
self.exec_('uos.umount("/remote")')
self.mounted = False
self.serial = self.serial.orig_serial

View File

@@ -0,0 +1,6 @@
[build-system]
requires = [
"setuptools>=42",
"wheel"
]
build-backend = "setuptools.build_meta"

View File

@@ -0,0 +1,25 @@
[metadata]
name = mpremote
version = 0.3.0
author = Damien George
author_email = damien@micropython.org
description = Tool for interacting remotely with MicroPython
long_description = file: README.md
long_description_content_type = text/markdown
url = https://github.com/micropython/micropython
project_urls =
Bug Tracker = https://github.com/micropython/micropython/issues
classifiers =
Programming Language :: Python :: 3
License :: OSI Approved :: MIT License
Operating System :: OS Independent
[options]
packages = mpremote
python_requires = >= 3.4
install_requires =
pyserial >= 3.3
[options.entry_points]
console_scripts =
mpremote = mpremote.main:main

File diff suppressed because it is too large

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env python3
#
# This tool converts binary resource files passed on the command line
# into a Python source file containing data from these files, which can
# be accessed using standard pkg_resources.resource_stream() function
# from micropython-lib:
# https://github.com/micropython/micropython-lib/tree/master/pkg_resources
#
import sys
print("R = {")
for fname in sys.argv[1:]:
with open(fname, "rb") as f:
b = f.read()
print("%r: %r," % (fname, b))
print("}")

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env python3
import argparse
import os
import os.path
argparser = argparse.ArgumentParser(description="Compile all .py files to .mpy recursively")
argparser.add_argument("-o", "--out", help="output directory (default: input dir)")
argparser.add_argument("--target", help="select MicroPython target config")
argparser.add_argument("dir", help="input directory")
args = argparser.parse_args()
TARGET_OPTS = {
"unix": "",
"baremetal": "",
}
args.dir = args.dir.rstrip("/")
if not args.out:
args.out = args.dir
path_prefix_len = len(args.dir) + 1
for path, subdirs, files in os.walk(args.dir):
for f in files:
if f.endswith(".py"):
fpath = path + "/" + f
# print(fpath)
out_fpath = args.out + "/" + fpath[path_prefix_len:-3] + ".mpy"
out_dir = os.path.dirname(out_fpath)
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
cmd = "mpy-cross -v -v %s -s %s %s -o %s" % (
TARGET_OPTS.get(args.target, ""),
fpath[path_prefix_len:],
fpath,
out_fpath,
)
# print(cmd)
res = os.system(cmd)
assert res == 0

File diff suppressed because it is too large

View File

@@ -0,0 +1,797 @@
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2014-2021 Damien P. George
# Copyright (c) 2017 Paul Sokolovsky
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
pyboard interface
This module provides the Pyboard class, used to communicate with and
control a MicroPython device over a communication channel. Both real
boards and emulated devices (e.g. running in QEMU) are supported.
Various communication channels are supported, including a serial
connection, telnet-style network connection, external process
connection.
Example usage:
import pyboard
pyb = pyboard.Pyboard('/dev/ttyACM0')
Or:
pyb = pyboard.Pyboard('192.168.1.1')
Then:
pyb.enter_raw_repl()
pyb.exec('import pyb')
pyb.exec('pyb.LED(1).on()')
pyb.exit_raw_repl()
Note: if using Python2 then pyb.exec must be written as pyb.exec_.
To run a script from the local machine on the board and print out the results:
import pyboard
pyboard.execfile('test.py', device='/dev/ttyACM0')
This script can also be run directly. To execute a local script, use:
./pyboard.py test.py
Or:
python pyboard.py test.py
"""
import sys
import time
import os
import ast
try:
stdout = sys.stdout.buffer
except AttributeError:
# Python2 doesn't have buffer attr
stdout = sys.stdout
def stdout_write_bytes(b):
b = b.replace(b"\x04", b"")
stdout.write(b)
stdout.flush()
class PyboardError(Exception):
pass
class TelnetToSerial:
def __init__(self, ip, user, password, read_timeout=None):
self.tn = None
import telnetlib
self.tn = telnetlib.Telnet(ip, timeout=15)
self.read_timeout = read_timeout
if b"Login as:" in self.tn.read_until(b"Login as:", timeout=read_timeout):
self.tn.write(bytes(user, "ascii") + b"\r\n")
if b"Password:" in self.tn.read_until(b"Password:", timeout=read_timeout):
# needed because of internal implementation details of the telnet server
time.sleep(0.2)
self.tn.write(bytes(password, "ascii") + b"\r\n")
if b"for more information." in self.tn.read_until(
b'Type "help()" for more information.', timeout=read_timeout
):
# login successful
from collections import deque
self.fifo = deque()
return
raise PyboardError("Failed to establish a telnet connection with the board")
def __del__(self):
self.close()
def close(self):
if self.tn:
self.tn.close()
def read(self, size=1):
while len(self.fifo) < size:
timeout_count = 0
data = self.tn.read_eager()
if len(data):
self.fifo.extend(data)
timeout_count = 0
else:
time.sleep(0.25)
if self.read_timeout is not None and timeout_count > 4 * self.read_timeout:
break
timeout_count += 1
data = b""
while len(data) < size and len(self.fifo) > 0:
data += bytes([self.fifo.popleft()])
return data
def write(self, data):
self.tn.write(data)
return len(data)
def inWaiting(self):
n_waiting = len(self.fifo)
if not n_waiting:
data = self.tn.read_eager()
self.fifo.extend(data)
return len(data)
else:
return n_waiting
class ProcessToSerial:
"Execute a process and emulate serial connection using its stdin/stdout."
def __init__(self, cmd):
import subprocess
self.subp = subprocess.Popen(
cmd,
bufsize=0,
shell=True,
preexec_fn=os.setsid,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
# Initially was implemented with selectors, but that adds Python3
# dependency. However, there can be race conditions communicating
# with a particular child process (like QEMU), and selectors may
# still work better in that case, so left in place for now.
#
# import selectors
# self.sel = selectors.DefaultSelector()
# self.sel.register(self.subp.stdout, selectors.EVENT_READ)
import select
self.poll = select.poll()
self.poll.register(self.subp.stdout.fileno())
def close(self):
import signal
os.killpg(os.getpgid(self.subp.pid), signal.SIGTERM)
def read(self, size=1):
data = b""
while len(data) < size:
data += self.subp.stdout.read(size - len(data))
return data
def write(self, data):
self.subp.stdin.write(data)
return len(data)
def inWaiting(self):
# res = self.sel.select(0)
res = self.poll.poll(0)
if res:
return 1
return 0
class ProcessPtyToTerminal:
"""Execute a process which creates a PTY and prints slave PTY as
first line of its output, and emulate serial connection using
this PTY."""
def __init__(self, cmd):
import subprocess
import re
import serial
self.subp = subprocess.Popen(
cmd.split(),
bufsize=0,
shell=False,
preexec_fn=os.setsid,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
pty_line = self.subp.stderr.readline().decode("utf-8")
m = re.search(r"/dev/pts/[0-9]+", pty_line)
if not m:
print("Error: unable to find PTY device in startup line:", pty_line)
self.close()
sys.exit(1)
pty = m.group()
# rtscts, dsrdtr params are to workaround pyserial bug:
# http://stackoverflow.com/questions/34831131/pyserial-does-not-play-well-with-virtual-port
self.ser = serial.Serial(pty, interCharTimeout=1, rtscts=True, dsrdtr=True)
def close(self):
import signal
os.killpg(os.getpgid(self.subp.pid), signal.SIGTERM)
def read(self, size=1):
return self.ser.read(size)
def write(self, data):
return self.ser.write(data)
def inWaiting(self):
return self.ser.inWaiting()
class Pyboard:
def __init__(
self, device, baudrate=115200, user="micro", password="python", wait=0, exclusive=True
):
self.in_raw_repl = False
self.use_raw_paste = True
if device.startswith("exec:"):
self.serial = ProcessToSerial(device[len("exec:") :])
elif device.startswith("execpty:"):
self.serial = ProcessPtyToTerminal(device[len("execpty:") :])
elif device and device[0].isdigit() and device[-1].isdigit() and device.count(".") == 3:
# device looks like an IP address
self.serial = TelnetToSerial(device, user, password, read_timeout=10)
else:
import serial
# Set options, and exclusive if pyserial supports it
serial_kwargs = {"baudrate": baudrate, "interCharTimeout": 1}
if serial.__version__ >= "3.3":
serial_kwargs["exclusive"] = exclusive
delayed = False
for attempt in range(wait + 1):
try:
self.serial = serial.Serial(device, **serial_kwargs)
break
except (OSError, IOError): # Py2 and Py3 have different errors
if wait == 0:
continue
if attempt == 0:
sys.stdout.write("Waiting {} seconds for pyboard ".format(wait))
delayed = True
time.sleep(1)
sys.stdout.write(".")
sys.stdout.flush()
else:
if delayed:
print("")
raise PyboardError("failed to access " + device)
if delayed:
print("")
def close(self):
self.serial.close()
def read_until(self, min_num_bytes, ending, timeout=10, data_consumer=None):
# if data_consumer is used then data is not accumulated and the ending must be 1 byte long
assert data_consumer is None or len(ending) == 1
data = self.serial.read(min_num_bytes)
if data_consumer:
data_consumer(data)
timeout_count = 0
while True:
if data.endswith(ending):
break
elif self.serial.inWaiting() > 0:
new_data = self.serial.read(1)
if data_consumer:
data_consumer(new_data)
data = new_data
else:
data = data + new_data
timeout_count = 0
else:
timeout_count += 1
if timeout is not None and timeout_count >= 100 * timeout:
break
time.sleep(0.01)
return data
def enter_raw_repl(self, soft_reset=True):
self.serial.write(b"\r\x03\x03") # ctrl-C twice: interrupt any running program
# flush input (without relying on serial.flushInput())
n = self.serial.inWaiting()
while n > 0:
self.serial.read(n)
n = self.serial.inWaiting()
self.serial.write(b"\r\x01") # ctrl-A: enter raw REPL
if soft_reset:
data = self.read_until(1, b"raw REPL; CTRL-B to exit\r\n>")
if not data.endswith(b"raw REPL; CTRL-B to exit\r\n>"):
print(data)
raise PyboardError("could not enter raw repl")
self.serial.write(b"\x04") # ctrl-D: soft reset
# Waiting for "soft reboot" independently to "raw REPL" (done below)
# allows boot.py to print, which will show up after "soft reboot"
# and before "raw REPL".
data = self.read_until(1, b"soft reboot\r\n")
if not data.endswith(b"soft reboot\r\n"):
print(data)
raise PyboardError("could not enter raw repl")
data = self.read_until(1, b"raw REPL; CTRL-B to exit\r\n")
if not data.endswith(b"raw REPL; CTRL-B to exit\r\n"):
print(data)
raise PyboardError("could not enter raw repl")
self.in_raw_repl = True
def exit_raw_repl(self):
self.serial.write(b"\r\x02") # ctrl-B: enter friendly REPL
self.in_raw_repl = False
def follow(self, timeout, data_consumer=None):
# wait for normal output
data = self.read_until(1, b"\x04", timeout=timeout, data_consumer=data_consumer)
if not data.endswith(b"\x04"):
raise PyboardError("timeout waiting for first EOF reception")
data = data[:-1]
# wait for error output
data_err = self.read_until(1, b"\x04", timeout=timeout)
if not data_err.endswith(b"\x04"):
raise PyboardError("timeout waiting for second EOF reception")
data_err = data_err[:-1]
# return normal and error output
return data, data_err
def raw_paste_write(self, command_bytes):
# Read initial header, with window size.
data = self.serial.read(2)
window_size = data[0] | data[1] << 8
window_remain = window_size
# Write out the command_bytes data.
i = 0
while i < len(command_bytes):
while window_remain == 0 or self.serial.inWaiting():
data = self.serial.read(1)
if data == b"\x01":
# Device indicated that a new window of data can be sent.
window_remain += window_size
elif data == b"\x04":
# Device indicated abrupt end. Acknowledge it and finish.
self.serial.write(b"\x04")
return
else:
# Unexpected data from device.
raise PyboardError("unexpected read during raw paste: {}".format(data))
# Send out as much data as possible that fits within the allowed window.
b = command_bytes[i : min(i + window_remain, len(command_bytes))]
self.serial.write(b)
window_remain -= len(b)
i += len(b)
# Indicate end of data.
self.serial.write(b"\x04")
# Wait for device to acknowledge end of data.
data = self.read_until(1, b"\x04")
if not data.endswith(b"\x04"):
raise PyboardError("could not complete raw paste: {}".format(data))
def exec_raw_no_follow(self, command):
if isinstance(command, bytes):
command_bytes = command
else:
command_bytes = bytes(command, encoding="utf8")
# check we have a prompt
data = self.read_until(1, b">")
if not data.endswith(b">"):
raise PyboardError("could not enter raw repl")
if self.use_raw_paste:
# Try to enter raw-paste mode.
self.serial.write(b"\x05A\x01")
data = self.serial.read(2)
if data == b"R\x00":
# Device understood raw-paste command but doesn't support it.
pass
elif data == b"R\x01":
# Device supports raw-paste mode, write out the command using this mode.
return self.raw_paste_write(command_bytes)
else:
# Device doesn't support raw-paste, fall back to normal raw REPL.
data = self.read_until(1, b"w REPL; CTRL-B to exit\r\n>")
if not data.endswith(b"w REPL; CTRL-B to exit\r\n>"):
print(data)
raise PyboardError("could not enter raw repl")
# Don't try to use raw-paste mode again for this connection.
self.use_raw_paste = False
# Write command using standard raw REPL, 256 bytes every 10ms.
for i in range(0, len(command_bytes), 256):
self.serial.write(command_bytes[i : min(i + 256, len(command_bytes))])
time.sleep(0.01)
self.serial.write(b"\x04")
# check if we could exec command
data = self.serial.read(2)
if data != b"OK":
raise PyboardError("could not exec command (response: %r)" % data)
def exec_raw(self, command, timeout=10, data_consumer=None):
self.exec_raw_no_follow(command)
return self.follow(timeout, data_consumer)
def eval(self, expression):
ret = self.exec_("print({})".format(expression))
ret = ret.strip()
return ret
def exec_(self, command, data_consumer=None):
ret, ret_err = self.exec_raw(command, data_consumer=data_consumer)
if ret_err:
raise PyboardError("exception", ret, ret_err)
return ret
def execfile(self, filename):
with open(filename, "rb") as f:
pyfile = f.read()
return self.exec_(pyfile)
def get_time(self):
t = str(self.eval("pyb.RTC().datetime()"), encoding="utf8")[1:-1].split(", ")
return int(t[4]) * 3600 + int(t[5]) * 60 + int(t[6])
def fs_ls(self, src):
cmd = (
"import uos\nfor f in uos.ilistdir(%s):\n"
" print('{:12} {}{}'.format(f[3]if len(f)>3 else 0,f[0],'/'if f[1]&0x4000 else ''))"
% (("'%s'" % src) if src else "")
)
self.exec_(cmd, data_consumer=stdout_write_bytes)
def fs_cat(self, src, chunk_size=256):
cmd = (
"with open('%s') as f:\n while 1:\n"
" b=f.read(%u)\n if not b:break\n print(b,end='')" % (src, chunk_size)
)
self.exec_(cmd, data_consumer=stdout_write_bytes)
def fs_get(self, src, dest, chunk_size=256, progress_callback=None):
if progress_callback:
src_size = int(self.exec_("import os\nprint(os.stat('%s')[6])" % src))
written = 0
self.exec_("f=open('%s','rb')\nr=f.read" % src)
with open(dest, "wb") as f:
while True:
data = bytearray()
self.exec_("print(r(%u))" % chunk_size, data_consumer=lambda d: data.extend(d))
assert data.endswith(b"\r\n\x04")
try:
data = ast.literal_eval(str(data[:-3], "ascii"))
if not isinstance(data, bytes):
raise ValueError("Not bytes")
except (UnicodeError, ValueError) as e:
raise PyboardError("fs_get: Could not interpret received data: %s" % str(e))
if not data:
break
f.write(data)
if progress_callback:
written += len(data)
progress_callback(written, src_size)
self.exec_("f.close()")
def fs_put(self, src, dest, chunk_size=256, progress_callback=None):
if progress_callback:
src_size = os.path.getsize(src)
written = 0
self.exec_("f=open('%s','wb')\nw=f.write" % dest)
with open(src, "rb") as f:
while True:
data = f.read(chunk_size)
if not data:
break
if sys.version_info < (3,):
self.exec_("w(b" + repr(data) + ")")
else:
self.exec_("w(" + repr(data) + ")")
if progress_callback:
written += len(data)
progress_callback(written, src_size)
self.exec_("f.close()")
def fs_mkdir(self, dir):
self.exec_("import uos\nuos.mkdir('%s')" % dir)
def fs_rmdir(self, dir):
self.exec_("import uos\nuos.rmdir('%s')" % dir)
def fs_rm(self, src):
self.exec_("import uos\nuos.remove('%s')" % src)
# in Python2 exec is a keyword so one must use "exec_"
# but for Python3 we want to provide the nicer version "exec"
setattr(Pyboard, "exec", Pyboard.exec_)
def execfile(filename, device="/dev/ttyACM0", baudrate=115200, user="micro", password="python"):
pyb = Pyboard(device, baudrate, user, password)
pyb.enter_raw_repl()
output = pyb.execfile(filename)
stdout_write_bytes(output)
pyb.exit_raw_repl()
pyb.close()
def filesystem_command(pyb, args, progress_callback=None):
def fname_remote(src):
if src.startswith(":"):
src = src[1:]
return src
def fname_cp_dest(src, dest):
src = src.rsplit("/", 1)[-1]
if dest is None or dest == "":
dest = src
elif dest == ".":
dest = "./" + src
elif dest.endswith("/"):
dest += src
return dest
cmd = args[0]
args = args[1:]
try:
if cmd == "cp":
srcs = args[:-1]
dest = args[-1]
if srcs[0].startswith("./") or dest.startswith(":"):
op = pyb.fs_put
fmt = "cp %s :%s"
dest = fname_remote(dest)
else:
op = pyb.fs_get
fmt = "cp :%s %s"
for src in srcs:
src = fname_remote(src)
dest2 = fname_cp_dest(src, dest)
print(fmt % (src, dest2))
op(src, dest2, progress_callback=progress_callback)
else:
op = {
"ls": pyb.fs_ls,
"cat": pyb.fs_cat,
"mkdir": pyb.fs_mkdir,
"rmdir": pyb.fs_rmdir,
"rm": pyb.fs_rm,
}[cmd]
if cmd == "ls" and not args:
args = [""]
for src in args:
src = fname_remote(src)
print("%s :%s" % (cmd, src))
op(src)
except PyboardError as er:
print(str(er.args[2], "ascii"))
pyb.exit_raw_repl()
pyb.close()
sys.exit(1)
_injected_import_hook_code = """\
import uos, uio
class _FS:
class File(uio.IOBase):
def __init__(self):
self.off = 0
def ioctl(self, request, arg):
return 0
def readinto(self, buf):
buf[:] = memoryview(_injected_buf)[self.off:self.off + len(buf)]
self.off += len(buf)
return len(buf)
mount = umount = chdir = lambda *args: None
def stat(self, path):
if path == '_injected.mpy':
return tuple(0 for _ in range(10))
else:
raise OSError(-2) # ENOENT
def open(self, path, mode):
return self.File()
uos.mount(_FS(), '/_')
uos.chdir('/_')
from _injected import *
uos.umount('/_')
del _injected_buf, _FS
"""
def main():
import argparse
cmd_parser = argparse.ArgumentParser(description="Run scripts on the pyboard.")
cmd_parser.add_argument(
"-d",
"--device",
default=os.environ.get("PYBOARD_DEVICE", "/dev/ttyACM0"),
help="the serial device or the IP address of the pyboard",
)
cmd_parser.add_argument(
"-b",
"--baudrate",
default=os.environ.get("PYBOARD_BAUDRATE", "115200"),
help="the baud rate of the serial device",
)
cmd_parser.add_argument("-u", "--user", default="micro", help="the telnet login username")
cmd_parser.add_argument("-p", "--password", default="python", help="the telnet login password")
cmd_parser.add_argument("-c", "--command", help="program passed in as string")
cmd_parser.add_argument(
"-w",
"--wait",
default=0,
type=int,
help="seconds to wait for USB connected board to become available",
)
group = cmd_parser.add_mutually_exclusive_group()
group.add_argument(
"--soft-reset",
default=True,
action="store_true",
help="Whether to perform a soft reset when connecting to the board [default]",
)
group.add_argument(
"--no-soft-reset",
action="store_false",
dest="soft_reset",
)
group = cmd_parser.add_mutually_exclusive_group()
group.add_argument(
"--follow",
action="store_true",
default=None,
help="follow the output after running the scripts [default if no scripts given]",
)
group.add_argument(
"--no-follow",
action="store_false",
dest="follow",
)
group = cmd_parser.add_mutually_exclusive_group()
group.add_argument(
"--exclusive",
action="store_true",
default=True,
help="Open the serial device for exclusive access [default]",
)
group.add_argument(
"--no-exclusive",
action="store_false",
dest="exclusive",
)
cmd_parser.add_argument(
"-f",
"--filesystem",
action="store_true",
help="perform a filesystem action: "
"cp local :device | cp :device local | cat path | ls [path] | rm path | mkdir path | rmdir path",
)
cmd_parser.add_argument("files", nargs="*", help="input files")
args = cmd_parser.parse_args()
# open the connection to the pyboard
try:
pyb = Pyboard(
args.device, args.baudrate, args.user, args.password, args.wait, args.exclusive
)
except PyboardError as er:
print(er)
sys.exit(1)
# run any command or file(s)
if args.command is not None or args.filesystem or len(args.files):
# we must enter raw-REPL mode to execute commands
# this will do a soft-reset of the board
try:
pyb.enter_raw_repl(args.soft_reset)
except PyboardError as er:
print(er)
pyb.close()
sys.exit(1)
def execbuffer(buf):
try:
if args.follow is None or args.follow:
ret, ret_err = pyb.exec_raw(
buf, timeout=None, data_consumer=stdout_write_bytes
)
else:
pyb.exec_raw_no_follow(buf)
ret_err = None
except PyboardError as er:
print(er)
pyb.close()
sys.exit(1)
except KeyboardInterrupt:
sys.exit(1)
if ret_err:
pyb.exit_raw_repl()
pyb.close()
stdout_write_bytes(ret_err)
sys.exit(1)
# do filesystem commands, if given
if args.filesystem:
filesystem_command(pyb, args.files)
del args.files[:]
# run the command, if given
if args.command is not None:
execbuffer(args.command.encode("utf-8"))
# run any files
for filename in args.files:
with open(filename, "rb") as f:
pyfile = f.read()
if filename.endswith(".mpy") and pyfile[0] == ord("M"):
pyb.exec_("_injected_buf=" + repr(pyfile))
pyfile = _injected_import_hook_code
execbuffer(pyfile)
# exiting raw-REPL just drops to friendly-REPL mode
pyb.exit_raw_repl()
# if asked explicitly, or no files given, then follow the output
if args.follow or (args.command is None and not args.filesystem and len(args.files) == 0):
try:
ret, ret_err = pyb.follow(timeout=None, data_consumer=stdout_write_bytes)
except PyboardError as er:
print(er)
sys.exit(1)
except KeyboardInterrupt:
sys.exit(1)
if ret_err:
pyb.close()
stdout_write_bytes(ret_err)
sys.exit(1)
# close the connection to the pyboard
pyb.close()
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,627 @@
#!/usr/bin/env python
# This file is part of the OpenMV project.
# Copyright (c) 2013/2014 Ibrahim Abdelkader <i.abdalkader@gmail.com>
# This work is licensed under the MIT license, see the file LICENSE for
# details.
"""This module implements enough functionality to program the STM32F4xx over
DFU, without requiring dfu-util.
See app note AN3156 for a description of the DFU protocol.
See document UM0391 for a description of the DfuSe file.
"""
from __future__ import print_function
import argparse
import collections
import inspect
import re
import struct
import sys
import usb.core
import usb.util
import zlib
# USB request __TIMEOUT
__TIMEOUT = 4000
# DFU commands
__DFU_DETACH = 0
__DFU_DNLOAD = 1
__DFU_UPLOAD = 2
__DFU_GETSTATUS = 3
__DFU_CLRSTATUS = 4
__DFU_GETSTATE = 5
__DFU_ABORT = 6
# DFU status
__DFU_STATE_APP_IDLE = 0x00
__DFU_STATE_APP_DETACH = 0x01
__DFU_STATE_DFU_IDLE = 0x02
__DFU_STATE_DFU_DOWNLOAD_SYNC = 0x03
__DFU_STATE_DFU_DOWNLOAD_BUSY = 0x04
__DFU_STATE_DFU_DOWNLOAD_IDLE = 0x05
__DFU_STATE_DFU_MANIFEST_SYNC = 0x06
__DFU_STATE_DFU_MANIFEST = 0x07
__DFU_STATE_DFU_MANIFEST_WAIT_RESET = 0x08
__DFU_STATE_DFU_UPLOAD_IDLE = 0x09
__DFU_STATE_DFU_ERROR = 0x0A
_DFU_DESCRIPTOR_TYPE = 0x21
__DFU_STATUS_STR = {
__DFU_STATE_APP_IDLE: "STATE_APP_IDLE",
__DFU_STATE_APP_DETACH: "STATE_APP_DETACH",
__DFU_STATE_DFU_IDLE: "STATE_DFU_IDLE",
__DFU_STATE_DFU_DOWNLOAD_SYNC: "STATE_DFU_DOWNLOAD_SYNC",
__DFU_STATE_DFU_DOWNLOAD_BUSY: "STATE_DFU_DOWNLOAD_BUSY",
__DFU_STATE_DFU_DOWNLOAD_IDLE: "STATE_DFU_DOWNLOAD_IDLE",
__DFU_STATE_DFU_MANIFEST_SYNC: "STATE_DFU_MANIFEST_SYNC",
__DFU_STATE_DFU_MANIFEST: "STATE_DFU_MANIFEST",
__DFU_STATE_DFU_MANIFEST_WAIT_RESET: "STATE_DFU_MANIFEST_WAIT_RESET",
__DFU_STATE_DFU_UPLOAD_IDLE: "STATE_DFU_UPLOAD_IDLE",
__DFU_STATE_DFU_ERROR: "STATE_DFU_ERROR",
}
# USB device handle
__dev = None
# Configuration descriptor of the device
__cfg_descr = None
__verbose = None
# USB DFU interface
__DFU_INTERFACE = 0
# Python 3 deprecated getargspec in favour of getfullargspec, but
# Python 2 doesn't have the latter, so detect which one to use
getargspec = getattr(inspect, "getfullargspec", inspect.getargspec)
if "length" in getargspec(usb.util.get_string).args:
# PyUSB 1.0.0.b1 has the length argument
def get_string(dev, index):
return usb.util.get_string(dev, 255, index)
else:
# PyUSB 1.0.0.b2 dropped the length argument
def get_string(dev, index):
return usb.util.get_string(dev, index)
def find_dfu_cfg_descr(descr):
if len(descr) == 9 and descr[0] == 9 and descr[1] == _DFU_DESCRIPTOR_TYPE:
nt = collections.namedtuple(
"CfgDescr",
[
"bLength",
"bDescriptorType",
"bmAttributes",
"wDetachTimeOut",
"wTransferSize",
"bcdDFUVersion",
],
)
return nt(*struct.unpack("<BBBHHH", bytearray(descr)))
return None
def init(**kwargs):
"""Initializes the found DFU device so that we can program it."""
global __dev, __cfg_descr
devices = get_dfu_devices(**kwargs)
if not devices:
raise ValueError("No DFU device found")
if len(devices) > 1:
raise ValueError("Multiple DFU devices found")
__dev = devices[0]
__dev.set_configuration()
# Claim DFU interface
usb.util.claim_interface(__dev, __DFU_INTERFACE)
# Find the DFU configuration descriptor, either in the device or interfaces
__cfg_descr = None
for cfg in __dev.configurations():
__cfg_descr = find_dfu_cfg_descr(cfg.extra_descriptors)
if __cfg_descr:
break
for itf in cfg.interfaces():
__cfg_descr = find_dfu_cfg_descr(itf.extra_descriptors)
if __cfg_descr:
break
# Get device into idle state
for attempt in range(4):
status = get_status()
if status == __DFU_STATE_DFU_IDLE:
break
elif status == __DFU_STATE_DFU_DOWNLOAD_IDLE or status == __DFU_STATE_DFU_UPLOAD_IDLE:
abort_request()
else:
clr_status()
def abort_request():
"""Sends an abort request."""
__dev.ctrl_transfer(0x21, __DFU_ABORT, 0, __DFU_INTERFACE, None, __TIMEOUT)
def clr_status():
"""Clears any error status (perhaps left over from a previous session)."""
__dev.ctrl_transfer(0x21, __DFU_CLRSTATUS, 0, __DFU_INTERFACE, None, __TIMEOUT)
def get_status():
"""Get the status of the last operation."""
stat = __dev.ctrl_transfer(0xA1, __DFU_GETSTATUS, 0, __DFU_INTERFACE, 6, 20000)
# firmware can provide an optional string for any error
if stat[5]:
message = get_string(__dev, stat[5])
if message:
print(message)
return stat[4]
def check_status(stage, expected):
status = get_status()
if status != expected:
raise SystemExit("DFU: %s failed (%s)" % (stage, __DFU_STATUS_STR.get(status, status)))
def mass_erase():
"""Performs a MASS erase (i.e. erases the entire device)."""
# Send DNLOAD with first byte=0x41
__dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT)
# Execute last command
check_status("erase", __DFU_STATE_DFU_DOWNLOAD_BUSY)
# Check command state
check_status("erase", __DFU_STATE_DFU_DOWNLOAD_IDLE)
def page_erase(addr):
"""Erases a single page."""
if __verbose:
print("Erasing page: 0x%x..." % (addr))
# Send DNLOAD with first byte=0x41 and page address
buf = struct.pack("<BI", 0x41, addr)
__dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT)
# Execute last command
check_status("erase", __DFU_STATE_DFU_DOWNLOAD_BUSY)
# Check command state
check_status("erase", __DFU_STATE_DFU_DOWNLOAD_IDLE)
def set_address(addr):
"""Sets the address for the next operation."""
# Send DNLOAD with first byte=0x21 and page address
buf = struct.pack("<BI", 0x21, addr)
__dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT)
# Execute last command
check_status("set address", __DFU_STATE_DFU_DOWNLOAD_BUSY)
# Check command state
check_status("set address", __DFU_STATE_DFU_DOWNLOAD_IDLE)
def write_memory(addr, buf, progress=None, progress_addr=0, progress_size=0):
"""Writes a buffer into memory. This routine assumes that memory has
already been erased.
"""
xfer_count = 0
xfer_bytes = 0
xfer_total = len(buf)
xfer_base = addr
while xfer_bytes < xfer_total:
if __verbose and xfer_count % 512 == 0:
print(
"Addr 0x%x %dKBs/%dKBs..."
% (xfer_base + xfer_bytes, xfer_bytes // 1024, xfer_total // 1024)
)
if progress and xfer_count % 2 == 0:
progress(progress_addr, xfer_base + xfer_bytes - progress_addr, progress_size)
# Set mem write address
set_address(xfer_base + xfer_bytes)
# Send DNLOAD with fw data
chunk = min(__cfg_descr.wTransferSize, xfer_total - xfer_bytes)
__dev.ctrl_transfer(
0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf[xfer_bytes : xfer_bytes + chunk], __TIMEOUT
)
# Execute last command
check_status("write memory", __DFU_STATE_DFU_DOWNLOAD_BUSY)
# Check command state
check_status("write memory", __DFU_STATE_DFU_DOWNLOAD_IDLE)
xfer_count += 1
xfer_bytes += chunk
def write_page(buf, xfer_offset):
"""Writes a single page. This routine assumes that memory has already
been erased.
"""
xfer_base = 0x08000000
# Set mem write address
set_address(xfer_base + xfer_offset)
# Send DNLOAD with fw data
__dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf, __TIMEOUT)
# Execute last command
check_status("write memory", __DFU_STATE_DFU_DOWNLOAD_BUSY)
# Check command state
check_status("write memory", __DFU_STATE_DFU_DOWNLOAD_IDLE)
if __verbose:
print("Write: 0x%x " % (xfer_base + xfer_offset))
def exit_dfu():
"""Exit DFU mode, and start running the program."""
# Set jump address
set_address(0x08000000)
# Send DNLOAD with 0 length to exit DFU
__dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, None, __TIMEOUT)
try:
# Execute last command
if get_status() != __DFU_STATE_DFU_MANIFEST:
print("Failed to reset device")
# Release device
usb.util.dispose_resources(__dev)
except:
pass
def named(values, names):
"""Creates a dict with `names` as fields, and `values` as values."""
return dict(zip(names.split(), values))
def consume(fmt, data, names):
"""Parses the struct defined by `fmt` from `data`, stores the parsed fields
into a dict keyed by `names`. Returns the dict, and the data
with the struct stripped off."""
size = struct.calcsize(fmt)
return named(struct.unpack(fmt, data[:size]), names), data[size:]
def cstring(string):
"""Extracts a null-terminated string from a byte array."""
return string.decode("utf-8").split("\0", 1)[0]
def compute_crc(data):
"""Computes the CRC32 value for the data passed in."""
return 0xFFFFFFFF & -zlib.crc32(data) - 1
def read_dfu_file(filename):
"""Reads a DFU file, and parses the individual elements from the file.
Returns an array of elements. Each element is a dictionary with the
following keys:
num - The element index.
addr - The address that the element data should be written to.
size - The size of the element data.
data - The element data.
If an error occurs while parsing the file, then None is returned.
"""
print("File: {}".format(filename))
with open(filename, "rb") as fin:
data = fin.read()
crc = compute_crc(data[:-4])
elements = []
# Decode the DFU Prefix
#
# <5sBIB
# < little endian Endianness
# 5s char[5] signature "DfuSe"
# B uint8_t version 1
# I uint32_t size Size of the DFU file (without suffix)
# B uint8_t targets Number of targets
dfu_prefix, data = consume("<5sBIB", data, "signature version size targets")
print(
" %(signature)s v%(version)d, image size: %(size)d, "
"targets: %(targets)d" % dfu_prefix
)
for target_idx in range(dfu_prefix["targets"]):
# Decode the Image Prefix
#
# <6sBI255s2I
# < little endian Endianness
# 6s char[6] signature "Target"
# B uint8_t altsetting
# I uint32_t named Bool indicating if a name was used
# 255s char[255] name Name of the target
# I uint32_t size Size of image (without prefix)
# I uint32_t elements Number of elements in the image
img_prefix, data = consume(
"<6sBI255s2I", data, "signature altsetting named name " "size elements"
)
img_prefix["num"] = target_idx
if img_prefix["named"]:
img_prefix["name"] = cstring(img_prefix["name"])
else:
img_prefix["name"] = ""
print(
" %(signature)s %(num)d, alt setting: %(altsetting)s, "
'name: "%(name)s", size: %(size)d, elements: %(elements)d' % img_prefix
)
target_size = img_prefix["size"]
target_data = data[:target_size]
data = data[target_size:]
for elem_idx in range(img_prefix["elements"]):
# Decode target prefix
#
# <2I
# < little endian Endianness
# I uint32_t element Address
# I uint32_t element Size
elem_prefix, target_data = consume("<2I", target_data, "addr size")
elem_prefix["num"] = elem_idx
print(" %(num)d, address: 0x%(addr)08x, size: %(size)d" % elem_prefix)
elem_size = elem_prefix["size"]
elem_data = target_data[:elem_size]
target_data = target_data[elem_size:]
elem_prefix["data"] = elem_data
elements.append(elem_prefix)
if len(target_data):
print("target %d PARSE ERROR" % target_idx)
# Decode DFU Suffix
#
# <4H3sBI
# < little endian Endianness
# H uint16_t device Firmware version
# H uint16_t product
# H uint16_t vendor
# H uint16_t dfu 0x11a (DFU file format version)
# 3s char[3] ufd "UFD"
# B uint8_t len 16
# I uint32_t crc32 Checksum
dfu_suffix = named(
struct.unpack("<4H3sBI", data[:16]), "device product vendor dfu ufd len crc"
)
print(
" usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, "
"dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x" % dfu_suffix
)
if crc != dfu_suffix["crc"]:
print("CRC ERROR: computed crc32 is 0x%08x" % crc)
return
data = data[16:]
if data:
print("PARSE ERROR")
return
return elements
class FilterDFU(object):
"""Class for filtering USB devices to identify devices which are in DFU
mode.
"""
def __call__(self, device):
for cfg in device:
for intf in cfg:
return intf.bInterfaceClass == 0xFE and intf.bInterfaceSubClass == 1
def get_dfu_devices(*args, **kwargs):
"""Returns a list of USB devices which are currently in DFU mode.
Additional filters (like idProduct and idVendor) can be passed in
to refine the search.
"""
# Convert to list for compatibility with newer PyUSB
return list(usb.core.find(*args, find_all=True, custom_match=FilterDFU(), **kwargs))
def get_memory_layout(device):
"""Returns an array which identifies the memory layout. Each entry
of the array will contain a dictionary with the following keys:
addr - Address of this memory segment.
last_addr - Last address contained within the memory segment.
size - Size of the segment, in bytes.
num_pages - Number of pages in the segment.
page_size - Size of each page, in bytes.
"""
cfg = device[0]
intf = cfg[(0, 0)]
mem_layout_str = get_string(device, intf.iInterface)
mem_layout = mem_layout_str.split("/")
result = []
for mem_layout_index in range(1, len(mem_layout), 2):
addr = int(mem_layout[mem_layout_index], 0)
segments = mem_layout[mem_layout_index + 1].split(",")
seg_re = re.compile(r"(\d+)\*(\d+)(.)(.)")
for segment in segments:
seg_match = seg_re.match(segment)
num_pages = int(seg_match.groups()[0], 10)
page_size = int(seg_match.groups()[1], 10)
multiplier = seg_match.groups()[2]
if multiplier == "K":
page_size *= 1024
if multiplier == "M":
page_size *= 1024 * 1024
size = num_pages * page_size
last_addr = addr + size - 1
result.append(
named(
(addr, last_addr, size, num_pages, page_size),
"addr last_addr size num_pages page_size",
)
)
addr += size
return result
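# Illustrative example of the DfuSe descriptor string parsed above (a typical
# STM32 value; the exact string is device specific):
#   "@Internal Flash  /0x08000000/04*016Kg,01*064Kg,07*128Kg"
# splits on "/" into a base address of 0x08000000 followed by segment groups,
# giving 4 pages of 16K, then 1 page of 64K, then 7 pages of 128K, each entry
# carrying addr/last_addr/size/num_pages/page_size.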
def list_dfu_devices(*args, **kwargs):
"""Prints a lits of devices detected in DFU mode."""
devices = get_dfu_devices(*args, **kwargs)
if not devices:
raise SystemExit("No DFU capable devices found")
for device in devices:
print(
"Bus {} Device {:03d}: ID {:04x}:{:04x}".format(
device.bus, device.address, device.idVendor, device.idProduct
)
)
layout = get_memory_layout(device)
print("Memory Layout")
for entry in layout:
print(
" 0x{:x} {:2d} pages of {:3d}K bytes".format(
entry["addr"], entry["num_pages"], entry["page_size"] // 1024
)
)
def write_elements(elements, mass_erase_used, progress=None):
"""Writes the indicated elements into the target memory,
erasing as needed.
"""
mem_layout = get_memory_layout(__dev)
for elem in elements:
addr = elem["addr"]
size = elem["size"]
data = elem["data"]
elem_size = size
elem_addr = addr
if progress and elem_size:
progress(elem_addr, 0, elem_size)
while size > 0:
write_size = size
if not mass_erase_used:
for segment in mem_layout:
if addr >= segment["addr"] and addr <= segment["last_addr"]:
# We found the page containing the address we want to
# write, erase it
page_size = segment["page_size"]
page_addr = addr & ~(page_size - 1)
if addr + write_size > page_addr + page_size:
write_size = page_addr + page_size - addr
page_erase(page_addr)
break
write_memory(addr, data[:write_size], progress, elem_addr, elem_size)
data = data[write_size:]
addr += write_size
size -= write_size
if progress:
progress(elem_addr, addr - elem_addr, elem_size)
def cli_progress(addr, offset, size):
"""Prints a progress report suitable for use on the command line."""
width = 25
done = offset * width // size
print(
"\r0x{:08x} {:7d} [{}{}] {:3d}% ".format(
addr, size, "=" * done, " " * (width - done), offset * 100 // size
),
end="",
)
try:
sys.stdout.flush()
except OSError:
pass # Ignore Windows CLI "WinError 87" on Python 3.6
if offset == size:
print("")
def main():
"""Test program for verifying this files functionality."""
global __verbose
# Parse CMD args
parser = argparse.ArgumentParser(description="DFU Python Util")
parser.add_argument(
"-l", "--list", help="list available DFU devices", action="store_true", default=False
)
parser.add_argument("--vid", help="USB Vendor ID", type=lambda x: int(x, 0), default=None)
parser.add_argument("--pid", help="USB Product ID", type=lambda x: int(x, 0), default=None)
parser.add_argument(
"-m", "--mass-erase", help="mass erase device", action="store_true", default=False
)
parser.add_argument(
"-u", "--upload", help="read file from DFU device", dest="path", default=False
)
parser.add_argument("-x", "--exit", help="Exit DFU", action="store_true", default=False)
parser.add_argument(
"-v", "--verbose", help="increase output verbosity", action="store_true", default=False
)
args = parser.parse_args()
__verbose = args.verbose
kwargs = {}
if args.vid:
kwargs["idVendor"] = args.vid
if args.pid:
kwargs["idProduct"] = args.pid
if args.list:
list_dfu_devices(**kwargs)
return
init(**kwargs)
command_run = False
if args.mass_erase:
print("Mass erase...")
mass_erase()
command_run = True
if args.path:
elements = read_dfu_file(args.path)
if not elements:
print("No data in dfu file")
return
print("Writing memory...")
write_elements(elements, args.mass_erase, progress=cli_progress)
print("Exiting DFU...")
exit_dfu()
command_run = True
if args.exit:
print("Exiting DFU...")
exit_dfu()
command_run = True
if command_run:
print("Finished")
else:
print("No command specified")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
import os, sys
from glob import glob
from re import sub
import argparse
def escape(s):
s = s.decode()
lookup = {
"\0": "\\0",
"\t": "\\t",
"\n": '\\n"\n"',
"\r": "\\r",
"\\": "\\\\",
'"': '\\"',
}
return '""\n"{}"'.format("".join([lookup[x] if x in lookup else x for x in s]))
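# For example, escape(b"print(1)\n") yields a C string literal split across
# lines (a new literal is started after every newline in the script):
#   ""
#   "print(1)\n"
#   ""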
def chew_filename(t):
return {"func": "test_{}_fn".format(sub(r"/|\.|-", "_", t)), "desc": t}
def script_to_map(test_file):
r = {"name": chew_filename(test_file)["func"]}
with open(test_file, "rb") as f:
r["script"] = escape(f.read())
with open(test_file + ".exp", "rb") as f:
r["output"] = escape(f.read())
return r
test_function = (
"void {name}(void* data) {{\n"
" static const char pystr[] = {script};\n"
" static const char exp[] = {output};\n"
' printf("\\n");\n'
" upytest_set_expected_output(exp, sizeof(exp) - 1);\n"
" upytest_execute_test(pystr);\n"
' printf("result: ");\n'
"}}"
)
testcase_struct = "struct testcase_t {name}_tests[] = {{\n{body}\n END_OF_TESTCASES\n}};"
testcase_member = ' {{ "{desc}", {func}, TT_ENABLED_, 0, 0 }},'
testgroup_struct = "struct testgroup_t groups[] = {{\n{body}\n END_OF_GROUPS\n}};"
testgroup_member = ' {{ "{name}", {name}_tests }},'
## XXX: maybe we could have `--without <groups>` argument...
# currently these tests are selected because they pass on qemu-arm
test_dirs = (
"basics",
"micropython",
"misc",
"extmod",
"float",
"inlineasm",
"qemu-arm",
) # 'import', 'io',)
exclude_tests = (
# pattern matching in .exp
"basics/bytes_compare3.py",
"extmod/ticks_diff.py",
"extmod/time_ms_us.py",
"extmod/uheapq_timeq.py",
# unicode char issue
"extmod/ujson_loads.py",
# doesn't output to python stdout
"extmod/ure_debug.py",
"extmod/vfs_basic.py",
"extmod/vfs_fat_ramdisk.py",
"extmod/vfs_fat_fileio.py",
"extmod/vfs_fat_fsusermount.py",
"extmod/vfs_fat_oldproto.py",
# rounding issues
"float/float_divmod.py",
# requires double precision floating point to work
"float/float2int_doubleprec_intbig.py",
"float/float_parse_doubleprec.py",
# inline asm FP tests (require Cortex-M4)
"inlineasm/asmfpaddsub.py",
"inlineasm/asmfpcmp.py",
"inlineasm/asmfpldrstr.py",
"inlineasm/asmfpmuldiv.py",
"inlineasm/asmfpsqrt.py",
# different filename in output
"micropython/emg_exc.py",
"micropython/heapalloc_traceback.py",
# don't have emergency exception buffer
"micropython/heapalloc_exc_compressed_emg_exc.py",
# pattern matching in .exp
"micropython/meminfo.py",
# needs sys stdfiles
"misc/print_exception.py",
# settrace .exp files are too large
"misc/sys_settrace_loop.py",
"misc/sys_settrace_generator.py",
"misc/sys_settrace_features.py",
# don't have f-string
"basics/string_fstring.py",
"basics/string_fstring_debug.py",
)
output = []
tests = []
argparser = argparse.ArgumentParser(
description="Convert native MicroPython tests to tinytest/upytesthelper C code"
)
argparser.add_argument("--stdin", action="store_true", help="read list of tests from stdin")
argparser.add_argument("--exclude", action="append", help="exclude test by name")
args = argparser.parse_args()
if not args.stdin:
if args.exclude:
exclude_tests += tuple(args.exclude)
for group in test_dirs:
tests += [test for test in glob("{}/*.py".format(group)) if test not in exclude_tests]
else:
for l in sys.stdin:
tests.append(l.rstrip())
output.extend([test_function.format(**script_to_map(test)) for test in tests])
testcase_members = [testcase_member.format(**chew_filename(test)) for test in tests]
output.append(testcase_struct.format(name="", body="\n".join(testcase_members)))
testgroup_members = [testgroup_member.format(name=group) for group in [""]]
output.append(testgroup_struct.format(body="\n".join(testgroup_members)))
## XXX: maybe we could have `--output <filename>` argument...
# Don't depend on what system locale is set, use utf8 encoding.
sys.stdout.buffer.write("\n\n".join(output).encode("utf8"))
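# Hypothetical invocations (run from the MicroPython tests/ directory; the
# script and output file names are illustrative):
#   python3 tinytest-codegen.py > tinytest_codegen.c
#   ls basics/*.py | python3 tinytest-codegen.py --stdin > tinytest_codegen.c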

View File

@@ -0,0 +1,438 @@
#!/usr/bin/env python3
# Microsoft UF2
#
# The MIT License (MIT)
#
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import struct
import subprocess
import re
import os
import os.path
import argparse
import json
UF2_MAGIC_START0 = 0x0A324655 # "UF2\n"
UF2_MAGIC_START1 = 0x9E5D5157 # Randomly selected
UF2_MAGIC_END = 0x0AB16F30 # Ditto
INFO_FILE = "/INFO_UF2.TXT"
appstartaddr = 0x2000
familyid = 0x0
def is_uf2(buf):
w = struct.unpack("<II", buf[0:8])
return w[0] == UF2_MAGIC_START0 and w[1] == UF2_MAGIC_START1
def is_hex(buf):
try:
w = buf[0:30].decode("utf-8")
except UnicodeDecodeError:
return False
if w[0] == ":" and re.match(b"^[:0-9a-fA-F\r\n]+$", buf):
return True
return False
def convert_from_uf2(buf):
global appstartaddr
global familyid
numblocks = len(buf) // 512
curraddr = None
currfamilyid = None
families_found = {}
prev_flag = None
all_flags_same = True
outp = []
for blockno in range(numblocks):
ptr = blockno * 512
block = buf[ptr : ptr + 512]
hd = struct.unpack(b"<IIIIIIII", block[0:32])
if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
print("Skipping block at " + ptr + "; bad magic")
continue
if hd[2] & 1:
# NO-flash flag set; skip block
continue
datalen = hd[4]
if datalen > 476:
            assert False, "Invalid UF2 data size at " + str(ptr)
newaddr = hd[3]
if (hd[2] & 0x2000) and (currfamilyid == None):
currfamilyid = hd[7]
if curraddr == None or ((hd[2] & 0x2000) and hd[7] != currfamilyid):
currfamilyid = hd[7]
curraddr = newaddr
if familyid == 0x0 or familyid == hd[7]:
appstartaddr = newaddr
padding = newaddr - curraddr
        if padding < 0:
            assert False, "Block out of order at " + str(ptr)
        if padding > 10 * 1024 * 1024:
            assert False, "More than 10M of padding needed at " + str(ptr)
        if padding % 4 != 0:
            assert False, "Non-word padding size at " + str(ptr)
while padding > 0:
padding -= 4
            outp.append(b"\x00\x00\x00\x00")
if familyid == 0x0 or ((hd[2] & 0x2000) and familyid == hd[7]):
outp.append(block[32 : 32 + datalen])
curraddr = newaddr + datalen
if hd[2] & 0x2000:
if hd[7] in families_found.keys():
if families_found[hd[7]] > newaddr:
families_found[hd[7]] = newaddr
else:
families_found[hd[7]] = newaddr
if prev_flag == None:
prev_flag = hd[2]
if prev_flag != hd[2]:
all_flags_same = False
if blockno == (numblocks - 1):
print("--- UF2 File Header Info ---")
families = load_families()
for family_hex in families_found.keys():
family_short_name = ""
for name, value in families.items():
if value == family_hex:
family_short_name = name
print(
"Family ID is {:s}, hex value is 0x{:08x}".format(
family_short_name, family_hex
)
)
print("Target Address is 0x{:08x}".format(families_found[family_hex]))
if all_flags_same:
print("All block flag values consistent, 0x{:04x}".format(hd[2]))
else:
print("Flags were not all the same")
print("----------------------------")
if len(families_found) > 1 and familyid == 0x0:
outp = []
appstartaddr = 0x0
return b"".join(outp)
def convert_to_carray(file_content):
outp = "const unsigned long bindata_len = %d;\n" % len(file_content)
outp += "const unsigned char bindata[] __attribute__((aligned(16))) = {"
for i in range(len(file_content)):
if i % 16 == 0:
outp += "\n"
outp += "0x%02x, " % file_content[i]
outp += "\n};\n"
return bytes(outp, "utf-8")
def convert_to_uf2(file_content):
global familyid
datapadding = b""
while len(datapadding) < 512 - 256 - 32 - 4:
datapadding += b"\x00\x00\x00\x00"
numblocks = (len(file_content) + 255) // 256
outp = []
for blockno in range(numblocks):
ptr = 256 * blockno
chunk = file_content[ptr : ptr + 256]
flags = 0x0
if familyid:
flags |= 0x2000
hd = struct.pack(
b"<IIIIIIII",
UF2_MAGIC_START0,
UF2_MAGIC_START1,
flags,
ptr + appstartaddr,
256,
blockno,
numblocks,
familyid,
)
while len(chunk) < 256:
chunk += b"\x00"
block = hd + chunk + datapadding + struct.pack(b"<I", UF2_MAGIC_END)
assert len(block) == 512
outp.append(block)
return b"".join(outp)
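# Layout of each 512-byte block emitted by convert_to_uf2() above:
#   bytes   0-31  : eight little-endian uint32 fields (MAGIC_START0, MAGIC_START1,
#                   flags, target address, payload size (256), block number,
#                   total blocks, familyID)
#   bytes  32-287 : 256 bytes of payload, zero padded
#   bytes 288-507 : zero padding
#   bytes 508-511 : UF2_MAGIC_END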
class Block:
def __init__(self, addr):
self.addr = addr
self.bytes = bytearray(256)
def encode(self, blockno, numblocks):
global familyid
flags = 0x0
if familyid:
flags |= 0x2000
hd = struct.pack(
"<IIIIIIII",
UF2_MAGIC_START0,
UF2_MAGIC_START1,
flags,
self.addr,
256,
blockno,
numblocks,
familyid,
)
hd += self.bytes[0:256]
while len(hd) < 512 - 4:
hd += b"\x00"
hd += struct.pack("<I", UF2_MAGIC_END)
return hd
def convert_from_hex_to_uf2(buf):
global appstartaddr
appstartaddr = None
upper = 0
currblock = None
blocks = []
for line in buf.split("\n"):
if line[0] != ":":
continue
i = 1
rec = []
while i < len(line) - 1:
rec.append(int(line[i : i + 2], 16))
i += 2
tp = rec[3]
if tp == 4:
upper = ((rec[4] << 8) | rec[5]) << 16
elif tp == 2:
upper = ((rec[4] << 8) | rec[5]) << 4
elif tp == 1:
break
elif tp == 0:
addr = upper + ((rec[1] << 8) | rec[2])
if appstartaddr == None:
appstartaddr = addr
i = 4
while i < len(rec) - 1:
if not currblock or currblock.addr & ~0xFF != addr & ~0xFF:
currblock = Block(addr & ~0xFF)
blocks.append(currblock)
currblock.bytes[addr & 0xFF] = rec[i]
addr += 1
i += 1
numblocks = len(blocks)
resfile = b""
for i in range(0, numblocks):
resfile += blocks[i].encode(i, numblocks)
return resfile
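# Illustrative Intel HEX data record handled by the loop above:
#   :10010000214601360121470136007EFE09D2190140
# i.e. byte count 0x10, load address 0x0100, record type 00 (data), 16 data
# bytes, checksum; type 04 records set the upper 16 address bits and a type 01
# record ends the file.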
def to_str(b):
return b.decode("utf-8")
def get_drives():
drives = []
if sys.platform == "win32":
r = subprocess.check_output(
[
"wmic",
"PATH",
"Win32_LogicalDisk",
"get",
"DeviceID,",
"VolumeName,",
"FileSystem,",
"DriveType",
]
)
for line in to_str(r).split("\n"):
words = re.split("\s+", line)
if len(words) >= 3 and words[1] == "2" and words[2] == "FAT":
drives.append(words[0])
else:
rootpath = "/media"
if sys.platform == "darwin":
rootpath = "/Volumes"
elif sys.platform == "linux":
tmp = rootpath + "/" + os.environ["USER"]
if os.path.isdir(tmp):
rootpath = tmp
for d in os.listdir(rootpath):
drives.append(os.path.join(rootpath, d))
def has_info(d):
try:
return os.path.isfile(d + INFO_FILE)
except:
return False
return list(filter(has_info, drives))
def board_id(path):
with open(path + INFO_FILE, mode="r") as file:
file_content = file.read()
return re.search("Board-ID: ([^\r\n]*)", file_content).group(1)
def list_drives():
for d in get_drives():
print(d, board_id(d))
def write_file(name, buf):
with open(name, "wb") as f:
f.write(buf)
print("Wrote %d bytes to %s" % (len(buf), name))
def load_families():
# The expectation is that the `uf2families.json` file is in the same
# directory as this script. Make a path that works using `__file__`
# which contains the full path to this script.
filename = "uf2families.json"
pathname = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)
with open(pathname) as f:
raw_families = json.load(f)
families = {}
for family in raw_families:
families[family["short_name"]] = int(family["id"], 0)
return families
def main():
global appstartaddr, familyid
def error(msg):
print(msg)
sys.exit(1)
parser = argparse.ArgumentParser(description="Convert to UF2 or flash directly.")
parser.add_argument(
"input", metavar="INPUT", type=str, nargs="?", help="input file (HEX, BIN or UF2)"
)
parser.add_argument(
"-b",
"--base",
dest="base",
type=str,
default="0x2000",
help="set base address of application for BIN format (default: 0x2000)",
)
parser.add_argument(
"-o",
"--output",
metavar="FILE",
dest="output",
type=str,
help='write output to named file; defaults to "flash.uf2" or "flash.bin" where sensible',
)
parser.add_argument("-d", "--device", dest="device_path", help="select a device path to flash")
parser.add_argument("-l", "--list", action="store_true", help="list connected devices")
parser.add_argument("-c", "--convert", action="store_true", help="do not flash, just convert")
parser.add_argument("-D", "--deploy", action="store_true", help="just flash, do not convert")
parser.add_argument(
"-f",
"--family",
dest="family",
type=str,
default="0x0",
help="specify familyID - number or name (default: 0x0)",
)
parser.add_argument(
"-C", "--carray", action="store_true", help="convert binary file to a C array, not UF2"
)
parser.add_argument(
"-i",
"--info",
action="store_true",
help="display header information from UF2, do not convert",
)
args = parser.parse_args()
appstartaddr = int(args.base, 0)
families = load_families()
if args.family.upper() in families:
familyid = families[args.family.upper()]
else:
try:
familyid = int(args.family, 0)
except ValueError:
error("Family ID needs to be a number or one of: " + ", ".join(families.keys()))
if args.list:
list_drives()
else:
if not args.input:
error("Need input file")
with open(args.input, mode="rb") as f:
inpbuf = f.read()
from_uf2 = is_uf2(inpbuf)
ext = "uf2"
if args.deploy:
outbuf = inpbuf
elif from_uf2 and not args.info:
outbuf = convert_from_uf2(inpbuf)
ext = "bin"
elif from_uf2 and args.info:
outbuf = ""
convert_from_uf2(inpbuf)
elif is_hex(inpbuf):
outbuf = convert_from_hex_to_uf2(inpbuf.decode("utf-8"))
elif args.carray:
outbuf = convert_to_carray(inpbuf)
ext = "h"
else:
outbuf = convert_to_uf2(inpbuf)
if not args.deploy and not args.info:
print(
"Converted to %s, output size: %d, start address: 0x%x"
% (ext, len(outbuf), appstartaddr)
)
if args.convert or ext != "uf2":
drives = []
if args.output == None:
args.output = "flash." + ext
else:
drives = get_drives()
if args.output:
write_file(args.output, outbuf)
else:
if len(drives) == 0:
error("No drive to deploy.")
for d in drives:
print("Flashing %s (%s)" % (d, board_id(d)))
write_file(d + "/NEW.UF2", outbuf)
if __name__ == "__main__":
main()
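# Hypothetical command lines (options as parsed in main() above; file and
# family names are illustrative):
#   python3 uf2conv.py firmware.bin -b 0x2000 -f SAMD21 -c -o firmware.uf2
#   python3 uf2conv.py firmware.uf2 --info
#   python3 uf2conv.py --list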

View File

@@ -0,0 +1,192 @@
[
{
"id": "0x16573617",
"short_name": "ATMEGA32",
"description": "Microchip (Atmel) ATmega32"
},
{
"id": "0x1851780a",
"short_name": "SAML21",
"description": "Microchip (Atmel) SAML21"
},
{
"id": "0x1b57745f",
"short_name": "NRF52",
"description": "Nordic NRF52"
},
{
"id": "0x1c5f21b0",
"short_name": "ESP32",
"description": "ESP32"
},
{
"id": "0x1e1f432d",
"short_name": "STM32L1",
"description": "ST STM32L1xx"
},
{
"id": "0x202e3a91",
"short_name": "STM32L0",
"description": "ST STM32L0xx"
},
{
"id": "0x21460ff0",
"short_name": "STM32WL",
"description": "ST STM32WLxx"
},
{
"id": "0x2abc77ec",
"short_name": "LPC55",
"description": "NXP LPC55xx"
},
{
"id": "0x300f5633",
"short_name": "STM32G0",
"description": "ST STM32G0xx"
},
{
"id": "0x31d228c6",
"short_name": "GD32F350",
"description": "GD32F350"
},
{
"id": "0x04240bdf",
"short_name": "STM32L5",
"description": "ST STM32L5xx"
},
{
"id": "0x4c71240a",
"short_name": "STM32G4",
"description": "ST STM32G4xx"
},
{
"id": "0x4fb2d5bd",
"short_name": "MIMXRT10XX",
"description": "NXP i.MX RT10XX"
},
{
"id": "0x53b80f00",
"short_name": "STM32F7",
"description": "ST STM32F7xx"
},
{
"id": "0x55114460",
"short_name": "SAMD51",
"description": "Microchip (Atmel) SAMD51"
},
{
"id": "0x57755a57",
"short_name": "STM32F4",
"description": "ST STM32F401"
},
{
"id": "0x5a18069b",
"short_name": "FX2",
"description": "Cypress FX2"
},
{
"id": "0x5d1a0a2e",
"short_name": "STM32F2",
"description": "ST STM32F2xx"
},
{
"id": "0x5ee21072",
"short_name": "STM32F1",
"description": "ST STM32F103"
},
{
"id": "0x621e937a",
"short_name": "NRF52833",
"description": "Nordic NRF52833"
},
{
"id": "0x647824b6",
"short_name": "STM32F0",
"description": "ST STM32F0xx"
},
{
"id": "0x68ed2b88",
"short_name": "SAMD21",
"description": "Microchip (Atmel) SAMD21"
},
{
"id": "0x6b846188",
"short_name": "STM32F3",
"description": "ST STM32F3xx"
},
{
"id": "0x6d0922fa",
"short_name": "STM32F407",
"description": "ST STM32F407"
},
{
"id": "0x6db66082",
"short_name": "STM32H7",
"description": "ST STM32H7xx"
},
{
"id": "0x70d16653",
"short_name": "STM32WB",
"description": "ST STM32WBxx"
},
{
"id": "0x7eab61ed",
"short_name": "ESP8266",
"description": "ESP8266"
},
{
"id": "0x7f83e793",
"short_name": "KL32L2",
"description": "NXP KL32L2x"
},
{
"id": "0x8fb060fe",
"short_name": "STM32F407VG",
"description": "ST STM32F407VG"
},
{
"id": "0xada52840",
"short_name": "NRF52840",
"description": "Nordic NRF52840"
},
{
"id": "0xbfdd4eee",
"short_name": "ESP32S2",
"description": "ESP32-S2"
},
{
"id": "0xc47e5767",
"short_name": "ESP32S3",
"description": "ESP32-S3"
},
{
"id": "0xd42ba06c",
"short_name": "ESP32C3",
"description": "ESP32-C3"
},
{
"id": "0x2b88d29c",
"short_name": "ESP32C2",
"description": "ESP32-C2"
},
{
"id": "0x332726f6",
"short_name": "ESP32H2",
"description": "ESP32-H2"
},
{
"id": "0xe48bff56",
"short_name": "RP2040",
"description": "Raspberry Pi RP2040"
},
{
"id": "0x00ff6919",
"short_name": "STM32L4",
"description": "ST STM32L4xx"
},
{
"id": "0x9af03e33",
"short_name": "GD32VF103",
"description": "GigaDevice GD32VF103"
}
]

File diff suppressed because it is too large

View File

@@ -0,0 +1,351 @@
#
# upip - Package manager for MicroPython
#
# Copyright (c) 2015-2018 Paul Sokolovsky
#
# Licensed under the MIT license.
#
import sys
import gc
import uos as os
import uerrno as errno
import ujson as json
import uzlib
import upip_utarfile as tarfile
gc.collect()
debug = False
index_urls = ["https://micropython.org/pi", "https://pypi.org/pypi"]
install_path = None
cleanup_files = []
gzdict_sz = 16 + 15
file_buf = bytearray(512)
class NotFoundError(Exception):
pass
def op_split(path):
if path == "":
return ("", "")
r = path.rsplit("/", 1)
if len(r) == 1:
return ("", path)
head = r[0]
if not head:
head = "/"
return (head, r[1])
# Expects *file* name
def _makedirs(name, mode=0o777):
ret = False
s = ""
comps = name.rstrip("/").split("/")[:-1]
if comps[0] == "":
s = "/"
for c in comps:
if s and s[-1] != "/":
s += "/"
s += c
try:
os.mkdir(s)
ret = True
except OSError as e:
if e.errno != errno.EEXIST and e.errno != errno.EISDIR:
raise e
ret = False
return ret
def save_file(fname, subf):
global file_buf
with open(fname, "wb") as outf:
while True:
sz = subf.readinto(file_buf)
if not sz:
break
outf.write(file_buf, sz)
def install_tar(f, prefix):
meta = {}
for info in f:
# print(info)
fname = info.name
try:
fname = fname[fname.index("/") + 1 :]
except ValueError:
fname = ""
save = True
for p in ("setup.", "PKG-INFO", "README"):
# print(fname, p)
if fname.startswith(p) or ".egg-info" in fname:
if fname.endswith("/requires.txt"):
meta["deps"] = f.extractfile(info).read()
save = False
if debug:
print("Skipping", fname)
break
if save:
outfname = prefix + fname
if info.type != tarfile.DIRTYPE:
if debug:
print("Extracting " + outfname)
_makedirs(outfname)
subf = f.extractfile(info)
save_file(outfname, subf)
return meta
def expandhome(s):
if "~/" in s:
h = os.getenv("HOME")
s = s.replace("~/", h + "/")
return s
import ussl
import usocket
warn_ussl = True
def url_open(url):
global warn_ussl
if debug:
print(url)
proto, _, host, urlpath = url.split("/", 3)
try:
port = 443
if ":" in host:
host, port = host.split(":")
port = int(port)
ai = usocket.getaddrinfo(host, port, 0, usocket.SOCK_STREAM)
except OSError as e:
fatal("Unable to resolve %s (no Internet?)" % host, e)
# print("Address infos:", ai)
ai = ai[0]
s = usocket.socket(ai[0], ai[1], ai[2])
try:
# print("Connect address:", addr)
s.connect(ai[-1])
if proto == "https:":
s = ussl.wrap_socket(s, server_hostname=host)
if warn_ussl:
print("Warning: %s SSL certificate is not validated" % host)
warn_ussl = False
# MicroPython rawsocket module supports file interface directly
s.write("GET /%s HTTP/1.0\r\nHost: %s:%s\r\n\r\n" % (urlpath, host, port))
l = s.readline()
protover, status, msg = l.split(None, 2)
if status != b"200":
if status == b"404" or status == b"301":
raise NotFoundError("Package not found")
raise ValueError(status)
while 1:
l = s.readline()
if not l:
raise ValueError("Unexpected EOF in HTTP headers")
if l == b"\r\n":
break
except Exception as e:
s.close()
raise e
return s
def get_pkg_metadata(name):
for url in index_urls:
try:
f = url_open("%s/%s/json" % (url, name))
except NotFoundError:
continue
try:
return json.load(f)
finally:
f.close()
raise NotFoundError("Package not found")
def fatal(msg, exc=None):
print("Error:", msg)
if exc and debug:
raise exc
sys.exit(1)
def install_pkg(pkg_spec, install_path):
package = pkg_spec.split("==")
data = get_pkg_metadata(package[0])
if len(package) == 1:
latest_ver = data["info"]["version"]
else:
latest_ver = package[1]
packages = data["releases"][latest_ver]
del data
gc.collect()
assert len(packages) == 1
package_url = packages[0]["url"]
print("Installing %s %s from %s" % (pkg_spec, latest_ver, package_url))
f1 = url_open(package_url)
try:
f2 = uzlib.DecompIO(f1, gzdict_sz)
f3 = tarfile.TarFile(fileobj=f2)
meta = install_tar(f3, install_path)
finally:
f1.close()
del f3
del f2
gc.collect()
return meta
def install(to_install, install_path=None):
# Calculate gzip dictionary size to use
global gzdict_sz
sz = gc.mem_free() + gc.mem_alloc()
if sz <= 65536:
gzdict_sz = 16 + 12
if install_path is None:
install_path = get_install_path()
if install_path[-1] != "/":
install_path += "/"
if not isinstance(to_install, list):
to_install = [to_install]
print("Installing to: " + install_path)
# sets would be perfect here, but don't depend on them
installed = []
try:
while to_install:
if debug:
print("Queue:", to_install)
pkg_spec = to_install.pop(0)
if pkg_spec in installed:
continue
meta = install_pkg(pkg_spec, install_path)
installed.append(pkg_spec)
if debug:
print(meta)
deps = meta.get("deps", "").rstrip()
if deps:
deps = deps.decode("utf-8").split("\n")
to_install.extend(deps)
except Exception as e:
print(
"Error installing '{}': {}, packages may be partially installed".format(pkg_spec, e),
file=sys.stderr,
)
def get_install_path():
global install_path
if install_path is None:
# sys.path[0] is current module's path
install_path = sys.path[1]
if install_path == ".frozen":
install_path = sys.path[2]
install_path = expandhome(install_path)
return install_path
def cleanup():
for fname in cleanup_files:
try:
os.unlink(fname)
except OSError:
print("Warning: Cannot delete " + fname)
def help():
print(
"""\
upip - Simple PyPI package manager for MicroPython
Usage: micropython -m upip install [-p <path>] <package>... | -r <requirements.txt>
import upip; upip.install(package_or_list, [<path>])
If <path> isn't given, packages will be installed to sys.path[1], or
sys.path[2] if the former is .frozen (path can be set from MICROPYPATH
environment variable if supported)."""
)
print("Default install path:", get_install_path())
print(
"""\
Note: only MicroPython packages (usually, named micropython-*) are supported
for installation, upip does not support arbitrary code in setup.py.
"""
)
def main():
global debug
global index_urls
global install_path
install_path = None
if len(sys.argv) < 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
help()
return
if sys.argv[1] != "install":
fatal("Only 'install' command supported")
to_install = []
i = 2
while i < len(sys.argv) and sys.argv[i][0] == "-":
opt = sys.argv[i]
i += 1
if opt == "-h" or opt == "--help":
help()
return
elif opt == "-p":
install_path = sys.argv[i]
i += 1
elif opt == "-r":
list_file = sys.argv[i]
i += 1
with open(list_file) as f:
while True:
l = f.readline()
if not l:
break
if l[0] == "#":
continue
to_install.append(l.rstrip())
elif opt == "-i":
index_urls = [sys.argv[i]]
i += 1
elif opt == "--debug":
debug = True
else:
fatal("Unknown/unsupported option: " + opt)
to_install.extend(sys.argv[i:])
if not to_install:
help()
return
install(to_install)
if not debug:
cleanup()
if __name__ == "__main__":
main()
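# Hypothetical usage from the REPL or a script (the package names are illustrative):
#   import upip
#   upip.install("micropython-logging")
#   upip.install(["micropython-logging"], "/lib")
# or, on the unix port, from the command line:
#   micropython -m upip install -p /lib micropython-logging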

View File

@@ -0,0 +1,95 @@
import uctypes
# http://www.gnu.org/software/tar/manual/html_node/Standard.html
TAR_HEADER = {
"name": (uctypes.ARRAY | 0, uctypes.UINT8 | 100),
"size": (uctypes.ARRAY | 124, uctypes.UINT8 | 11),
}
DIRTYPE = "dir"
REGTYPE = "file"
def roundup(val, align):
return (val + align - 1) & ~(align - 1)
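# e.g. roundup(300, 512) == 512 and roundup(1024, 512) == 1024; tar stores each
# file's data padded to a 512-byte record, and FileSection.skip() below consumes
# that padding.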
class FileSection:
def __init__(self, f, content_len, aligned_len):
self.f = f
self.content_len = content_len
self.align = aligned_len - content_len
def read(self, sz=65536):
if self.content_len == 0:
return b""
if sz > self.content_len:
sz = self.content_len
data = self.f.read(sz)
sz = len(data)
self.content_len -= sz
return data
def readinto(self, buf):
if self.content_len == 0:
return 0
if len(buf) > self.content_len:
buf = memoryview(buf)[: self.content_len]
sz = self.f.readinto(buf)
self.content_len -= sz
return sz
def skip(self):
sz = self.content_len + self.align
if sz:
buf = bytearray(16)
while sz:
s = min(sz, 16)
self.f.readinto(buf, s)
sz -= s
class TarInfo:
def __str__(self):
return "TarInfo(%r, %s, %d)" % (self.name, self.type, self.size)
class TarFile:
def __init__(self, name=None, fileobj=None):
if fileobj:
self.f = fileobj
else:
self.f = open(name, "rb")
self.subf = None
def next(self):
if self.subf:
self.subf.skip()
buf = self.f.read(512)
if not buf:
return None
h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)
# Empty block means end of archive
if h.name[0] == 0:
return None
d = TarInfo()
d.name = str(h.name, "utf-8").rstrip("\0")
d.size = int(bytes(h.size), 8)
d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
return d
def __iter__(self):
return self
def __next__(self):
v = self.next()
if v is None:
raise StopIteration
return v
def extractfile(self, tarinfo):
return tarinfo.subf
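# Minimal usage sketch (runs under MicroPython, where uctypes is available; the
# archive name is illustrative):
#   t = TarFile("package.tar")
#   for info in t:
#       if info.type == REGTYPE:
#           data = t.extractfile(info).read()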

View File

@@ -0,0 +1,124 @@
#!/usr/bin/env python3
import re
import subprocess
import sys
verbosity = 0  # Show what's going on: 0, 1 or 2.
suggestions = 1 # Set to 0 to not include lengthy suggestions in error messages.
def verbose(*args):
if verbosity:
print(*args)
def very_verbose(*args):
if verbosity > 1:
print(*args)
def git_log(pretty_format, *args):
# Delete pretty argument from user args so it doesn't interfere with what we do.
    args = ["git", "log"] + [arg for arg in args if "--pretty" not in arg]
args.append("--pretty=format:" + pretty_format)
very_verbose("git_log", *args)
# Generator yielding each output line.
for line in subprocess.Popen(args, stdout=subprocess.PIPE).stdout:
yield line.decode().rstrip("\r\n")
def verify(sha):
verbose("verify", sha)
errors = []
warnings = []
def error_text(err):
return "commit " + sha + ": " + err
def error(err):
errors.append(error_text(err))
def warning(err):
warnings.append(error_text(err))
# Author and committer email.
for line in git_log("%ae%n%ce", sha, "-n1"):
very_verbose("email", line)
if "noreply" in line:
error("Unwanted email address: " + line)
# Message body.
raw_body = list(git_log("%B", sha, "-n1"))
if not raw_body:
error("Message is empty")
return errors, warnings
# Subject line.
subject_line = raw_body[0]
very_verbose("subject_line", subject_line)
subject_line_format = r"^[^!]+: [A-Z]+.+ .+\.$"
if not re.match(subject_line_format, subject_line):
error("Subject line should match " + repr(subject_line_format) + ": " + subject_line)
if len(subject_line) >= 73:
error("Subject line should be 72 or less characters: " + subject_line)
# Second one divides subject and body.
if len(raw_body) > 1 and raw_body[1]:
error("Second message line should be empty: " + raw_body[1])
# Message body lines.
for line in raw_body[2:]:
# Long lines with URLs are exempt from the line length rule.
if len(line) >= 76 and "://" not in line:
error("Message lines should be 75 or less characters: " + line)
if not raw_body[-1].startswith("Signed-off-by: ") or "@" not in raw_body[-1]:
warning("Message should be signed-off")
return errors, warnings
def run(args):
verbose("run", *args)
has_errors = False
has_warnings = False
for sha in git_log("%h", *args):
errors, warnings = verify(sha)
has_errors |= any(errors)
has_warnings |= any(warnings)
for err in errors:
print("error:", err)
for err in warnings:
print("warning:", err)
if has_errors or has_warnings:
if suggestions:
print("See https://github.com/micropython/micropython/blob/master/CODECONVENTIONS.md")
else:
print("ok")
if has_errors:
sys.exit(1)
def show_help():
print("usage: verifygitlog.py [-v -n -h] ...")
print("-v : increase verbosity, can be speficied multiple times")
print("-n : do not print multi-line suggestions")
print("-h : print this help message and exit")
print("... : arguments passed to git log to retrieve commits to verify")
print(" see https://www.git-scm.com/docs/git-log")
print(" passing no arguments at all will verify all commits")
print("examples:")
print("verifygitlog.py -n10 # Check last 10 commits")
print("verifygitlog.py -v master..HEAD # Check commits since master")
if __name__ == "__main__":
args = sys.argv[1:]
verbosity = args.count("-v")
suggestions = args.count("-n") == 0
if "-h" in args:
show_help()
else:
args = [arg for arg in args if arg not in ["-v", "-n", "-h"]]
run(args)