Merge pull request #291 from pimoroni/experiment/multi-board-ci

Build MicroPython for multiple boards
pull/292/head
Philip Howard 2022-03-11 12:51:50 +00:00 committed by GitHub
commit faa83986af
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 354 additions and 211 deletions

View file

@@ -18,7 +18,7 @@ jobs:
name: Linux
cache-key: linux
cmake-args: '-DPICO_SDK_PATH=$GITHUB_WORKSPACE/pico-sdk -DPICO_SDK_POST_LIST_DIRS=$GITHUB_WORKSPACE/pico-extras'
apt-packages: clang-tidy gcc-arm-none-eabi libnewlib-arm-none-eabi libstdc++-arm-none-eabi-newlib
apt-packages: ccache
runs-on: ${{matrix.os}}
@@ -26,6 +26,15 @@ jobs:
PICO_SDK_PATH: $GITHUB_WORKSPACE/pico-sdk
steps:
- name: Compiler Cache
uses: actions/cache@v2
with:
path: /home/runner/.ccache
key: ccache-cmake-${{github.ref}}-${{github.sha}}
restore-keys: |
ccache-cmake-${{github.ref}}
ccache-cmake
- uses: actions/checkout@v2
with:
submodules: true
@@ -44,7 +53,7 @@ jobs:
with:
repository: raspberrypi/pico-extras
path: pico-extras
submodules: false # lwip breaks audio submodule fetchin
submodules: false # lwip breaks audio submodule fetching
# Linux deps
- name: Install deps
@@ -52,16 +61,27 @@ jobs:
run: |
sudo apt update && sudo apt install ${{matrix.apt-packages}}
- name: Install ARM Toolchain
if: runner.os == 'Linux'
working-directory: ${{runner.workspace}}
run: |
wget -q https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2
tar xf gcc-*.tar.bz2
cd gcc*/bin
pwd >> $GITHUB_PATH
- name: Create Build Environment
run: cmake -E make_directory ${{runner.workspace}}/build
- name: Configure CMake
shell: bash
working-directory: ${{runner.workspace}}/build
run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE ${{matrix.cmake-args}}
run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache ${{matrix.cmake-args}}
- name: Build
working-directory: ${{runner.workspace}}/build
shell: bash
run: |
ccache --zero-stats || true
cmake --build . --config $BUILD_TYPE -j 2
ccache --show-stats || true
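The Compiler Cache step above relies on actions/cache restore-keys: if no saved cache matches the exact ccache-cmake-${{github.ref}}-${{github.sha}} key, the action falls back through the listed prefixes in order, restoring the most recently saved cache whose key starts with that prefix. A rough Python sketch of that matching order (an illustration only, with a made-up pick_cache helper and cache names; not GitHub's implementation):

# Illustration of actions/cache key matching (simplified, hypothetical helper).
# saved_caches maps a cache key to the time it was saved (larger = newer).
def pick_cache(saved_caches, key, restore_keys):
    if key in saved_caches:                         # exact hit first
        return key
    for prefix in restore_keys:                     # then each restore-key in order
        hits = [k for k in saved_caches if k.startswith(prefix)]
        if hits:
            return max(hits, key=saved_caches.get)  # newest matching cache wins
    return None                                     # cold start: nothing restored

saved = {"ccache-cmake-refs/heads/main-abc123": 1,
         "ccache-cmake-refs/heads/main-def456": 2}
print(pick_cache(saved, "ccache-cmake-refs/heads/main-999999",
                 ["ccache-cmake-refs/heads/main", "ccache-cmake"]))
# -> ccache-cmake-refs/heads/main-def456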

View file

@@ -7,28 +7,28 @@ on:
types: [created]
env:
# Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
MICROPYTHON_VERSION: v1.18
BUILD_TYPE: Release
BOARD_TYPE: PICO
BOARD_TYPE: PICO
# MicroPython version will be contained in github.event.release.tag_name for releases
RELEASE_FILE: pimoroni-badger2040-${{github.event.release.tag_name || github.sha}}-micropython.uf2
jobs:
build:
name: ${{matrix.name}}
strategy:
matrix:
include:
- os: ubuntu-20.04
name: Linux
cache-key: linux
cmake-args: '-DPICO_SDK_PATH=$GITHUB_WORKSPACE/pico-sdk'
apt-packages: clang-tidy gcc-arm-none-eabi libnewlib-arm-none-eabi libstdc++-arm-none-eabi-newlib
runs-on: ${{matrix.os}}
deps:
runs-on: ubuntu-20.04
name: Dependencies
steps:
- name: Workspace Cache
id: cache
uses: actions/cache@v2
with:
path: ${{runner.workspace}}
key: workspace-micropython-${{env.MICROPYTHON_VERSION}}
restore-keys: |
workspace-micropython-${{env.MICROPYTHON_VERSION}}
# Check out MicroPython
- name: Checkout MicroPython
if: steps.cache.outputs.cache-hit != 'true'
uses: actions/checkout@v2
with:
repository: micropython/micropython
@@ -36,6 +36,47 @@ jobs:
submodules: false # MicroPython submodules are hideously broken
path: micropython
- name: Fetch base MicroPython submodules
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython
run: git submodule update --init
- name: Fetch Pico SDK submodules
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython/lib/pico-sdk
run: git submodule update --init
- name: Build mpy-cross
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython/mpy-cross
run: make
build:
needs: deps
name: Build Badger 2040
runs-on: ubuntu-20.04
steps:
- name: Compiler Cache
uses: actions/cache@v2
with:
path: /home/runner/.ccache
key: ccache-micropython-badger2040-${{github.ref}}-${{github.sha}}
restore-keys: |
ccache-micropython-badger2040-${{github.ref}}
ccache-micropython-badger2040-
- name: Workspace Cache
uses: actions/cache@v2
with:
path: ${{runner.workspace}}
key: workspace-micropython-${{env.MICROPYTHON_VERSION}}
restore-keys: |
workspace-micropython-${{env.MICROPYTHON_VERSION}}
- uses: actions/checkout@v2
with:
submodules: true
@@ -43,48 +84,51 @@ jobs:
# Copy Python module files
- name: HACK - Copy board config fixup
if: env.BOARD_TYPE == 'PICO'
run: |
cp pimoroni-pico-${GITHUB_SHA}/micropython/badger2040-mpconfigboard.h micropython/ports/rp2/boards/PICO/mpconfigboard.h
# Linux deps
- name: Install deps
- name: Install Compiler & CCache
if: runner.os == 'Linux'
run: |
sudo apt update && sudo apt install ${{matrix.apt-packages}}
sudo apt update && sudo apt install ccache
python3 -m pip install pillow
- name: Fetch base MicroPython submodules
shell: bash
working-directory: micropython
run: git submodule update --init
- name: Install ARM Toolchain
if: runner.os == 'Linux'
working-directory: ${{runner.workspace}}
run: |
wget -q https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2
tar xf gcc-*.tar.bz2
cd gcc*/bin
pwd >> $GITHUB_PATH
- name: Fetch Pico SDK submodules
- name: Configure MicroPython
shell: bash
working-directory: micropython/lib/pico-sdk
run: git submodule update --init
- name: Build mpy-cross
shell: bash
working-directory: micropython/mpy-cross
run: make
working-directory: micropython/ports/rp2
run: |
cmake -S . -B build-${{env.BOARD_TYPE}} -DPICO_BUILD_DOCS=0 -DUSER_C_MODULES=../../../pimoroni-pico-${GITHUB_SHA}/micropython/modules/badger2040-micropython.cmake -DMICROPY_BOARD=${{env.BOARD_TYPE}} -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
- name: Build MicroPython
shell: bash
working-directory: micropython/ports/rp2
run: make USER_C_MODULES=../../../pimoroni-pico-${GITHUB_SHA}/micropython/modules/badger2040-micropython.cmake -j2
run: |
ccache --zero-stats || true
cmake --build build-${{env.BOARD_TYPE}} -j 2
ccache --show-stats || true
- name: Rename .uf2 for artifact
shell: bash
working-directory: micropython/ports/rp2/build-${{env.BOARD_TYPE}}
run: |
cp firmware.uf2 ${{github.event.repository.name}}-${{github.sha}}-badger2040-micropython-${{env.MICROPYTHON_VERSION}}.uf2
cp firmware.uf2 ${{github.event.repository.name}}-${{github.event.release.tag_name}}-badger2040-micropython-${{env.MICROPYTHON_VERSION}}.uf2
cp firmware.uf2 ${{env.RELEASE_FILE}}
- name: Store .uf2 as artifact
uses: actions/upload-artifact@v2
with:
name: ${{github.event.repository.name}}-${{github.sha}}-badger2040-micropython-${{env.MICROPYTHON_VERSION}}.uf2
path: micropython/ports/rp2/build-${{env.BOARD_TYPE}}/${{github.event.repository.name}}-${{github.sha}}-badger2040-micropython-${{env.MICROPYTHON_VERSION}}.uf2
name: ${{env.RELEASE_FILE}}
path: micropython/ports/rp2/build-${{env.BOARD_TYPE}}/${{env.RELEASE_FILE}}
- name: Upload .uf2
if: github.event_name == 'release'
@@ -94,5 +138,5 @@ jobs:
with:
asset_path: micropython/ports/rp2/build-${{env.BOARD_TYPE}}/firmware.uf2
upload_url: ${{github.event.release.upload_url}}
asset_name: ${{github.event.repository.name}}-${{github.event.release.tag_name}}-badger2040-micropython-${{env.MICROPYTHON_VERSION}}.uf2
asset_name: ${{env.RELEASE_FILE}}
asset_content_type: application/octet-stream

View file

@@ -24,7 +24,7 @@ jobs:
name: Linux
cache-key: linux
cmake-args: '-DPICO_SDK_PATH=$GITHUB_WORKSPACE/pico-sdk'
apt-packages: clang-tidy gcc-arm-none-eabi libnewlib-arm-none-eabi libstdc++-arm-none-eabi-newlib
apt-packages: ccache
runs-on: ${{matrix.os}}
@@ -82,6 +82,15 @@ jobs:
run: |
sudo apt update && sudo apt install ${{matrix.apt-packages}}
- name: Install ARM Toolchain
if: runner.os == 'Linux'
working-directory: ${{runner.workspace}}
run: |
wget -q https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2
tar xf gcc-*.tar.bz2
cd gcc*/bin
pwd >> $GITHUB_PATH
- name: Fetch base MicroPython submodules
shell: bash
working-directory: micropython

View file

@@ -7,28 +7,25 @@ on:
types: [created]
env:
# Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
MICROPYTHON_VERSION: v1.18
BUILD_TYPE: Release
BOARD_TYPE: PICO
jobs:
build:
name: ${{matrix.name}}
strategy:
matrix:
include:
- os: ubuntu-20.04
name: Linux
cache-key: linux
cmake-args: '-DPICO_SDK_PATH=$GITHUB_WORKSPACE/pico-sdk'
apt-packages: clang-tidy gcc-arm-none-eabi libnewlib-arm-none-eabi libstdc++-arm-none-eabi-newlib
runs-on: ${{matrix.os}}
deps:
runs-on: ubuntu-20.04
name: Dependencies
steps:
- name: Workspace Cache
id: cache
uses: actions/cache@v2
with:
path: ${{runner.workspace}}
key: workspace-micropython-${{env.MICROPYTHON_VERSION}}
restore-keys: |
workspace-micropython-${{env.MICROPYTHON_VERSION}}
# Check out MicroPython
- name: Checkout MicroPython
if: steps.cache.outputs.cache-hit != 'true'
uses: actions/checkout@v2
with:
repository: micropython/micropython
@@ -36,52 +33,105 @@ jobs:
submodules: false # MicroPython submodules are hideously broken
path: micropython
- uses: actions/checkout@v2
with:
submodules: true
path: pimoroni-pico-${{ github.sha }}
# Copy Python module files
- name: Copy modules
run: |
cp -r pimoroni-pico-${GITHUB_SHA}/micropython/modules_py/* micropython/ports/rp2/modules/
# Linux deps
- name: Install deps
if: runner.os == 'Linux'
run: |
sudo apt update && sudo apt install ${{matrix.apt-packages}}
- name: Fetch base MicroPython submodules
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython
run: git submodule update --init
- name: Fetch Pico SDK submodules
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython/lib/pico-sdk
run: git submodule update --init
- name: Build mpy-cross
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
working-directory: micropython/mpy-cross
run: make
build:
needs: deps
name: Build ${{matrix.board}}
runs-on: ubuntu-20.04
strategy:
matrix:
include:
- name: pico
board: PICO
- name: tiny2040
board: PIMORONI_TINY2040
- name: picolipo_4mb
board: PIMORONI_PICOLIPO_4MB
- name: picolipo_16mb
board: PIMORONI_PICOLIPO_16MB
env:
# MicroPython version will be contained in github.event.release.tag_name for releases
RELEASE_FILE: pimoroni-${{matrix.name}}-${{github.event.release.tag_name || github.sha}}-micropython.uf2
steps:
- name: Compiler Cache
uses: actions/cache@v2
with:
path: /home/runner/.ccache
key: ccache-micropython-${{matrix.name}}-${{github.ref}}-${{github.sha}}
restore-keys: |
ccache-micropython-${{matrix.name}}-${{github.ref}}
ccache-micropython-${{matrix.name}}-
- name: Workspace Cache
uses: actions/cache@v2
with:
path: ${{runner.workspace}}
key: workspace-micropython-${{env.MICROPYTHON_VERSION}}
restore-keys: |
workspace-micropython-${{env.MICROPYTHON_VERSION}}
- name: Install Compiler & CCache
if: runner.os == 'Linux'
run: |
sudo apt update && sudo apt install ccache
- name: Install ARM Toolchain
if: runner.os == 'Linux'
working-directory: ${{runner.workspace}}
run: |
wget -q https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2
tar xf gcc-*.tar.bz2
cd gcc*/bin
pwd >> $GITHUB_PATH
- uses: actions/checkout@v2
with:
submodules: true
path: pimoroni-pico-${{ github.sha }}
- name: Configure MicroPython
shell: bash
working-directory: micropython/ports/rp2
run: |
cmake -S . -B build-${{matrix.board}} -DPICO_BUILD_DOCS=0 -DUSER_C_MODULES=../../../pimoroni-pico-${GITHUB_SHA}/micropython/modules/micropython.cmake -DMICROPY_BOARD=${{matrix.board}} -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
- name: Build MicroPython
shell: bash
working-directory: micropython/ports/rp2
run: make USER_C_MODULES=../../../pimoroni-pico-${GITHUB_SHA}/micropython/modules/micropython.cmake -j2
run: |
ccache --zero-stats || true
cmake --build build-${{matrix.board}} -j 2
ccache --show-stats || true
- name: Rename .uf2 for artifact
shell: bash
working-directory: micropython/ports/rp2/build-${{env.BOARD_TYPE}}
run: cp firmware.uf2 ${{github.event.repository.name}}-${{github.event.release.tag_name}}-micropython-${{env.MICROPYTHON_VERSION}}.uf2
working-directory: micropython/ports/rp2/build-${{matrix.board}}
run: cp firmware.uf2 $RELEASE_FILE
- name: Store .uf2 as artifact
uses: actions/upload-artifact@v2
with:
name: ${{github.event.repository.name}}-${{github.event.release.tag_name}}-micropython-${{env.MICROPYTHON_VERSION}}.uf2
path: micropython/ports/rp2/build-${{env.BOARD_TYPE}}/${{github.event.repository.name}}-${{github.event.release.tag_name}}-micropython-${{env.MICROPYTHON_VERSION}}.uf2
name: ${{env.RELEASE_FILE}}
path: micropython/ports/rp2/build-${{matrix.board}}/${{env.RELEASE_FILE}}
- name: Upload .uf2
if: github.event_name == 'release'
@@ -89,7 +139,7 @@ jobs:
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
with:
asset_path: micropython/ports/rp2/build-${{env.BOARD_TYPE}}/firmware.uf2
asset_path: micropython/ports/rp2/build-${{matrix.board}}/firmware.uf2
upload_url: ${{github.event.release.upload_url}}
asset_name: ${{github.event.repository.name}}-${{github.event.release.tag_name}}-micropython-${{env.MICROPYTHON_VERSION}}.uf2
asset_name: ${{env.RELEASE_FILE}}
asset_content_type: application/octet-stream
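The matrix above turns one workflow into four board builds, and each job names its artifact from the board's short name plus either the release tag or the commit SHA. A quick Python sketch of the resulting filenames (the tag and SHA values below are made up or abbreviated for illustration):

# Artifact names produced by the board matrix above; outside a release the
# tag name is empty and the commit SHA is used instead.
boards = ["pico", "tiny2040", "picolipo_4mb", "picolipo_16mb"]
tag_name = ""             # empty when not triggered by a release event
sha = "faa83986af"        # abbreviated commit SHA, for illustration only
for name in boards:
    release_file = f"pimoroni-{name}-{tag_name or sha}-micropython.uf2"
    print(release_file)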

View file

@@ -6,30 +6,25 @@ on:
jobs:
build:
name: ${{matrix.name}}
strategy:
matrix:
include:
- os: ubuntu-20.04
name: Linux
cache-key: linux
apt-packages: python3 python3-pip
python-packages: flake8
runs-on: ${{matrix.os}}
name: Python Linting
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
# Linux deps
- name: Install deps
if: runner.os == 'Linux'
run: |
sudo apt update && sudo apt install ${{matrix.apt-packages}}
- name: Install Python Deps
run: python3 -m pip install ${{matrix.python-packages}}
run: python3 -m pip install flake8
- name: Lint
- name: Lint micropython/modules_py
shell: bash
run: |
python3 -m flake8 --ignore E501 micropython/modules_py micropython/examples
python3 -m flake8 --ignore E501 micropython/modules_py
- name: Lint micropython/examples
shell: bash
run: |
python3 -m flake8 --ignore E501 micropython/examples
- name: Lint .py tools in C++ examples
shell: bash
run: |
python3 -m flake8 --ignore E501 examples

View file

@@ -3,156 +3,157 @@
# converts Hershey fonts into a format easier to consume by the Badger2040
# e-ink badge library - the result can be piped directly into a .hpp file.
import argparse, sys, os, glob
import argparse
from pathlib import Path
parser = argparse.ArgumentParser(
description='Converts Hershey fonts into the format used by Badger2040.')
parser.add_argument('file', nargs="+", help='input files to convert')
description="Converts Hershey fonts into the format used by Badger2040."
)
parser.add_argument("file", nargs="+", help="input files to convert")
options = parser.parse_args()
options = None
try:
options = parser.parse_args()
except:
parser.print_help()
sys.exit(0)
def convert_font(data):
# parse hershey font data file, the file format contains one record per
# character with the format:
#
# 2761 8MXP[OZPYQZQ[P]N_
#
# first 5 characters: represent the character id (or a dummy value
# of 12345). if it is a character id then a separate file contains the
# mapping information to convert that into an ascii table entry. if all of
# the ids are 12345 then it seems that the files contain 95 entries that
# start from ascii character 32 (space) in order
#
# next 3 characters: number of vertices (including special left/right
# bounds value)
#
# next 2 characters: represent the "left" and "right" offsets for this
# character, to convert you must subtract 'R' from them. this pair counts
# as the first "vertex"
#
# subsequent pairs of characters: represent each vertex, as before converted
# into numbers by subtracting 'R'. there are vertex count (-1) of these
#
# note! within the vertex data can be extra newline characters which must
# be ignored.
# parse hershey font data file, the file format contains one record per
# character with the format:
#
# 2761 8MXP[OZPYQZQ[P]N_
#
# first 5 characters: represent the character id (or a dummy value
# of 12345). if it is a character id then a separate file contains the
# mapping information to convert that into an ascii table entry. if all of
# the ids are 12345 then it seems that the files contain 95 entries that
# start from ascii character 32 (space) in order
#
# next 3 characters: number of vertices (including special left/right
# bounds value)
#
# next 2 characters: represent the "left" and "right" offsets for this
# character, to convert you must subtract 'R' from them. this pair counts
# as the first "vertex"
#
# subsequent pairs of characters: represent each vertex, as before converted
# into numbers by subtracting 'R'. there are vertex count (-1) of these
#
# note! within the vertex data can be extra newline characters which must
# be ignored.
vertex_data = []
char_data = {}
vertex_data = []
char_data = {}
auto_char_code = 0
auto_char_code = 0
# we'll loop, extracting one character per iteration, until all of the
# data is consumed
while(len(data) > 0):
# extract character id
char_id = int(data[0:5])
# we'll loop, extracting one character per iteration, until all of the
# data is consumed
while len(data) > 0:
char_code = auto_char_code
auto_char_code += 1
char_code = auto_char_code
auto_char_code += 1
# extract vertex count
vertex_count = int(data[5:8])
vertex_count -= 1 # remove one for the bounds data
# extract vertex count
vertex_count = int(data[5:8])
vertex_count -= 1 # remove one for the bounds data
# extract bounds
left = ord(data[8:9]) - ord("R")
right = ord(data[9:10]) - ord("R")
# extract bounds
left = ord(data[8:9]) - ord("R")
right = ord(data[9:10]) - ord("R")
char_data[char_code] = {
"width": right - left,
"vertex_count": vertex_count,
"first_vertex_offset": len(vertex_data),
}
char_data[char_code] = {
"width": right - left,
"vertex_count": vertex_count,
"first_vertex_offset": len(vertex_data)
}
# work out expected total length of character data
char_data_len = 10 + (vertex_count * 2)
# work out expected total length of character data
char_data_len = 10 + (vertex_count * 2)
# if there are any newlines within this data then remove them before we
# move on the parse the vertices
while data.find("\n", char_data_len) != -1:
data = data.replace("\n", "", 1)
# if there are any newlines within this data then remove them before we
# move on the parse the vertices
while data.find('\n', char_data_len) != -1:
data = data.replace('\n', '', 1)
for i in range(0, vertex_count):
offset = 10 + (i * 2)
if data[offset:offset + 2] == " R":
# pen up for one
x = -128
y = -128
else:
x = ord(data[offset + 0:offset + 1]) - ord("R") - left
y = ord(data[offset + 1:offset + 2]) - ord("R")
for i in range(0, vertex_count):
offset = 10 + (i * 2)
if data[offset:offset + 2] == " R":
# pen up for one
x = -128
y = -128
else:
x = ord(data[offset + 0:offset + 1]) - ord("R") - left
y = ord(data[offset + 1:offset + 2]) - ord("R")
vertex_data.append(str(x))
vertex_data.append(str(y))
vertex_data.append(str(x))
vertex_data.append(str(y))
data = data[char_data_len:]
data = data[char_data_len:]
font_name = Path(input_filename).stem
font_name = Path(input_filename).stem
# create code for all of the vertices
vertex_code = '''\
# create code for all of the vertices
vertex_code = """\
static const int8_t {font_name}_vertices[{count}] = {{
{vertices}
}};
'''.format(font_name=font_name, count=len(vertex_data), vertices=", ".join(vertex_data))
""".format(
font_name=font_name, count=len(vertex_data), vertices=", ".join(vertex_data)
)
# create code for chars and font
#
# struct hershey_font_glyph_t {
# uint32_t width; // character width
# uint32_t vertex_count; // number of vertices
# int8_t *vertices; // vertex data (indices: even = x, odd = y)
# };
chars = []
for i in range(0, 95):
if i in char_data:
chars.append(
" {{.width={width}, .vertex_count={vertex_count}, .vertices=&{font_name}_vertices[{offset}]}}".format(
width=char_data[i]["width"],
vertex_count=char_data[i]["vertex_count"],
font_name=font_name,
offset=char_data[i]["first_vertex_offset"]
))
else:
chars.append(" {.width=0, .vertex_count=0, .vertices=nullptr}")
# create code for chars and font
#
# struct hershey_font_glyph_t {
# uint32_t width; // character width
# uint32_t vertex_count; // number of vertices
# int8_t *vertices; // vertex data (indices: even = x, odd = y)
# };
chars = []
for i in range(0, 95):
if i in char_data:
chars.append(
" {{.width={width}, .vertex_count={vertex_count}, .vertices=&{font_name}_vertices[{offset}]}}".format(
width=char_data[i]["width"],
vertex_count=char_data[i]["vertex_count"],
font_name=font_name,
offset=char_data[i]["first_vertex_offset"],
)
)
else:
chars.append(" {.width=0, .vertex_count=0, .vertices=nullptr}")
# struct hershey_font_t {
# hershey_font_glyph_t chars[95];
# };
font_code = '''\
# struct hershey_font_t {
# hershey_font_glyph_t chars[95];
# };
font_code = """\
static const hershey_font_t {font_name} {{
.chars = {{
{chars}
}}
}};
'''.format(font_name=font_name, chars=",\n".join(chars))
""".format(
font_name=font_name, chars=",\n".join(chars)
)
print(vertex_code)
print(vertex_code)
print(font_code)
print(font_code)
return """ {{ \"{font_name}\", &{font_name} }}""".format(font_name=font_name)
return """ {{ \"{font_name}\", &{font_name} }}""".format(font_name=font_name)
# create map of font name to font structure
font_list = []
for input_filename in options.file:
input_file = open(input_filename, mode='r')
data = input_file.read()
input_file.close()
input_file = open(input_filename, mode="r")
data = input_file.read()
input_file.close()
font_list.append(convert_font(data))
font_list.append(convert_font(data))
map_code = '''\
map_code = """\
std::map<std::string, const hershey_font_t*> fonts = {{
{font_list}
}};
'''.format(font_list=",\n".join(font_list))
""".format(
font_list=",\n".join(font_list)
)
print(map_code)
print(map_code)
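The record layout described in the comments above can be summarised with a small standalone decoder. This is a sketch only: the decode_record helper and the sample record are made up for illustration and are not part of the converter.

# Sketch of the Hershey record layout described above (not part of the converter).
def decode_record(rec):
    char_id = int(rec[0:5])                 # columns 0-4: glyph id (or dummy 12345)
    vertex_count = int(rec[5:8]) - 1        # columns 5-7, minus the bounds pair
    left = ord(rec[8]) - ord("R")           # bounds pair counts as the first "vertex"
    right = ord(rec[9]) - ord("R")
    vertices = []
    for i in range(vertex_count):
        pair = rec[10 + i * 2:12 + i * 2]
        if pair == " R":                    # " R" marks a pen-up move
            vertices.append((-128, -128))
        else:
            vertices.append((ord(pair[0]) - ord("R") - left,
                             ord(pair[1]) - ord("R")))
    return char_id, right - left, vertices

# Made-up record: dummy id 12345, 4 entries (bounds "JZ" plus 3 vertices, one pen-up).
print(decode_record("12345  4JZRF RRT"))
# -> (12345, 16, [(8, -12), (-128, -128), (8, 2)])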

View file

@@ -85,9 +85,10 @@ STR02 = """_mvdata = memoryview(_data)
def data():
return _mvdata
"""
def write_func(stream, name, arg):
stream.write('def {}():\n return {}\n\n'.format(name, arg))
@@ -123,6 +124,7 @@ def quit(msg):
print(msg)
sys.exit(1)
DESC = """data_to_py.py
Utility to convert an arbitrary binary file to Python source.
Sample usage:
@@ -137,7 +139,6 @@ if __name__ == "__main__":
parser.add_argument('outfile', type=str,
help='Path and name of output file. Must have .py extension.')
args = parser.parse_args()
if not os.path.isfile(args.infile):
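Per the STR02 template shown above, each module generated by data_to_py.py exposes a data() function returning a memoryview over the embedded bytes. A rough sketch of what a generated module looks like and how it is consumed (the byte contents here are made up for illustration):

# Rough shape of a module emitted by data_to_py.py, based on the STR02 template
# above; the _data contents are invented for this example.
_data = b"\x89PNG\r\n\x1a\n"       # the embedded binary, written as a bytes literal
_mvdata = memoryview(_data)

def data():
    return _mvdata

# Consumer side: access the bytes without copying them.
payload = data()
print(len(payload), bytes(payload[:4]))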

View file

@@ -1,6 +1,7 @@
include_directories(${CMAKE_CURRENT_LIST_DIR}/../../)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../")
include(pimoroni_i2c/micropython)
@@ -41,3 +42,5 @@ include(pico_wireless/micropython)
include(plasma/micropython)
include(hub75/micropython)
include(ulab/code/micropython)
include(modules_py/modules_py)

View file

@@ -0,0 +1,20 @@
function (copy_module TARGET SRC DST)
add_custom_command(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/../modules/${DST}.py
COMMAND
cp ${SRC} ${CMAKE_CURRENT_BINARY_DIR}/../modules/${DST}.py
DEPENDS ${src}
)
target_sources(${TARGET} INTERFACE ${CMAKE_CURRENT_BINARY_DIR}/../modules/${DST}.py)
endfunction()
# Create a dummy usermod to hang our .py copies from
add_library(usermod_modules_py INTERFACE)
target_link_libraries(usermod INTERFACE usermod_modules_py)
# .py files to copy from modules_py to ports/rp2/modules
copy_module(usermod_modules_py ${CMAKE_CURRENT_LIST_DIR}/picosystem.py picosystem)
copy_module(usermod_modules_py ${CMAKE_CURRENT_LIST_DIR}/pimoroni.py pimoroni)