Merge branch 'master' into ota_reuse_fix_master

pull/12462/head
Konstantin Hartwich 2024-02-20 09:52:15 +01:00 committed by GitHub
commit 5b36832a2c
No known key found in the database for this signature
GPG key ID: B5690EEEBB952194
5125 changed files with 304316 additions and 139446 deletions


@@ -1,5 +0,0 @@
# Transpiled JavaScript (if any)
dist
# Installed dependencies
node_modules


@@ -1,47 +0,0 @@
# DangerJS pull request automatic review tool - GitHub

## Implementation

The main development is done in the Espressif GitLab project.
The Espressif [GitHub project espressif/esp-idf](https://github.com/espressif/esp-idf) is only a public mirror.
Therefore, all changes and updates to the DangerJS files (`.github/dangerjs`) must be made via an MR in the **GitLab** repository by an Espressif engineer.

When adding a new Danger rule or updating an existing one, it might be a good idea to test it on the developer's fork of the GitHub project. This way, the new feature can be tested using a GitHub action without risk of damaging Espressif's GitHub repository.

Danger for Espressif GitHub is implemented in TypeScript. This makes the code more readable and robust than plain JavaScript.
Compilation to JavaScript code (using `tsc`) is not necessary; Danger handles TypeScript natively.

A good practice is to store each Danger rule in a separate module, and then import these modules into the main Danger file `.github/dangerjs/dangerfile.ts` (see how this is done for the currently present modules when adding a new one).

If a Danger module (new check/rule) uses an external NPM module (e.g. `axios`), be sure to add this dependency to `.github/dangerjs/package.json` and also update `.github/dangerjs/package-lock.json`.

In the GitHub action, `danger` is not installed globally (nor are its dependencies); instead, the `npx` call is used to start the `danger` checks in CI, as sketched below.
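For reference, a minimal sketch of that CI invocation (the command mirrors the `Run DangerJS` step of the workflow this setup was used with):

```sh
# Run from the directory containing package.json and dangerfile.ts;
# npx resolves the locally installed danger, so no global install is needed.
cd .github/dangerjs
npx danger ci --failOnErrors -v
```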
## Adding a new Danger rule

For local development, you can use the following strategy:

#### Install dependencies

```sh
cd .github/dangerjs
npm install
```

(If the IDE still shows compiler/typing errors, reload the IDE window.)

#### Add new code as needed or make updates

#### Test locally

Danger rules can be tested locally (without running the GitHub action pipeline).
To do this, first export the environment variables used by Danger in your local terminal:

```sh
export GITHUB_TOKEN='**************************************'
```

Then you can invoke Danger with:

```sh
cd .github/dangerjs
danger pr https://github.com/espressif/esp-idf/pull/<number_of_pull_request>
```

The result will be displayed in your terminal.


@@ -1,48 +0,0 @@
import { DangerResults } from "danger";
declare const results: DangerResults;
declare const message: (message: string, results?: DangerResults) => void;
declare const markdown: (message: string, results?: DangerResults) => void;

// Import modules with danger rules
// (Modules with checks are stored in ".github/dangerjs/<module_name>.ts". To import them, use path relative to "dangerfile.ts")
import prCommitsTooManyCommits from "./prCommitsTooManyCommits";
import prDescription from "./prDescription";
import prTargetBranch from "./prTargetBranch";
import prInfoContributor from "./prInfoContributor";
import prCommitMessage from "./prCommitMessage";

async function runDangerRules(): Promise<void> {
    // Message to contributor about review and merge process
    const prInfoContributorMessage: string = await prInfoContributor();
    markdown(prInfoContributorMessage);

    // Run danger checks
    prCommitsTooManyCommits();
    prDescription();
    prTargetBranch();
    prCommitMessage();

    // Add success log if no issues
    const dangerFails: number = results.fails.length;
    const dangerWarns: number = results.warnings.length;
    const dangerInfos: number = results.messages.length;
    if (!dangerFails && !dangerWarns && !dangerInfos) {
        return message("Good Job! All checks are passing!");
    }

    // Add retry link
    addRetryLink();
}

runDangerRules();

function addRetryLink(): void {
    const serverUrl: string | undefined = process.env.GITHUB_SERVER_URL;
    const repoName: string | undefined = process.env.GITHUB_REPOSITORY;
    const runId: string | undefined = process.env.GITHUB_RUN_ID;
    const retryLinkUrl: string = `${serverUrl}/${repoName}/actions/runs/${runId}`;
    const retryLink: string = `<sub>:repeat: You can re-run automatic PR checks by retrying the <a href="${retryLinkUrl}">DangerJS action</a></sub>`;
    markdown(retryLink);
}

.github/dangerjs/package-lock.json: 1999 changed lines (generated, vendored)

Diff is too large to display.


@@ -1,18 +0,0 @@
{
    "name": "dangerjs-github",
    "description": "GitHub PR reviewing with DangerJS",
    "main": "dangerfile.ts",
    "keywords": [],
    "author": "",
    "license": "ISC",
    "dependencies": {
        "axios": "^1.3.3",
        "danger": "^11.2.3",
        "request": "^2.88.2",
        "sync-request": "^6.1.0",
        "typescript": "^5.0.3"
    },
    "devDependencies": {
        "@types/node": "^18.15.11"
    }
}


@@ -1,67 +0,0 @@
import { DangerDSLType, DangerResults } from "danger";
declare const danger: DangerDSLType;
declare const warn: (message: string, results?: DangerResults) => void;

interface Commit {
    message: string;
}

/**
 * Check if commit messages are sufficiently descriptive (not too short).
 *
 * Search for commit messages that appear to be automatically generated or temporary messages and report them.
 *
 * @dangerjs WARN
 */
export default function (): void {
    const prCommits: Commit[] = danger.git.commits;

    const detectRegexes: RegExp[] = [
        /^Merge pull request #\d+ from .*/i, // Automatically generated message by GitHub
        /^Merged .+:.+ into .+/i, // Automatically generated message by GitHub
        /^Automatic merge by GitHub Action/i, // Automatically generated message by GitHub
        /^Merge branch '.*' of .+ into .+/i, // Automatically generated message by GitHub
        /^Create\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
        /^Delete\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
        /^Update\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
        /^Initial commit/i, // Automatically generated message by GitHub
        /^WIP.*/i, // Message starts with prefix "WIP"
        /^Cleaned.*/i, // Message starts with "Cleaned", probably temporary
        /^Test:.*/i, // Message starts with "Test:" prefix, probably temporary
        /clean ?up/i, // Message contains "clean up", probably temporary
        /^[^A-Za-z0-9\s].*/, // Message starts with special characters
    ];

    let partMessages: string[] = [];
    for (const commit of prCommits) {
        const commitMessage: string = commit.message;
        const commitMessageTitle: string = commit.message.split("\n")[0];

        // Check if the commit message matches any regex from "detectRegexes"
        if (detectRegexes.some((regex) => commitMessage.match(regex))) {
            partMessages.push(
                `- the commit message \`${commitMessageTitle}\` appears to be a temporary or automatically generated message`
            );
            continue;
        }

        // Check if the commit message is not too short
        const shortCommitMessageThreshold: number = 20; // commit message is considered too short below this number of characters
        if (commitMessage.length < shortCommitMessageThreshold) {
            partMessages.push(
                `- the commit message \`${commitMessageTitle}\` may not be sufficiently descriptive`
            );
        }
    }

    // Create report
    if (partMessages.length) {
        partMessages.sort();
        let dangerMessage = `\nSome issues found for the commit messages in this MR:\n${partMessages.join(
            "\n"
        )}
\nPlease consider updating these commit messages.`;
        warn(dangerMessage);
    }
}


@@ -1,19 +0,0 @@
import { DangerDSLType, DangerResults } from "danger";
declare const danger: DangerDSLType;
declare const message: (message: string, results?: DangerResults) => void;

/**
 * Check that the pull request does not have an excessive number of commits (if so, squashing is suggested).
 *
 * @dangerjs INFO
 */
export default function (): void {
    const tooManyCommitThreshold: number = 2; // above this number of commits, squashing commits is suggested
    const prCommits: number = danger.github.commits.length;
    if (prCommits > tooManyCommitThreshold) {
        return message(
            `You might consider squashing your ${prCommits} commits (simplifying branch history).`
        );
    }
}


@@ -1,19 +0,0 @@
import { DangerDSLType, DangerResults } from "danger";
declare const danger: DangerDSLType;
declare const warn: (message: string, results?: DangerResults) => void;

/**
 * Check if the pull request has a sufficiently detailed description.
 *
 * @dangerjs WARN
 */
export default function (): void {
    const prDescription: string = danger.github.pr.body;
    const shortPrDescriptionThreshold: number = 100; // description is considered too short below this number of characters
    if (prDescription.length < shortPrDescriptionThreshold) {
        return warn(
            "The PR description looks very brief, please check if more details can be added."
        );
    }
}


@@ -1,58 +0,0 @@
import { DangerDSLType } from "danger";
declare const danger: DangerDSLType;

interface Contributor {
    login?: string;
}

const authorLogin = danger.github.pr.user.login;

const messageKnownContributor: string = `
***
👋 **Hi ${authorLogin}**, thank you for another contribution to the \`espressif/esp-idf\` project!

If the change is approved and passes the tests in our internal git repository, it will appear in this public GitHub repository on the next sync.
***
`;

const messageFirstContributor: string = `
***
👋 **Welcome ${authorLogin}**, thank you for your first contribution to the \`espressif/esp-idf\` project!

📘 Please check the [Contributions Guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/contribute/index.html#contributions-guide) for the contribution checklist, information regarding code and documentation style, testing, and other topics.

🖊 Please also make sure you have **read and signed** the [Contributor License Agreement for the espressif/esp-idf project](https://cla-assistant.io/espressif/esp-idf).

#### Pull request review and merge process you can expect

Espressif develops the ESP-IDF project in an internal repository (GitLab). We do welcome contributions in the form of bug reports, feature requests and pull requests via this public GitHub repository.

1. An internal issue is created for the PR and we assign it to the relevant engineer.
2. They review the PR and either approve it or ask you for changes or clarifications.
3. Once the GitHub PR is approved, we synchronize it into our internal git repository.
4. In the internal git repository we do the final review, collect approvals from core owners and make sure all the automated tests are passing.
    - At this point we may make some adjustments to the proposed change, or extend it by adding tests or documentation.
5. If the change is approved and passes the tests, it is merged into the \`master\` branch.
6. On the next sync from the internal git repository, the merged change will appear in this public GitHub repository.
***
`;

/**
 * Check whether the author of the pull request is a known or a first-time contributor, and add a message to the PR with information about the review and merge process.
 */
export default async function (): Promise<string> {
    const contributors = await danger.github.api.repos.listContributors({
        owner: danger.github.thisPR.owner,
        repo: danger.github.thisPR.repo,
    });
    const contributorsData: Contributor[] = contributors.data;
    const knownContributors: (string | undefined)[] = contributorsData.map(
        (contributor: Contributor) => contributor.login
    );
    if (knownContributors.includes(authorLogin)) {
        return messageKnownContributor;
    } else {
        return messageFirstContributor;
    }
}


@@ -1,19 +0,0 @@
import { DangerDSLType, DangerResults } from "danger";
declare const danger: DangerDSLType;
declare const fail: (message: string, results?: DangerResults) => void;

/**
 * Check if the target branch is "master"
 *
 * @dangerjs FAIL
 */
export default function (): void {
    const prTargetBranch: string = danger.github?.pr?.base?.ref;
    if (prTargetBranch !== "master") {
        return fail(`
The target branch for this pull request should be \`master\`.\n
If you would like to add this feature to the release branch, please state this in the PR description and we will consider backporting it.
`);
    }
}


@@ -1,17 +0,0 @@
{
    "compilerOptions": {
        "module": "commonjs",
        "moduleResolution": "node",
        "esModuleInterop": true,
        "target": "es6",
        "noImplicitAny": true,
        "noUnusedParameters": true,
        "strictNullChecks": true,
        "sourceMap": true,
        "removeComments": true,
        "outDir": "./dist"
    },
    "include": [
        "./*.ts"
    ]
}


@@ -1,15 +0,0 @@
version: 2
updates:
  - package-ecosystem: "all"
    directory: "/"
    schedule:
      interval: "weekly"
    ignore:
      - directory: ".gitlab/dangerjs"
        patterns:
          - "package-lock.json"
      - directory: ".github/dangerjs"
        patterns:
          - "package-lock.json"
    # Disable "version updates" (keep only "security updates")
    open-pull-requests-limit: 0


@@ -9,28 +9,19 @@ permissions:
contents: write
jobs:
danger-check:
pull-request-style-linter:
runs-on: ubuntu-latest
defaults:
run:
working-directory: .github/dangerjs
steps:
- name: Check out PR head
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup NodeJS environment
uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
cache-dependency-path: .github/dangerjs/package-lock.json
- name: Install DangerJS dependencies
run: npm install
- name: Run DangerJS
run: npx danger ci --failOnErrors -v
- name: DangerJS pull request linter
uses: espressif/shared-github-dangerjs@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
instructions-gitlab-mirror: 'true'
instructions-contributions-file: 'CONTRIBUTING.md'
instructions-cla-link: 'https://cla-assistant.io/espressif/esp-idf'


@@ -12,7 +12,7 @@ jobs:
name: Sync Issue Comments to Jira
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Sync issue comments to JIRA
uses: espressif/github-actions/sync_issues_to_jira@master
env:


@@ -12,7 +12,7 @@ jobs:
name: Sync issues to Jira
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Sync GitHub issues to Jira project
uses: espressif/github-actions/sync_issues_to_jira@master
env:


@@ -15,7 +15,7 @@ jobs:
name: Sync PRs to Jira
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Sync PRs to Jira project
uses: espressif/github-actions/sync_issues_to_jira@master
with:


@@ -11,7 +11,7 @@ jobs:
(github.event.label.name == 'PR-Sync-Rebase') ||
(github.event.label.name == 'PR-Sync-Update')
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Sync approved PRs to internal codebase
uses: espressif/github-actions/github_pr_to_internal_pr@master
env:


@@ -10,9 +10,11 @@ permissions:
jobs:
pre_commit_check:
runs-on: ubuntu-latest
env:
SKIP: "cleanup-ignore-lists" # Comma-separated string of ignored pre-commit check IDs
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Fetch head and base refs
# This is necessary for pre-commit to check the changes in the PR branch
run: |


@@ -0,0 +1,34 @@
name: Vulnerability scan
on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:
jobs:
  vulnerability-scan:
    strategy:
      # We don't want to run all jobs in parallel, because this would
      # overload NVD and we would get 503
      max-parallel: 1
      matrix:
        # References/branches which should be scanned for vulnerabilities are
        # defined in the VULNERABILITY_SCAN_REFS variable as json list.
        # For example: ['master', 'release/v5.2', 'release/v5.1', 'release/v5.0', 'release/v4.4']
        ref: ${{ fromJSON(vars.VULNERABILITY_SCAN_REFS) }}
    name: Vulnerability scan
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          submodules: recursive
          ref: ${{ matrix.ref }}
      - name: Vulnerability scan
        env:
          SBOM_MATTERMOST_WEBHOOK: ${{ secrets.SBOM_MATTERMOST_WEBHOOK }}
          NVDAPIKEY: ${{ secrets.NVDAPIKEY }}
        uses: espressif/esp-idf-sbom-action@master
        with:
          ref: ${{ matrix.ref }}

.gitignore: 44 changed lines (vendored)

@@ -24,18 +24,6 @@ GPATH
# cache dir
.cache/
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_esp*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Doc build artifacts
docs/_build/
docs/doxygen_sqlite3.db
@@ -44,16 +32,23 @@ docs/doxygen_sqlite3.db
docs/_static/DejaVuSans.ttf
docs/_static/NotoSansSC-Regular.otf
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Unit test app files
tools/unit-test-app/sdkconfig
tools/unit-test-app/sdkconfig.old
tools/unit-test-app/build
tools/unit-test-app/build_*_*/
tools/unit-test-app/output
tools/unit-test-app/test_configs
# Unit Test CMake compile log folder
log_ut_cmake
tools/unit-test-app/sdkconfig
tools/unit-test-app/sdkconfig.old
# test application build files
tools/test_apps/**/build/
@@ -61,7 +56,8 @@ tools/test_apps/**/build_*_*/
tools/test_apps/**/sdkconfig
tools/test_apps/**/sdkconfig.old
TEST_LOGS
TEST_LOGS/
build_summary_*.xml
# gcov coverage reports
*.gcda
@@ -101,8 +97,12 @@ managed_components
# pytest log
pytest_embedded_log/
list_job_*.txt
size_info.txt
list_job*.txt
size_info*.txt
XUNIT_RESULT*.xml
# clang config (for LSP)
.clangd
# Vale
.vale/styles/*


@@ -1,22 +1,10 @@
stages:
- upload_cache
- pre_check
- build
- assign_test
- build_doc
- target_test
- host_test
- test_deploy
- deploy
- post_deploy
workflow:
rules:
# Disable those non-protected push triggered pipelines
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && $CI_PIPELINE_SOURCE == "push"'
when: never
# when running merged result pipelines, it would create a temp commit id. use $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA instead of $CI_COMMIT_SHA.
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha
# when running merged result pipelines, CI_COMMIT_SHA represents the temp commit it created.
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha of the original commit.
- if: $CI_OPEN_MERGE_REQUESTS != null
variables:
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
@@ -27,223 +15,19 @@ workflow:
IS_MR_PIPELINE: 0
- when: always
variables:
# System environment
# Common parameters for the 'make' during CI tests
MAKEFLAGS: "-j5 --no-keep-going"
# GitLab-CI environment
# XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
# Use values from "CI / CD Settings" - "Variables".
# GIT_STRATEGY is not defined here.
# Use an option from "CI / CD Settings" - "General pipelines".
# we will download archive for each submodule instead of clone.
# we don't do "recursive" when fetch submodule as they're not used in CI now.
GIT_SUBMODULE_STRATEGY: none
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
# by default we will fetch all submodules
# jobs can overwrite this variable to only fetch submodules they required
# set to "none" if don't need to fetch submodules
SUBMODULES_TO_FETCH: "all"
# tell build system do not check submodule update as we download archive instead of clone
IDF_SKIP_CHECK_SUBMODULES: 1
IDF_PATH: "$CI_PROJECT_DIR"
BATCH_BUILD: "1"
V: "0"
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
PYTHON_VER: 3.8.17
# Docker images
BOT_DOCKER_IMAGE_TAG: ":latest"
ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env-v5.2:2"
ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env-v5.2:2-1"
QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
TARGET_TEST_ENV_IMAGE: "$CI_DOCKER_REGISTRY/target-test-env-v5.2:2"
SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
PRE_COMMIT_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
# target test config file, used by assign test job
CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
# target test repo parameters
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
# cache python dependencies
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
# Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
# branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
# file from https://dl.espressif.com/dl/esp-idf.
CI_PYTHON_CONSTRAINT_BRANCH: ""
# Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
# Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
# Keep the variable empty when not used.
CI_PYTHON_TOOL_REPO: ""
# Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
# branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
# This is used only if CI_PYTHON_TOOL_REPO is not empty.
CI_PYTHON_TOOL_BRANCH: ""
IDF_CI_BUILD: 1
cache:
# pull only for most of the use cases since it's cache dir.
# Only set "push" policy for "upload_cache" stage jobs
- key: pip-cache
paths:
- .cache/pip
policy: pull
- key: submodule-cache
paths:
- .cache/submodule_archives
policy: pull
.common_before_scripts: &common-before_scripts |
source tools/ci/utils.sh
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
export IDF_MIRROR_PREFIX_MAP=
fi
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
export INCLUDE_NIGHTLY_RUN="1"
fi
# configure cmake related flags
source tools/ci/configure_ci_environment.sh
# add extra python packages
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
# must use after setup_tools_except_target_test
# otherwise the export.sh won't work properly
# download constraint file for dev
if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
mkdir -p ~/.espressif
mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
fi
# Mirror
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
export IDF_MIRROR_PREFIX_MAP=
fi
# install latest python packages
# target test jobs
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
# ttfw jobs
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
run_cmd bash install.sh --enable-ci --enable-ttfw
else
run_cmd bash install.sh --enable-ci --enable-pytest
fi
elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
run_cmd bash install.sh --enable-ci --enable-docs
elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
run_cmd bash install.sh --enable-ci --enable-pytest
else
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
run_cmd bash install.sh --enable-ci
else
run_cmd bash install.sh --enable-ci --enable-pytest
fi
fi
# Install esp-clang if necessary
if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
$IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
fi
source ./export.sh
# Custom clang
if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
wget $CI_CLANG_DISTRO_URL
ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
tar -x -f $ARCH_NAME
export PATH=$PWD/esp-clang/bin:$PATH
fi
# Custom OpenOCD
if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
wget $OOCD_DISTRO_URL
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
tar -x -f $ARCH_NAME
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
export PATH=$PWD/openocd-esp32/bin:$PATH
fi
if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
pip install ./${CI_PYTHON_TOOL_REPO}
rm -rf ${CI_PYTHON_TOOL_REPO}
fi
before_script:
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
.before_script_minimal:
before_script:
- *common-before_scripts
.before_script_macos:
before_script:
- *common-before_scripts
# On macOS, these tools need to be installed
- export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
# This adds tools (compilers) and the version-specific Python environment to PATH
- *setup_tools_and_idf_python_venv
- fetch_submodules
.before_script_build_jobs:
before_script:
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
default:
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout
# Place the default settings in `.gitlab/ci/common.yml` instead
include:
- '.gitlab/ci/danger.yml'
- '.gitlab/ci/common.yml'
- '.gitlab/ci/rules.yml'
- '.gitlab/ci/upload_cache.yml'
- '.gitlab/ci/docs.yml'
- '.gitlab/ci/static-code-analysis.yml'
- '.gitlab/ci/pre_commit.yml'
- '.gitlab/ci/pre_check.yml'
- '.gitlab/ci/build.yml'
- '.gitlab/ci/assign-test.yml'
- '.gitlab/ci/integration_test.yml'
- '.gitlab/ci/host-test.yml'
- '.gitlab/ci/target-test.yml'
- '.gitlab/ci/deploy.yml'
- '.gitlab/ci/test-win.yml'


@@ -47,14 +47,12 @@
* @esp-idf-codeowners/other
/.* @esp-idf-codeowners/tools
/.github/dangerjs/ @esp-idf-codeowners/ci @esp-idf-codeowners/tools
/.github/dependabot.yml @esp-idf-codeowners/ci
/.github/workflows/ @esp-idf-codeowners/ci
/.gitlab-ci.yml @esp-idf-codeowners/ci
/.gitlab/ci/ @esp-idf-codeowners/ci
/.gitlab/dangerjs/ @esp-idf-codeowners/ci @esp-idf-codeowners/tools
/.pre-commit-config.yaml @esp-idf-codeowners/ci
/.readthedocs.yml @esp-idf-codeowners/docs
/.vale.ini @esp-idf-codeowners/docs
/CMakeLists.txt @esp-idf-codeowners/build-config
/COMPATIBILITY*.md @esp-idf-codeowners/peripherals
/CONTRIBUTING.md @esp-idf-codeowners/docs
@@ -86,6 +84,8 @@
/components/esp_bootloader_format/ @esp-idf-codeowners/system @esp-idf-codeowners/app-utilities
/components/esp_coex/ @esp-idf-codeowners/wifi @esp-idf-codeowners/bluetooth @esp-idf-codeowners/ieee802154
/components/esp_common/ @esp-idf-codeowners/system
/components/esp_driver_*/ @esp-idf-codeowners/peripherals
/components/esp_driver_sdmmc/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/storage
/components/esp_eth/ @esp-idf-codeowners/network
/components/esp_event/ @esp-idf-codeowners/system
/components/esp_gdbstub/ @esp-idf-codeowners/debugging
@@ -151,18 +151,19 @@
/components/wpa_supplicant/ @esp-idf-codeowners/wifi @esp-idf-codeowners/app-utilities/mbedtls
/components/xtensa/ @esp-idf-codeowners/system
/docs/ @esp-idf-codeowners/docs
/docs/en/api-guides/jtag-debugging/ @esp-idf-codeowners/debugging
/docs/**/api-reference/bluetooth/ @esp-idf-codeowners/bluetooth
/docs/**/api-reference/network/ @esp-idf-codeowners/network @esp-idf-codeowners/wifi
/docs/**/api-reference/peripherals/ @esp-idf-codeowners/peripherals
/docs/**/api-reference/peripherals/usb* @esp-idf-codeowners/peripherals @esp-idf-codeowners/peripherals/usb
/docs/**/api-reference/protocols/ @esp-idf-codeowners/network @esp-idf-codeowners/app-utilities
/docs/**/api-reference/provisioning/ @esp-idf-codeowners/app-utilities/provisioning
/docs/**/api-reference/storage/ @esp-idf-codeowners/storage
/docs/**/api-reference/system/ @esp-idf-codeowners/system
/docs/**/security/ @esp-idf-codeowners/security
/docs/**/migration-guides/ @esp-idf-codeowners/docs @esp-idf-codeowners/all-maintainers
/docs/ @esp-idf-codeowners/docs
/docs/**/api-guides/tools/ @esp-idf-codeowners/tools
/docs/en/api-guides/jtag-debugging/ @esp-idf-codeowners/debugging
/docs/**/api-reference/bluetooth/ @esp-idf-codeowners/bluetooth
/docs/**/api-reference/network/ @esp-idf-codeowners/network @esp-idf-codeowners/wifi
/docs/**/api-reference/peripherals/ @esp-idf-codeowners/peripherals
/docs/**/api-reference/peripherals/usb* @esp-idf-codeowners/peripherals @esp-idf-codeowners/peripherals/usb
/docs/**/api-reference/protocols/ @esp-idf-codeowners/network @esp-idf-codeowners/app-utilities
/docs/**/api-reference/provisioning/ @esp-idf-codeowners/app-utilities/provisioning
/docs/**/api-reference/storage/ @esp-idf-codeowners/storage
/docs/**/api-reference/system/ @esp-idf-codeowners/system
/docs/**/security/ @esp-idf-codeowners/security
/docs/**/migration-guides/ @esp-idf-codeowners/docs @esp-idf-codeowners/all-maintainers
/examples/README.md @esp-idf-codeowners/docs @esp-idf-codeowners/ci
/examples/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
@@ -173,6 +174,7 @@
/examples/cxx/ @esp-idf-codeowners/system
/examples/ethernet/ @esp-idf-codeowners/network
/examples/get-started/ @esp-idf-codeowners/system
/examples/ieee802154/ @esp-idf-codeowners/ieee802154
/examples/mesh/ @esp-idf-codeowners/wifi
/examples/network/ @esp-idf-codeowners/network @esp-idf-codeowners/wifi
/examples/openthread/ @esp-idf-codeowners/ieee802154


@@ -22,6 +22,13 @@
- [Manifest File to Control the Build/Test apps](#manifest-file-to-control-the-buildtest-apps)
- [Grammar](#grammar)
- [Special Rules](#special-rules)
- [Upload/Download Artifacts to Internal Minio Server](#uploaddownload-artifacts-to-internal-minio-server)
- [Users Without Access to Minio](#users-without-access-to-minio)
- [Users With Access to Minio](#users-with-access-to-minio)
- [Env Vars for Minio](#env-vars-for-minio)
- [Artifacts Types and File Patterns](#artifacts-types-and-file-patterns)
- [Upload](#upload)
- [Download](#download)
## General Workflow
@@ -52,19 +59,17 @@
- `example_test[_esp32/esp32s2/...]`
- `fuzzer_test`
- `host_test`
- `integration_test[_wifi/ble]`
- `integration_test`
- `iperf_stress_test`
- `macos`
- `macos_test`
- `nvs_coverage`
- `submodule`
- `unit_test[_esp32/esp32s2/...]`
- `weekend_test`
- `windows`
There are two general labels (not recommended, since these two labels will trigger a lot of jobs):
- `target_test`: includes all target for `example_test`, `custom_test`, `component_ut`, `unit_test`, `integration_test`
- `target_test`: includes all target for `example_test`, `custom_test`, `component_ut`, `integration_test`
- `all_test`: includes all test labels
### How to trigger a `detached` pipeline without pushing new commits?
@@ -140,10 +145,11 @@ check if there's a suitable `.if-<if-anchor-you-need>` anchor
1. if there is, create a rule following [`rules` Template Naming Rules](#rules-template-naming-rules). For detailed information, please refer to [GitLab Documentation `rules-if`](https://docs.gitlab.com/ee/ci/yaml/README.html#rulesif). Here's an example.
```yaml
.rules:dev:
.rules:patterns:python-files:
rules:
- <<: *if-trigger
- <<: *if-protected
- <<: *if-dev-push
changes: *patterns-python-files
```
2. if there isn't
@@ -192,7 +198,7 @@ if a name has multi phrases, use `-` to concatenate them.
- `target_test`
a combination of `example_test`, `custom_test`, `unit_test`, `component_ut`, `integration_test` and all targets
a combination of `example_test`, `custom_test`, `component_ut`, `integration_test` and all targets
#### `rules` Template Naming Rules
@@ -245,3 +251,75 @@ In ESP-IDF CI, there's a few more special rules are additionally supported to di
- Add MR labels `BUILD_AND_TEST_ALL_APPS`
- Run in protected branches
## Upload/Download Artifacts to Internal Minio Server
### Users Without Access to Minio
If you don't have access to the internal Minio server, you can still download the artifacts from the shared link in the job log.
The log will look like this:
```shell
Pipeline ID : 587355
Job name : build_clang_test_apps_esp32
Job ID : 40272275
Created archive file: 40272275.zip, uploading as 587355/build_dir_without_map_and_elf_files/build_clang_test_apps_esp32/40272275.zip
Please download the archive file includes build_dir_without_map_and_elf_files from [INTERNAL_URL]
```
### Users With Access to Minio
#### Env Vars for Minio
Minio takes these env vars to connect to the server (see the sketch after the list):
- `IDF_S3_SERVER`
- `IDF_S3_ACCESS_KEY`
- `IDF_S3_SECRET_KEY`
- `IDF_S3_BUCKET`
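A minimal local sketch, assuming the handler picks these variables up from the environment; every value below is a placeholder, not a real endpoint or credential:

```shell
# Placeholder values only: substitute the internal server URL,
# credentials, and bucket name you were given.
export IDF_S3_SERVER="https://<internal-minio-server>"
export IDF_S3_ACCESS_KEY="<access-key>"
export IDF_S3_SECRET_KEY="<secret-key>"
export IDF_S3_BUCKET="<bucket-name>"

python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id>
```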
#### Artifacts Types and File Patterns
The artifact types and corresponding file patterns are defined in `tools/ci/artifacts_handler.py`, inside `ArtifactType` and `TYPE_PATTERNS_DICT`.
#### Upload
```shell
python tools/ci/artifacts_handler.py upload
```
will upload the files that match the file patterns to the Minio object storage under the name:
`<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip`
For example, job 39043328 will upload these four files:
- `575500/map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
- `575500/build_dir_without_map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
- `575500/logs/build_pytest_examples_esp32/39043328.zip`
- `575500/size_reports/build_pytest_examples_esp32/39043328.zip`
#### Download
You may run
```shell
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id>
```
to download all files of the pipeline, or
```shell
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern>
```
to download all files with the specified job name or pattern, or
```shell
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern> --type <artifact_type> <artifact_type> ...
```
to download all files with the specified job name or pattern and artifact type(s).
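For example, combining the options above with the IDs from the upload example (the exact spelling of the type values follows `ArtifactType` in `tools/ci/artifacts_handler.py`, so treat `size_reports` below as an illustrative guess):

```shell
# Fetch only the size reports produced by job build_pytest_examples_esp32
# in pipeline 575500; the "size_reports" type name is inferred from the
# upload path shown earlier.
python tools/ci/artifacts_handler.py download --pipeline_id 575500 --job_name build_pytest_examples_esp32 --type size_reports
```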
You may check the detailed documentation with `python tools/ci/artifacts_handler.py download -h`.


@@ -1,69 +0,0 @@
.assign_test_template:
  image: $TARGET_TEST_ENV_IMAGE
  stage: assign_test
  tags:
    - assign_test
  variables:
    SUBMODULES_TO_FETCH: "none"
  artifacts:
    paths:
      - ${TEST_DIR}/test_configs/
      - artifact_index.json
    when: always
    expire_in: 1 week
  script:
    - run_cmd python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py $TEST_TYPE $TEST_DIR -c $CI_TARGET_TEST_CONFIG_FILE -o $TEST_DIR/test_configs

assign_unit_test:
  extends:
    - .assign_test_template
    - .rules:build:unit_test
  needs:
    - job: build_esp_idf_tests_cmake_esp32
      optional: true
    - job: build_esp_idf_tests_cmake_esp32s2
      optional: true
    - job: build_esp_idf_tests_cmake_esp32c2
      optional: true
    - job: build_esp_idf_tests_cmake_esp32c3
      optional: true
    - job: build_esp_idf_tests_cmake_esp32c6
      optional: true
    - job: build_esp_idf_tests_cmake_esp32h2
      optional: true
    - job: build_esp_idf_tests_cmake_esp32s3
      optional: true
  variables:
    TEST_TYPE: unit_test
    TEST_DIR: components/idf_test/unit_test

assign_integration_test:
  extends:
    - .assign_test_template
    - .rules:test:integration_test
    - .before_script_minimal
  image: ${CI_INTEGRATION_TEST_ENV_IMAGE}
  needs:
    - build_ssc_esp32
    - build_ssc_esp32c3
    - build_ssc_esp32c2
  artifacts:
    paths:
      - $TEST_DIR/test_configs
    expire_in: 1 week
  variables:
    TEST_DIR: ${CI_PROJECT_DIR}/tools/ci/integration_test
    BUILD_DIR: ${CI_PROJECT_DIR}/SSC/ssc_bin
    INTEGRATION_TEST_CASE_PATH: "${CI_PROJECT_DIR}/auto_test_script/TestCaseFiles"
    ASSIGN_TEST_CASE_SCRIPT: "${CI_PROJECT_DIR}/auto_test_script/bin/CIAssignTestCases.py"
    PYTHONPATH: ${CI_PROJECT_DIR}/auto_test_script/packages
    GIT_LFS_SKIP_SMUDGE: 1
  script:
    - add_gitlab_ssh_keys
    # clone test script to assign tests
    - retry_failed git clone ${CI_AUTO_TEST_SCRIPT_REPO_URL} auto_test_script
    - python $CHECKOUT_REF_SCRIPT auto_test_script auto_test_script
    - cd auto_test_script
    - ./tools/ci/setup_idfci.sh
    # assign integration test cases
    - python ${ASSIGN_TEST_CASE_SCRIPT} -t ${INTEGRATION_TEST_CASE_PATH} -c $CI_TARGET_TEST_CONFIG_FILE -b ${BUILD_DIR} -o $TEST_DIR/test_configs


@@ -1,5 +1,7 @@
.build_template:
stage: build
extends:
- .after_script:build:ccache:upload-when-fail
image: $ESP_ENV_IMAGE
tags:
- build
@@ -8,65 +10,28 @@
variables:
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
IDF_CCACHE_ENABLE: "1"
after_script:
# Show ccache statistics if enabled globally
- test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
dependencies: []
.build_cmake_template:
extends:
- .build_template
- .before_script_build_jobs
- .before_script:build
- .after_script:build:ccache
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
- job: mr_variables
optional: true # only MR pipelines would have this
- pipeline_variables
artifacts:
paths:
- "**/build*/size.json"
# The other artifacts patterns are defined under tools/ci/artifacts_handler.py
# Now we're uploading/downloading the binary files from our internal storage server
#
# keep the log file to help debug
- "**/build*/build_log.txt"
- "**/build*/*.bin"
# upload to s3 server to save the artifacts size
# - "**/build*/*.map"
# ttfw tests require elf files
- "**/build*/*.elf"
- "**/build*/flasher_args.json"
- "**/build*/flash_project_args"
- "**/build*/config/sdkconfig.json"
# ttfw tests require sdkconfig file
- "**/build*/sdkconfig"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
- list_job_*.txt
# keep the size info to help track the binary size
- size_info.txt
# unit test specific
- components/idf_test/unit_test/*.yml
when: always
expire_in: 4 days
after_script:
# Show ccache statistics if enabled globally
- test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
# upload the binary files to s3 server
- echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to s3 Server"
- shopt -s globstar
# use || true to bypass the no-file error
- zip ${CI_JOB_ID}.zip **/build*/*.bin || true
- zip ${CI_JOB_ID}.zip **/build*/*.elf || true
- zip ${CI_JOB_ID}.zip **/build*/*.map || true
- zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
- zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
- zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
- zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
- zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
- zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
- mc alias set shiny-s3 ${SHINY_S3_SERVER} ${SHINY_S3_ACCESS_KEY} ${SHINY_S3_SECRET_KEY}
- mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip
- echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
- echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
# would be clean up after 4 days
- mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h
- "**/build*/size.json"
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
@@ -80,6 +45,8 @@
examples/wifi/iperf
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
# for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
- python tools/ci/artifacts_handler.py upload
.build_cmake_clang_template:
extends:
@@ -98,333 +65,15 @@
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
$TEST_BUILD_OPTS_EXTRA
- python tools/ci/artifacts_handler.py upload
.build_pytest_template:
extends:
- .build_cmake_template
- .before_script_build_jobs
artifacts:
paths:
- "**/build*/size.json"
- "**/build*/build_log.txt"
- "**/build*/*.bin"
# upload to s3 server to save the artifacts size
# - "**/build*/*.map"
# - "**/build*/*.elf"
- "**/build*/flasher_args.json"
- "**/build*/flash_project_args"
- "**/build*/config/sdkconfig.json"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
- list_job_*.txt
- size_info.txt
when: always
expire_in: 4 days
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
.build_pytest_no_jtag_template:
extends: .build_pytest_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and not jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
.build_pytest_jtag_template:
extends:
- .build_cmake_template
- .before_script_build_jobs
artifacts:
paths:
- "**/build*/size.json"
- "**/build*/build_log.txt"
- "**/build*/*.bin"
# upload to s3 server to save the artifacts size
# - "**/build*/*.map"
- "**/build*/*.elf" # need elf for gdb
- "**/build*/flasher_args.json"
- "**/build*/flash_project_args"
- "**/build*/config/sdkconfig.json"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
- list_job_*.txt
- size_info.txt
when: always
expire_in: 4 days
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
build_pytest_examples_esp32:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32
parallel: 6
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_pytest_examples_esp32s2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s2
parallel: 3
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_pytest_examples_esp32s3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_pytest_examples_esp32c3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_pytest_examples_esp32c2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c2
parallel: 2
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_pytest_examples_jtag: # for all targets
extends:
- .build_pytest_jtag_template
- .rules:build:example_test-esp32
variables:
IDF_TARGET: all
TEST_DIR: examples
build_pytest_examples_esp32c6:
extends:
- .build_pytest_template
- .rules:build:example_test-esp32c6
parallel: 2
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_pytest_examples_esp32h2:
extends:
- .build_pytest_template
- .rules:build:example_test-esp32h2
parallel: 2
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_pytest_components_esp32:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32
parallel: 5
variables:
IDF_TARGET: esp32
TEST_DIR: components
build_pytest_components_esp32s2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s2
parallel: 4
variables:
IDF_TARGET: esp32s2
TEST_DIR: components
build_pytest_components_esp32s3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: components
build_pytest_components_esp32c3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: components
build_pytest_components_esp32c2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c2
parallel: 3
variables:
IDF_TARGET: esp32c2
TEST_DIR: components
build_pytest_components_esp32c6:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c6
parallel: 3
variables:
IDF_TARGET: esp32c6
TEST_DIR: components
build_pytest_components_esp32h2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32h2
parallel: 4
variables:
IDF_TARGET: esp32h2
TEST_DIR: components
build_only_components_apps:
extends:
- .build_cmake_template
- .rules:build:component_ut
parallel: 5
script:
- set_component_ut_vars
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $COMPONENT_UT_DIRS -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
.build_pytest_test_apps_template:
extends: .build_pytest_template
artifacts:
paths:
- "**/build*/size.json"
- "**/build*/build_log.txt"
- "**/build*/*.bin"
# upload to s3 server to save the artifacts size
# - "**/build*/*.map"
# pytest test apps requires elf files for coredump tests
- "**/build*/*.elf"
- "**/build*/flasher_args.json"
- "**/build*/flash_project_args"
- "**/build*/config/sdkconfig.json"
- "**/build*/bootloader/*.elf"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
- "**/build*/project_description.json"
- list_job_*.txt
- size_info.txt
when: always
expire_in: 4 days
build_pytest_test_apps_esp32:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32
variables:
IDF_TARGET: esp32
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s2:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32s2
variables:
IDF_TARGET: esp32s2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s3:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32s3
parallel: 2
variables:
IDF_TARGET: esp32s3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c3:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32c3
variables:
IDF_TARGET: esp32c3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c2:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32c2
variables:
IDF_TARGET: esp32c2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c6:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32c6
variables:
IDF_TARGET: esp32c6
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32h2:
extends:
- .build_pytest_test_apps_template
- .rules:build:custom_test-esp32h2
variables:
IDF_TARGET: esp32h2
TEST_DIR: tools/test_apps
build_only_tools_test_apps:
extends:
- .build_cmake_template
- .rules:build:custom_test
parallel: 9
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py tools/test_apps -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
######################
# build_template_app #
######################
.build_template_app_template:
extends:
- .build_template
- .before_script_build_jobs
- .before_script:build
variables:
LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
@@ -432,12 +81,10 @@ build_only_tools_test_apps:
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
BUILD_COMMAND_ARGS: ""
artifacts:
when: always
paths:
- log_template_app/*
- size_info.txt
- build_template_app/**/size.json
expire_in: 1 week
script:
# Set the variable for 'esp-idf-template' testing
- ESP_IDF_TEMPLATE_GIT=${ESP_IDF_TEMPLATE_GIT:-"https://github.com/espressif/esp-idf-template.git"}
@@ -456,254 +103,32 @@ fast_template_app:
- .build_template_app_template
- .rules:build:target_test
stage: pre_check
tags: [fast_run, shiny]
variables:
BUILD_COMMAND_ARGS: "-p"
#------------------------------------------------------------------------------
.build_ssc_template:
extends:
- .build_template
- .rules:build:integration_test
needs:
- job: fast_template_app
artifacts: false
artifacts:
paths:
- SSC/build/log/*
- SSC/build/**/*.log
- SSC/build/*log.txt
- SSC/ssc_bin
when: always
expire_in: 1 week
script:
- retry_failed git clone $SSC_REPOSITORY
- python $CHECKOUT_REF_SCRIPT SSC SSC
- cd SSC
- MAKEFLAGS= ./ci_build_ssc.sh $TARGET_NAME
build_ssc_esp32:
extends: .build_ssc_template
parallel: 3
variables:
TARGET_NAME: "ESP32"
build_ssc_esp32s2:
extends: .build_ssc_template
parallel: 2
variables:
TARGET_NAME: "ESP32S2"
build_ssc_esp32c2:
extends: .build_ssc_template
parallel: 2
variables:
TARGET_NAME: "ESP32C2"
build_ssc_esp32c3:
extends: .build_ssc_template
parallel: 3
variables:
TARGET_NAME: "ESP32C3"
build_ssc_esp32s3:
extends: .build_ssc_template
parallel: 3
variables:
TARGET_NAME: "ESP32S3"
build_ssc_esp32c6:
extends: .build_ssc_template
parallel: 3
variables:
TARGET_NAME: "ESP32C6"
build_ssc_esp32h2:
extends: .build_ssc_template
parallel: 2
variables:
TARGET_NAME: "ESP32H2"
.build_esp_idf_tests_cmake_template:
extends:
- .build_cmake_template
- .before_script_build_jobs
artifacts:
paths:
- "**/build*/size.json"
- "**/build*/build_log.txt"
- "**/build*/*.bin"
# upload to s3 server to save the artifacts size
# - "**/build*/*.map"
# ttfw tests require elf files
- "**/build*/*.elf"
- "**/build*/flasher_args.json"
- "**/build*/flash_project_args"
- "**/build*/config/sdkconfig.json"
- "**/build*/sdkconfig"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
- list_job_*.txt
- size_info.txt
- components/idf_test/unit_test/*.yml
when: always
expire_in: 4 days
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py tools/unit-test-app -v
-t $IDF_TARGET
--config "configs/*="
--copy-sdkconfig
--preserve-all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
- run_cmd python tools/unit-test-app/tools/UnitTestParser.py tools/unit-test-app ${CI_NODE_INDEX:-1}
build_esp_idf_tests_cmake_esp32:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32
variables:
IDF_TARGET: esp32
build_esp_idf_tests_cmake_esp32s2:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32s2
variables:
IDF_TARGET: esp32s2
build_esp_idf_tests_cmake_esp32s3:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32s3
variables:
IDF_TARGET: esp32s3
build_esp_idf_tests_cmake_esp32c2:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32c2
variables:
IDF_TARGET: esp32c2
build_esp_idf_tests_cmake_esp32c3:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32c3
variables:
IDF_TARGET: esp32c3
build_esp_idf_tests_cmake_esp32c6:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32c6
variables:
IDF_TARGET: esp32c6
build_esp_idf_tests_cmake_esp32h2:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32h2
variables:
IDF_TARGET: esp32h2
build_esp_idf_tests_cmake_esp32p4:
extends:
- .build_esp_idf_tests_cmake_template
- .rules:build:unit_test-esp32p4
variables:
IDF_TARGET: esp32p4
build_examples_cmake_esp32:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32
parallel: 8
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_examples_cmake_esp32s2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s2
parallel: 7
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_examples_cmake_esp32s3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s3
parallel: 7
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_examples_cmake_esp32c2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c2
parallel: 6
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_examples_cmake_esp32c3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c3
parallel: 6
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_examples_cmake_esp32c6:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c6
parallel: 6
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_examples_cmake_esp32h2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32h2
parallel: 4
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_examples_cmake_esp32p4:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32p4
parallel: 2
variables:
IDF_TARGET: esp32p4
TEST_DIR: examples
########################################
# Clang Build Apps Without Tests Cases #
########################################
build_clang_test_apps_esp32:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32
- .rules:build
variables:
IDF_TARGET: esp32
build_clang_test_apps_esp32s2:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s2
- .rules:build
variables:
IDF_TARGET: esp32s2
build_clang_test_apps_esp32s3:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s3
- .rules:build
variables:
IDF_TARGET: esp32s3
@@ -720,31 +145,35 @@ build_clang_test_apps_esp32s3:
build_clang_test_apps_esp32c3:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c3
- .rules:build
variables:
IDF_TARGET: esp32c3
build_clang_test_apps_esp32c2:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c2
- .rules:build
variables:
IDF_TARGET: esp32c2
build_clang_test_apps_esp32c6:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c6
- .rules:build
# TODO: c6 builds fail in master due to missing headers
allow_failure: true
variables:
IDF_TARGET: esp32c6
######################
# Build System Tests #
######################
.test_build_system_template:
stage: host_test
extends:
- .build_template
- .rules:build:check
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
@@ -752,8 +181,10 @@ build_clang_test_apps_esp32c6:
script:
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
- cd ${IDF_PATH}/tools/test_build_system
- python ${IDF_PATH}/tools/ci/get_known_failure_cases_file.py
- pytest --parallel-count ${CI_NODE_TOTAL:-1} --parallel-index ${CI_NODE_INDEX:-1}
--work-dir ${CI_PROJECT_DIR}/test_build_system --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
pytest_build_system:
extends: .test_build_system_template
@@ -762,7 +193,6 @@ pytest_build_system:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@@ -770,7 +200,7 @@ pytest_build_system:
pytest_build_system_macos:
extends:
- .test_build_system_template
- .before_script_macos
- .before_script:build:macos
- .rules:build:macos
tags:
- macos_shell
@@ -779,14 +209,13 @@ pytest_build_system_macos:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
build_docker:
extends:
- .before_script_minimal
- .before_script:minimal
- .rules:build:docker
stage: host_test
needs: []
@@ -814,6 +243,51 @@ build_template_app:
- .build_template_app_template
- .rules:build
stage: host_test
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
####################
# Dynamic Pipeline #
####################
generate_build_child_pipeline:
extends:
- .build_template
tags: [fast_run, shiny]
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- pipeline_variables
- check_test_cases_env_markers_and_required_runners
artifacts:
paths:
- build_child_pipeline.yml
- test_related_apps.txt
- non_test_related_apps.txt
script:
- run_cmd python tools/ci/dynamic_pipelines/scripts/generate_build_child_pipeline.py
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
build_child_pipeline:
stage: build
needs:
- job: fast_template_app
optional: true
artifacts: false
- pipeline_variables
- generate_build_child_pipeline
variables:
IS_MR_PIPELINE: $IS_MR_PIPELINE
MR_MODIFIED_COMPONENTS: $MR_MODIFIED_COMPONENTS
MR_MODIFIED_FILES: $MR_MODIFIED_FILES
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
BUILD_AND_TEST_ALL_APPS: $BUILD_AND_TEST_ALL_APPS
# https://gitlab.com/gitlab-org/gitlab/-/issues/214340
inherit:
variables: false
trigger:
include:
- artifact: build_child_pipeline.yml
job: generate_build_child_pipeline
strategy: depend


@ -0,0 +1,365 @@
#####################
# Default Variables #
#####################
stages:
- upload_cache
- pre_check
- build
- assign_test
- target_test
- host_test
- build_doc
- test_deploy
- deploy
- post_deploy
variables:
# System environment
# Common parameters for the 'make' during CI tests
MAKEFLAGS: "-j5 --no-keep-going"
# GitLab-CI environment
# now we have pack-objects cache, so clone strategy is faster than fetch
GIT_STRATEGY: clone
# we download an archive for each submodule instead of cloning.
# we don't fetch submodules recursively, as nested submodules are not used in CI now.
GIT_SUBMODULE_STRATEGY: none
# since we're using merged-result pipelines, the last commit should work for most cases
GIT_DEPTH: 1
# --no-recurse-submodules: we use cache for submodules
# --prune --prune-tags: in case remote branch or tag is force pushed
GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags"
# we're using .cache folder for caches
GIT_CLEAN_FLAGS: -ffdx -e .cache/
LATEST_GIT_TAG: v5.3-dev
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
# by default we fetch all submodules
# jobs can override this variable to fetch only the submodules they require
# set to "none" if no submodules need to be fetched
SUBMODULES_TO_FETCH: "all"
# tell the build system not to check for submodule updates, as we download archives instead of cloning
IDF_SKIP_CHECK_SUBMODULES: 1
IDF_PATH: "$CI_PROJECT_DIR"
V: "0"
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
PYTHON_VER: 3.8.17
# Docker images
ESP_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-env-v5.3:1"
ESP_IDF_DOC_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-doc-env-v5.3:1-1"
TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v5.3:1"
SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
PRE_COMMIT_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-pre-commit:1"
# target test repo parameters
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
# cache python dependencies
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
# Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
# branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
# file from https://dl.espressif.com/dl/esp-idf.
CI_PYTHON_CONSTRAINT_BRANCH: ""
# Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.3.txt"
# Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
# Keep the variable empty when not used.
CI_PYTHON_TOOL_REPO: ""
# Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
# branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
# This is used only if CI_PYTHON_TOOL_REPO is not empty.
CI_PYTHON_TOOL_BRANCH: ""
# Set this variable to specify the file name for the known failure cases.
KNOWN_FAILURE_CASES_FILE_NAME: "master.txt"
IDF_CI_BUILD: 1
################################################
# `before_script` and `after_script` Templates #
################################################
.common_before_scripts: &common-before_scripts |
source tools/ci/utils.sh
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
export IDF_MIRROR_PREFIX_MAP=
fi
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
export INCLUDE_NIGHTLY_RUN="1"
export NIGHTLY_RUN="1"
fi
# configure cmake related flags
source tools/ci/configure_ci_environment.sh
# add extra python packages
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
# must be used after setup_tools_except_target_test,
# otherwise export.sh won't work properly
# download constraint file for dev
if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
mkdir -p ~/.espressif
mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
fi
# Mirror
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
export IDF_MIRROR_PREFIX_MAP=
fi
# install latest python packages
# target test jobs
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
run_cmd bash install.sh --enable-ci --enable-pytest
elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
run_cmd bash install.sh --enable-ci --enable-docs
elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
run_cmd bash install.sh --enable-ci --enable-pytest
else
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
run_cmd bash install.sh --enable-ci
else
run_cmd bash install.sh --enable-ci --enable-pytest
fi
fi
# Install esp-clang if necessary
if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
$IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
fi
# Install QEMU if necessary
if [[ ! -z "$INSTALL_QEMU" ]]; then
$IDF_PATH/tools/idf_tools.py --non-interactive install qemu-xtensa qemu-riscv32
fi
# Since version 3.21, CMake passes source files and include dirs to ninja using absolute paths.
# This is needed for pytest junit reports.
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
source ./export.sh
# Custom clang
if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
wget $CI_CLANG_DISTRO_URL
ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
tar -x -f $ARCH_NAME
export PATH=$PWD/esp-clang/bin:$PATH
fi
# Custom OpenOCD
if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
wget $OOCD_DISTRO_URL
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
tar -x -f $ARCH_NAME
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
export PATH=$PWD/openocd-esp32/bin:$PATH
fi
if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
pip install ./${CI_PYTHON_TOOL_REPO}
rm -rf ${CI_PYTHON_TOOL_REPO}
fi
.show_ccache_statistics: &show_ccache_statistics |
# Show ccache statistics if enabled globally
test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
.upload_failed_job_log_artifacts: &upload_failed_job_log_artifacts |
if [ $CI_JOB_STATUS = "failed" ]; then
python tools/ci/artifacts_handler.py upload --type logs
fi
.before_script:minimal:
before_script:
- *common-before_scripts
.before_script:build:macos:
before_script:
- *common-before_scripts
# On macOS, these tools need to be installed
- export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
# This adds tools (compilers) and the version-specific Python environment to PATH
- *setup_tools_and_idf_python_venv
- fetch_submodules
.before_script:build:
before_script:
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
.after_script:build:ccache:
after_script:
- *show_ccache_statistics
.after_script:build:ccache:upload-when-fail:
after_script:
- *show_ccache_statistics
- *upload_failed_job_log_artifacts
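# The `before_script`/`after_script` templates above are composed from plain
# YAML anchors: a block defined once with `&name` is spliced into jobs with
# `*name`. A minimal sketch of the pattern (names are hypothetical, not jobs
# from this file):
#
#   .say_hello: &say-hello |
#     echo "hello from ${CI_JOB_NAME}"
#
#   example_job:
#     before_script:
#       - *say-hello
#
# Anchors are resolved by the YAML parser itself, so unlike `extends:` they
# only work within a single file.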
##############################
# Git Strategy Job Templates #
##############################
.git_init: &git_init |
mkdir -p "${CI_PROJECT_DIR}"
cd "${CI_PROJECT_DIR}"
git init
.git_fetch_from_mirror_url_if_exists: &git_fetch_from_mirror_url_if_exists |
# check whether a mirror is set
if [ -n "${LOCAL_GITLAB_HTTPS_HOST:-}" ] && [ -n "${ESPCI_TOKEN:-}" ]; then
MIRROR_REPO_URL="https://bot:${ESPCI_TOKEN}@${LOCAL_GITLAB_HTTPS_HOST}/${CI_PROJECT_PATH}"
elif [ -n "${LOCAL_GIT_MIRROR:-}" ]; then
MIRROR_REPO_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"
fi
# fetch from mirror first if set
if [ -n "${MIRROR_REPO_URL:-}" ]; then
if git remote -v | grep origin; then
git remote set-url origin "${MIRROR_REPO_URL}"
else
git remote add origin "${MIRROR_REPO_URL}"
fi
# mirror url may fail with authentication issue
git fetch origin --no-recurse-submodules || true
fi
# set remote url to CI_REPOSITORY_URL
if git remote -v | grep origin; then
git remote set-url origin "${CI_REPOSITORY_URL}"
else
git remote add origin "${CI_REPOSITORY_URL}"
fi
.git_checkout_fetch_head: &git_checkout_fetch_head |
git checkout FETCH_HEAD
git clean ${GIT_CLEAN_FLAGS}
# git diff requires two commits; which ones depends on the CI env vars available
#
# By default, we use git strategy "clone" with depth 1 to speed up the clone process.
# But for jobs that require running `git diff`, we need to fetch more commits to get the correct diffs.
#
# Since there's no way to get the correct git_depth before the job starts,
# we can't set `GIT_DEPTH` in the job definition.
#
# Set git strategy to "none" and fetch manually instead.
.before_script:fetch:git_diff:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
- |
# merged results pipelines, by default
if [[ -n $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA)
# merge request pipelines, when the MR has conflicts
elif [[ -n $CI_MERGE_REQUEST_DIFF_BASE_SHA ]]; then
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA)
# other pipelines, like the protected branches pipelines
elif [[ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]]; then
git fetch origin $CI_COMMIT_BEFORE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA)
else
# pipeline source could be web, scheduler, etc.
git fetch origin $CI_COMMIT_SHA --depth=2 ${GIT_FETCH_EXTRA_FLAGS}
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA)
fi
- *git_checkout_fetch_head
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
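# A job extending this template can read the changed files from
# $GIT_DIFF_OUTPUT, which the before_script exports into the job shell.
# A hypothetical sketch, for illustration only:
#
#   example_check_changed_files:
#     extends:
#       - .before_script:fetch:git_diff
#     script:
#       - echo "$GIT_DIFF_OUTPUT" | xargs -r grep -l "TODO" || true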
# git describe requires commit history back to the latest tag
.before_script:fetch:git_describe:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
- git fetch origin refs/tags/"${LATEST_GIT_TAG}":refs/tags/"${LATEST_GIT_TAG}" --depth=1
- git repack -d
- git fetch origin $PIPELINE_COMMIT_SHA --shallow-since=$(git log -1 --format=%as "${LATEST_GIT_TAG}")
- *git_checkout_fetch_head
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
# target test runners may be located in different places
# for runners with a git mirror set, we fetch from the mirror first, then fetch the HEAD commit
.before_script:fetch:target_test:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
- git fetch origin "${CI_COMMIT_SHA}" --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
- *git_checkout_fetch_head
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
# no submodules
#############
# `default` #
#############
default:
cache:
# pull only for most use cases, since it's a cache dir.
# Only set the "push" policy for "upload_cache" stage jobs
- key: pip-cache-${LATEST_GIT_TAG}
fallback_keys:
- pip-cache
paths:
- .cache/pip
policy: pull
- key: submodule-cache-${LATEST_GIT_TAG}
fallback_keys:
- submodule-cache
paths:
- .cache/submodule_archives
policy: pull
before_script:
- *common-before_scripts
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
artifacts:
expire_in: 1 week
when: always
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout
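# As the cache comment above notes, only "upload_cache" stage jobs override
# the pull-only policy. A minimal sketch of such an override (the real jobs
# live in the upload-*-cache definitions later in this commit):
#
#   example_upload_pip_cache:
#     stage: upload_cache
#     cache:
#       - key: pip-cache-${LATEST_GIT_TAG}
#         paths:
#           - .cache/pip
#         policy: push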

@ -0,0 +1,18 @@
# External DangerJS
include:
- project: espressif/shared-ci-dangerjs
ref: master
file: danger.yaml
run-danger-mr-linter:
stage: pre_check
variables:
GIT_STRATEGY: none # no repo checkout
ENABLE_CHECK_AREA_LABELS: 'true'
ENABLE_CHECK_DOCS_TRANSLATION: 'true'
ENABLE_CHECK_RELEASE_NOTES_DESCRIPTION: 'true'
ENABLE_CHECK_UPDATED_CHANGELOG: 'false'
before_script: []
cache: []
tags:
- dangerjs

@ -10,7 +10,8 @@ extra_default_build_targets:
- esp32p4
bypass_check_test_targets:
- esp32p4
- esp32c5
# - esp32p4
#
# These lines would
# - enable the README.md check for esp32c6. Don't forget to add the build jobs in .gitlab/ci/build.yml

@ -5,13 +5,13 @@
- esp32c3
- esp32c2
- esp32c6
- esp32c5
- esp32h2
- esp32p4
.target_test: &target_test
- example_test
- custom_test
- unit_test
- component_ut
##############
@ -66,147 +66,6 @@
included_in:
- build:check
# ---------------
# Build Test Jobs
# ---------------
"build:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels:
- build
patterns:
- build_components
- build_system
- build_target_test
- downloadable-tools
included_in:
- "build:{0}"
- build:target_test
build:integration_test:
labels:
- build
patterns:
- build_components
- build_system
included_in:
- build:target_test
####################
# Target Test Jobs #
####################
"test:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels: # For each rule, use labels <test_type> and <test_type>-<target>
- "{0}"
- "{0}_{1}"
- target_test
patterns: # For each rule, use patterns <test_type> and build-<test_type>
- "{0}"
- "build-{0}"
included_in: # Parent rules
- "build:{0}"
- "build:{0}-{1}"
- build:target_test
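# For illustration of the templating above: "test:{0}-{1}" combined with the
# matrix (*target_test, *all_targets) is expanded by the rule-generating
# tooling into one concrete entry per combination, presumably along these
# lines for example_test on esp32 (a sketch, not a literal entry):
#
#   test:example_test-esp32:
#     labels:
#       - example_test
#       - example_test_esp32
#       - target_test
#     patterns:
#       - example_test
#       - build-example_test
#     included_in:
#       - build:example_test
#       - build:example_test-esp32
#       - build:target_test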
# -------------
# Special Cases
# -------------
"test:component_ut-{0}": # component_ut will trigger by unit_test as well, since now we have 2 kinds of UT
matrix:
- *all_targets
labels:
- component_ut
- "component_ut_{0}"
- unit_test
- "unit_test_{0}"
- target_test
patterns:
- component_ut
- "build-component_ut-{0}"
included_in:
- build:component_ut
- "build:component_ut-{0}"
- build:target_test
# To reduce usage of the specific runners,
# do not create these jobs via the default patterns on development branches.
# They can be triggered by labels or related changes.
"test:{0}-{1}-{2}":
matrix:
- *target_test
- *all_targets
- - wifi # pytest*wifi*
- ethernet # pytest*ethernet*
- sdio # pytest*sdio*
- usb # USB Device & Host tests
- adc # pytest*adc*
- i154
- flash_multi
- ecdsa
- nvs_encr_hmac
patterns:
- "{0}-{1}-{2}"
- "{0}-{2}"
- "target_test-{2}"
labels:
- "{0}_{1}"
- "{0}"
- target_test
included_in:
- "build:{0}-{1}"
- "build:{0}"
- build:target_test
# For example_test*flash_encryption_wifi_high_traffic jobs
# set `INCLUDE_NIGHTLY_RUN` variable when triggered on development branches
"test:example_test-{0}-include_nightly_run-rule":
matrix:
- - esp32
- esp32c3
specific_rules:
- "if-example_test-ota-include_nightly_run-rule"
included_in:
- "build:example_test-{0}"
- "build:example_test"
- build:target_test
# For i154 runners
"test:example_test-i154":
patterns:
- "example_test-i154"
- "target_test-i154"
labels:
- target_test
- example_test
included_in:
- "build:example_test-esp32s3"
- "build:example_test-esp32c6"
- "build:example_test-esp32h2"
- "build:example_test"
- build:target_test
"test:integration_test_{0}":
matrix:
- - wifi
- ble
labels:
- integration_test_{0}
- integration_test
- target_test
patterns:
- integration_test-{0}
- target_test-{0}
# - maybe others
included_in:
- test:integration_test
- build:integration_test
- build:target_test
"test:host_test":
labels:
- host_test
@ -226,12 +85,8 @@ build:integration_test:
labels:
- nvs_coverage
"labels-protected:lan8720": # UT # FIXME: IDFCI-1176 temporary run this on master/release or with label
"labels:windows_pytest_build_system":
labels:
- lan8720
included_in:
- build:unit_test
- build:unit_test-esp32
- build:target_test
- build:component_ut
- build:component_ut-esp32
- windows
specific_rules:
- if-schedule-test-build-system-windows

@ -1,8 +1,7 @@
.deploy_job_template:
stage: deploy
image: $ESP_ENV_IMAGE
tags:
- deploy
tags: [ deploy ]
# Check this before push_to_github
check_submodule_sync:
@ -10,11 +9,11 @@ check_submodule_sync:
- .deploy_job_template
- .rules:test:submodule
stage: test_deploy
tags:
- github_sync
tags: [ brew, github_sync ]
retry: 2
variables:
GIT_STRATEGY: clone
# for brew runners, we always set GIT_STRATEGY to fetch
GIT_STRATEGY: fetch
SUBMODULES_TO_FETCH: "none"
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
dependencies: []
@ -31,9 +30,16 @@ check_submodule_sync:
push_to_github:
extends:
- .deploy_job_template
- .before_script_minimal
- .rules:protected-no_label
dependencies: []
- .before_script:minimal
- .rules:push_to_github
needs:
- check_submodule_sync
tags: [ brew, github_sync ]
variables:
# for brew runners, we always set GIT_STRATEGY to fetch
GIT_STRATEGY: fetch
# github also needs the full record of commits
GIT_DEPTH: 0
script:
- add_github_ssh_keys
- git remote remove github &>/dev/null || true
@ -43,48 +49,13 @@ push_to_github:
deploy_update_SHA_in_esp-dockerfiles:
extends:
- .deploy_job_template
- .before_script:minimal
- .rules:protected-no_label-always
dependencies: []
variables:
GIT_DEPTH: 2
tags: [ shiny, build ]
script:
- 'curl --header "PRIVATE-TOKEN: ${ESPCI_SCRIPTS_TOKEN}" -o create_MR_in_esp_dockerfile.sh $GITLAB_HTTP_SERVER/api/v4/projects/1260/repository/files/create_MR_in_esp_dockerfile%2Fcreate_MR_in_esp_dockerfile.sh/raw\?ref\=master'
- chmod +x create_MR_in_esp_dockerfile.sh
- ./create_MR_in_esp_dockerfile.sh
deploy_test_result:
extends:
- .deploy_job_template
- .before_script_minimal
- .rules:ref:master-always
image: $CI_DOCKER_REGISTRY/bot-env:1
dependencies: []
tags:
- deploy_test
artifacts:
when: always
paths:
- ${CI_PROJECT_DIR}/test-management/*.log
expire_in: 1 week
variables:
BOT_ACCOUNT_CONFIG_FILE: "${CI_PROJECT_DIR}/test-management/Config/Account.local.yml"
TEST_RESULTS_PATH: "${CI_PROJECT_DIR}/TEST_RESULTS"
script:
- add_gitlab_ssh_keys
- export GIT_SHA=$(echo ${CI_COMMIT_SHA} | cut -c 1-8)
- export REV_COUNT=$(git rev-list --count ${GIT_SHA} --)
- export SUMMARY="IDF CI test result for $GIT_SHA (r${REV_COUNT})"
# Download test result
- export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
- python3 ${IDF_PATH}/tools/ci/get_all_test_results.py --path ${TEST_RESULTS_PATH} --include_retried
- if [[ -z $(find ${TEST_RESULTS_PATH} -name "*.xml") ]]; then exit 0; fi
# Clone test-management repo
- retry_failed git clone $TEST_MANAGEMENT_REPO
- python3 $CHECKOUT_REF_SCRIPT test-management test-management
- cd test-management
- echo $BOT_JIRA_ACCOUNT > ${BOT_ACCOUNT_CONFIG_FILE}
# Make sure all requirements are installed
- pip3 install -r requirements.txt
# Update test cases
- python3 ImportTestCase.py $JIRA_TEST_MANAGEMENT_PROJECT from_xml -d ${TEST_RESULTS_PATH} -r $GIT_SHA -l IDFCI
# update test results
- python3 ImportTestResult.py -r "$GIT_SHA (r${REV_COUNT})" -j $JIRA_TEST_MANAGEMENT_PROJECT -s "$SUMMARY" -l IDFCI -p ${TEST_RESULTS_PATH} --pipeline_url ${CI_PIPELINE_URL}
# May need a long time to upload all test results.
timeout: 4 hours

@ -25,6 +25,9 @@
.if-protected-no_label: &if-protected-no_label
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/) && $BOT_TRIGGER_WITH_LABEL == null'
.if-qa-test-tag: &if-qa-test-tag
if: '$CI_COMMIT_TAG =~ /^qa-test/'
.if-label-build_docs: &if-label-build_docs
if: '$BOT_LABEL_BUILD_DOCS || $CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*build_docs(?:,[^,\n\r]+)*$/i'
@ -32,18 +35,31 @@
if: '$BOT_LABEL_DOCS_FULL || $CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*docs_full(?:,[^,\n\r]+)*$/i'
.if-dev-push: &if-dev-push
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
.if-schedule: &if-schedule
if: '$CI_PIPELINE_SOURCE == "schedule"'
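# Note on the label regexes above: a pattern such as
# /^(?:[^,\n\r]+,)*build_docs(?:,[^,\n\r]+)*$/i matches "build_docs" only as a
# complete entry of the comma-separated $CI_MERGE_REQUEST_LABELS value, so
# "docs,build_docs,needs_review" and plain "BUILD_DOCS" match, while
# "build_docs_foo" does not.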
.doc-rules:build:docs-full:
rules:
- <<: *if-protected
- <<: *if-qa-test-tag
when: never
- <<: *if-schedule
- <<: *if-label-build_docs
- <<: *if-label-docs_full
- <<: *if-dev-push
changes: *patterns-docs-full
.doc-rules:build:docs-full-prod:
rules:
- <<: *if-qa-test-tag
when: never
- <<: *if-protected-no_label
.doc-rules:build:docs-partial:
rules:
- <<: *if-qa-test-tag
when: never
- <<: *if-dev-push
changes: *patterns-docs-full
when: never
@ -76,17 +92,14 @@ check_docs_lang_sync:
stage: build_doc
tags:
- build_docs
needs:
- job: fast_template_app
artifacts: false
optional: true
script:
- if [ -n "${BREATHE_ALT_INSTALL_URL}" ]; then pip uninstall -y breathe && pip install -U ${BREATHE_ALT_INSTALL_URL}; fi
- cd docs
- build-docs -t $DOCTGT -bs $DOC_BUILDERS -l $DOCLANG build
parallel:
matrix:
- DOCLANG: ["en", "zh_CN"]
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c6", "esp32h2", "esp32p4"]
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c5", "esp32c6", "esp32h2", "esp32p4"]
check_docs_gh_links:
image: $ESP_IDF_DOC_ENV_IMAGE
@ -103,8 +116,24 @@ build_docs_html_full:
extends:
- .build_docs_template
- .doc-rules:build:docs-full
needs:
- job: fast_template_app
artifacts: false
optional: true
artifacts:
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
expire_in: 4 days
variables:
DOC_BUILDERS: "html"
build_docs_html_full_prod:
extends:
- .build_docs_template
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@ -116,8 +145,11 @@ build_docs_html_partial:
extends:
- .build_docs_template
- .doc-rules:build:docs-partial
needs:
- job: fast_template_app
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@ -135,8 +167,23 @@ build_docs_pdf:
extends:
- .build_docs_template
- .doc-rules:build:docs-full
needs:
- job: fast_template_app
artifacts: false
optional: true
artifacts:
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
variables:
DOC_BUILDERS: "latex"
build_docs_pdf_prod:
extends:
- .build_docs_template
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
@ -187,13 +234,12 @@ deploy_docs_production:
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
extends:
- .deploy_docs_template
rules:
- <<: *if-protected-no_label
- .doc-rules:build:docs-full-prod
stage: post_deploy
dependencies: # set dependencies to null to avoid missing artifacts issue
needs: # ensure runs after push_to_github succeeded
- build_docs_html_full
- build_docs_pdf
- build_docs_html_full_prod
- build_docs_pdf_prod
- job: push_to_github
artifacts: false
variables:
@ -208,19 +254,16 @@ deploy_docs_production:
check_doc_links:
extends:
- .build_docs_template
rules:
- <<: *if-protected-no_label
- .doc-rules:build:docs-full-prod
stage: post_deploy
needs:
- job: deploy_docs_production
artifacts: false
tags: ["build", "amd64", "internet"]
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/linkcheck/*.txt
expire_in: 1 week
allow_failure: true
script:
- cd docs

@ -13,8 +13,7 @@
- job: upload-submodules-cache
optional: true
artifacts: false
- job: mr_variables
optional: true # only MR pipelines would have this
- pipeline_variables
test_nvs_on_host:
extends: .host_test_template
@ -29,7 +28,6 @@ test_nvs_coverage:
artifacts:
paths:
- components/nvs_flash/test_nvs_host/coverage_report
expire_in: 1 week
script:
- cd components/nvs_flash/test_nvs_host
- make coverage_report
@ -66,7 +64,6 @@ test_reproducible_build:
- "**/build*/*.bin"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
expire_in: 1 week
test_spiffs_on_host:
extends: .host_test_template
@ -100,36 +97,26 @@ test_gdbstub_on_host:
- cd components/esp_gdbstub/test_gdbstub_host
- make test
test_idf_py:
extends: .host_test_template
variables:
LC_ALL: C.UTF-8
script:
- cd ${IDF_PATH}/tools/test_idf_py
- ./test_idf_py.py
- ./test_hints.py
# Test for creating the virtualenv. It must be invoked from Python, not from a virtualenv.
# Use the docker image's system python, without any extra dependencies
test_idf_tools:
test_cli_installer:
extends:
- .host_test_template
- .before_script_minimal
- .before_script:minimal
artifacts:
when: on_failure
paths:
- tools/tools.new.json
- tools/test_idf_tools/test_python_env_logs.txt
expire_in: 1 week
image:
name: $ESP_ENV_IMAGE
entrypoint: [""] # use system python3. no extra pip package installed
script:
# Tools must be downloaded for testing
- python3 ${IDF_PATH}/tools/idf_tools.py download
- python3 ${IDF_PATH}/tools/idf_tools.py download required qemu-riscv32 qemu-xtensa
- cd ${IDF_PATH}/tools/test_idf_tools
- python3 -m pip install jsonschema
- python3 ./test_idf_tools.py
- python3 ./test_idf_tools.py -v
- python3 ./test_idf_tools_python_env.py
.test_efuse_table_on_host_template:
@ -140,7 +127,6 @@ test_idf_tools:
when: on_failure
paths:
- components/efuse/${IDF_TARGET}/esp_efuse_table.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/components/efuse/
- ./efuse_table_gen.py -t "${IDF_TARGET}" ${IDF_PATH}/components/efuse/${IDF_TARGET}/esp_efuse_table.csv
@ -183,7 +169,6 @@ test_logtrace_proc:
paths:
- tools/esp_app_trace/test/logtrace/output
- tools/esp_app_trace/test/logtrace/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/logtrace
- ./test.sh
@ -195,56 +180,41 @@ test_sysviewtrace_proc:
paths:
- tools/esp_app_trace/test/sysview/output
- tools/esp_app_trace/test/sysview/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/sysview
- ./test.sh
test_mkdfu:
extends: .host_test_template
variables:
LC_ALL: C.UTF-8
script:
- cd ${IDF_PATH}/tools/test_mkdfu
- ./test_mkdfu.py
test_mkuf2:
extends: .host_test_template
script:
- cd ${IDF_PATH}/tools/test_mkuf2
- ./test_mkuf2.py
test_sbom:
extends:
- .host_test_template
- .rules:patterns:sbom
script:
- cd ${IDF_PATH}/tools/test_sbom
- pytest
test_autocomplete:
test_tools:
extends:
- .host_test_template
artifacts:
when: on_failure
paths:
- ${IDF_PATH}/*.out
expire_in: 1 week
script:
- ${IDF_PATH}/tools/ci/test_autocomplete.py
test_detect_python:
extends:
- .host_test_template
- ${IDF_PATH}/XUNIT_*.xml
reports:
junit: ${IDF_PATH}/XUNIT_*.xml
variables:
LC_ALL: C.UTF-8
INSTALL_QEMU: 1 # for test_idf_qemu.py
script:
- stat=0
- cd ${IDF_PATH}/tools/ci/test_autocomplete
- pytest --noconftest test_autocomplete.py --junitxml=${IDF_PATH}/XUNIT_AUTOCOMP.xml || stat=1
- cd ${IDF_PATH}/tools/test_idf_py
- pytest --noconftest test_idf_py.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY.xml || stat=1
- pytest --noconftest test_hints.py --junitxml=${IDF_PATH}/XUNIT_HINTS.xml || stat=1
- pytest --noconftest test_idf_qemu.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY_QEMU.xml || stat=1
- cd ${IDF_PATH}/tools/test_mkdfu
- pytest --noconftest test_mkdfu.py --junitxml=${IDF_PATH}/XUNIT_MKDFU.xml || stat=1
- cd ${IDF_PATH}
- shellcheck -s sh tools/detect_python.sh
- shellcheck -s bash tools/detect_python.sh
- shellcheck -s dash tools/detect_python.sh
- shellcheck -s sh tools/detect_python.sh || stat=1
- shellcheck -s bash tools/detect_python.sh || stat=1
- shellcheck -s dash tools/detect_python.sh || stat=1
- "bash -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
- "dash -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
- "zsh -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
- "fish -c 'source tools/detect_python.fish && echo Our Python: $ESP_PYTHON'"
- exit "$stat"
test_split_path_by_spaces:
extends: .host_test_template
@ -300,20 +270,19 @@ test_gen_soc_caps_kconfig:
test_pytest_qemu:
extends:
- .host_test_template
- .before_script_build_jobs
image: $QEMU_IMAGE
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
allow_failure: true # IDFCI-1752
parallel:
matrix:
- IDF_TARGET: [esp32, esp32c3]
variables:
INSTALL_QEMU: 1
script:
- run_cmd python tools/ci/ci_build_apps.py . -vv
--target $IDF_TARGET
@ -322,27 +291,27 @@ test_pytest_qemu:
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- python tools/ci/get_known_failure_cases_file.py
- run_cmd pytest
--target $IDF_TARGET
-m qemu
--embedded-services idf,qemu
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
--app-info-filepattern \"list_job_*.txt\"
--qemu-extra-args \"-global driver=timer.$IDF_TARGET.timg,property=wdt_disable,value=true\"
test_pytest_linux:
extends:
- .host_test_template
- .before_script_build_jobs
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
- "**/build*/build_log.txt"
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
script:
- run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -vv
--target linux
@ -351,10 +320,23 @@ test_pytest_linux:
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- python tools/ci/get_known_failure_cases_file.py
- run_cmd pytest
--target linux
-m host_test
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
--app-info-filepattern \"list_job_*.txt\"
test_idf_pytest_plugin:
extends:
- .host_test_template
- .rules:patterns:idf-pytest-plugin
variables:
SUBMODULES_TO_FETCH: "none"
artifacts:
reports:
junit: XUNIT_RESULT.xml
script:
- cd tools/ci/idf_pytest
- pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml

@ -0,0 +1,68 @@
# generate dynamic integration pipeline by `idf-integration-ci` project
.patterns-integration_test: &patterns-integration_test
# add all possible patterns to make sure `gen_integration_pipeline` can be triggered.
# fine-grained control will be done while generating the pipeline
# find `patterns` in `idf-integration-ci` project
- "components/**/*"
- "tools/**/*"
- ".gitlab-ci.yml"
- ".gitlab/ci/common.yml"
- ".gitlab/ci/integration_test.yml"
- ".gitmodules"
- "CMakeLists.txt"
- "install.sh"
- "export.sh"
- "Kconfig"
- "sdkconfig.rename"
# Simplify the rules
.integration_test_rules:
rules:
- if: '$CI_PIPELINE_SOURCE != "merge_request_event"'
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
changes: *patterns-integration_test
# support triggering by CI labels
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*target_test(?:,[^,\n\r]+)*$/i'
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*integration_test(?:,[^,\n\r]+)*$/i'
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*build(?:,[^,\n\r]+)*$/i'
gen_integration_pipeline:
extends:
- .before_script:minimal
- .integration_test_rules
image: ${CI_INTEGRATION_ASSIGN_ENV}
stage: assign_test
cache: []
tags: [fast_run, shiny]
variables:
SUBMODULES_TO_FETCH: "none"
GIT_LFS_SKIP_SMUDGE: 1
needs:
- job: fast_template_app
artifacts: false
optional: true
artifacts:
paths:
- idf-integration-ci/child_pipeline/
expire_in: 2 weeks
script:
- add_gitlab_ssh_keys
- retry_failed git clone ${CI_GEN_INTEGRATION_PIPELINE_REPO} idf-integration-ci
- python $CHECKOUT_REF_SCRIPT idf-integration-ci idf-integration-ci
- cd idf-integration-ci
- python tools/generate_child_pipeline.py -o child_pipeline/
child_integration_test_pipeline:
extends:
- .integration_test_rules
stage: assign_test
needs:
- gen_integration_pipeline
trigger:
include:
- artifact: idf-integration-ci/child_pipeline/pipeline.yml
job: gen_integration_pipeline
forward:
yaml_variables: false
strategy: depend

@ -3,56 +3,7 @@
image: $ESP_ENV_IMAGE
tags:
- host_test
dependencies: []
.check_pre_commit_template:
extends:
- .pre_check_template
- .before_script_minimal
image: $PRE_COMMIT_IMAGE
check_pre_commit_master_release:
extends:
- .check_pre_commit_template
- .rules:protected
script:
- fetch_submodules
- git diff-tree --no-commit-id --name-only -r $PIPELINE_COMMIT_SHA | xargs pre-commit run --files
check_pre_commit_MR:
extends:
- .check_pre_commit_template
- .rules:dev
script:
- fetch_submodules
- python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py files ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | xargs pre-commit run --files
check_MR_style_dangerjs:
extends:
- .pre_check_template
image: node:18.15.0-alpine3.16
variables:
DANGER_GITLAB_API_TOKEN: ${ESPCI_TOKEN}
DANGER_GITLAB_HOST: ${GITLAB_HTTP_SERVER}
DANGER_GITLAB_API_BASE_URL: ${GITLAB_HTTP_SERVER}/api/v4
DANGER_JIRA_USER: ${DANGER_JIRA_USER}
DANGER_JIRA_PASSWORD: ${DANGER_JIRA_PASSWORD}
cache:
# pull only for most of the use cases since it's cache dir.
# Only set "push" policy for "upload_cache" stage jobs
key:
files:
- .gitlab/dangerjs/package-lock.json
paths:
- .gitlab/dangerjs/node_modules/
policy: pull
before_script:
- cd .gitlab/dangerjs
- npm install --no-progress --no-update-notifier # Install danger dependencies
script:
- npx danger ci --failOnErrors -v
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
dependencies: # set dependencies to null to avoid missing artifacts issue
check_version:
# Don't run this for feature/bugfix branches, so that it is possible to modify
@ -60,6 +11,7 @@ check_version:
extends:
- .pre_check_template
- .rules:protected
- .before_script:fetch:git_describe
script:
- export IDF_PATH=$PWD
- tools/ci/check_idf_version.sh
@ -71,21 +23,6 @@ check_api_usage:
- tools/ci/check_api_violation.sh
- tools/ci/check_examples_extra_component_dirs.sh
test_check_kconfigs:
extends: .pre_check_template
artifacts:
when: on_failure
paths:
- components/*/Kconfig*.new
- examples/*/*/*/Kconfig*.new
- examples/*/*/*/*/Kconfig*.new
- tools/*/Kconfig*.new
- tools/*/*/Kconfig*.new
- tools/*/*/*/Kconfig*.new
expire_in: 1 week
script:
- python ${IDF_PATH}/tools/ci/test_check_kconfigs.py
check_blobs:
extends:
- .pre_check_template
@ -122,6 +59,7 @@ check_public_headers:
- IDF_TARGET=esp32c3 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
- IDF_TARGET=esp32c2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
- IDF_TARGET=esp32c6 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
- IDF_TARGET=esp32c5 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
- IDF_TARGET=esp32h2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
- IDF_TARGET=esp32p4 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
@ -134,7 +72,6 @@ check_chip_support_components:
paths:
- esp_hw_support_part.h
- bootloader_support_part.h
expire_in: 1 week
script:
- python tools/ci/check_soc_headers_leak.py
- find ${IDF_PATH}/components/soc/*/include/soc/ -name "*_struct.h" -print0 | xargs -0 -n1 ./tools/ci/check_soc_struct_headers.py
@ -148,7 +85,6 @@ check_esp_err_to_name:
when: on_failure
paths:
- components/esp_common/esp_err_to_name.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/
- ./gen_esp_err_to_name.py
@ -163,23 +99,19 @@ check_esp_system:
# For release tag pipelines only, make sure the tag was created with 'git tag -a' so it will update
# the version returned by 'git describe'
# Don't forget to update the env var `LATEST_GIT_TAG` in .gitlab/ci/common.yml
check_version_tag:
extends:
- .pre_check_template
- .rules:tag:release
- .before_script:fetch:git_describe
script:
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
check_artifacts_expire_time:
extends: .pre_check_template
script:
# check if we have set expire time for all artifacts
- python tools/ci/check_artifacts_expire_time.py
check_test_scripts_build_test_rules:
extends:
- .pre_check_template
- .before_script_build_jobs
- .before_script:build
script:
# required pytest related packages
- run_cmd bash install.sh --enable-pytest
@ -188,28 +120,47 @@ check_test_scripts_build_test_rules:
check_configure_ci_environment_parsing:
extends:
- .pre_check_template
- .before_script_build_jobs
- .before_script:build
- .rules:build
script:
- cd tools/ci
- python -m unittest ci_build_apps.py
mr_variables:
pipeline_variables:
extends:
- .pre_check_template
- .rules:mr
- .before_script_minimal
tags:
- build
- .before_script:fetch:git_diff
tags: [fast_run, shiny]
script:
- echo "MR_MODIFIED_FILES=$(python tools/ci/ci_get_mr_info.py files ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | xargs)" >> mr.env
- echo "MR_MODIFIED_COMPONENTS=$(python tools/ci/ci_get_mr_info.py components ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | xargs)" >> mr.env
- >
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
echo "BUILD_AND_TEST_ALL_APPS=1" >> mr.env
# MODIFIED_FILES is a list of files that changed; it can be used everywhere
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
# MR_MODIFIED_FILES and MR_MODIFIED_COMPONENTS are semicolon-separated lists that are used in MR pipelines only
# for non-MR pipelines, these are empty lists
- |
if [ "$IS_MR_PIPELINE" == "0" ]; then
echo "MR_MODIFIED_FILES=\"\"" >> pipeline.env
echo "MR_MODIFIED_COMPONENTS=\"\"" >> pipeline.env
else
MR_MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | tr '\n' ';')
echo "MR_MODIFIED_FILES=\"$MR_MODIFIED_FILES\"" >> pipeline.env
MR_MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | tr '\n' ';')
echo "MR_MODIFIED_COMPONENTS=\"$MR_MODIFIED_COMPONENTS\"" >> pipeline.env
fi
- cat mr.env
- |
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
echo "BUILD_AND_TEST_ALL_APPS=1" >> pipeline.env
fi
- cat pipeline.env
- python tools/ci/artifacts_handler.py upload --type modified_files_and_components_report
artifacts:
reports:
dotenv: mr.env
expire_in: 4 days
dotenv: pipeline.env
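# Variables written to pipeline.env propagate to later jobs through the
# dotenv artifact report; a downstream job only has to depend on this one.
# A hypothetical sketch:
#
#   example_consumer:
#     needs:
#       - pipeline_variables
#     script:
#       - echo "modified components: ${MR_MODIFIED_COMPONENTS}"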
check_test_cases_env_markers_and_required_runners:
extends:
- .pre_check_template
tags: [fast_run, shiny]
script:
- python tools/ci/dynamic_pipelines/scripts/generate_target_test_child_pipeline.py --check

@ -0,0 +1,47 @@
.check_pre_commit_template:
extends:
- .before_script:minimal
stage: pre_check
image: $PRE_COMMIT_IMAGE
tags: [cache, shiny]
needs:
- pipeline_variables
variables:
# cache pre_commit
PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"
script:
- fetch_submodules
- pre-commit run --files $MODIFIED_FILES
- pre-commit run --hook-stage post-commit validate-sbom-manifest
check_pre_commit_upload_cache:
extends:
- .check_pre_commit_template
rules:
- if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/) && $CI_PIPELINE_SOURCE == "push"'
cache:
- key: pre_commit-cache-${LATEST_GIT_TAG}
paths:
- .cache/pre-commit
policy: pull-push
- key: submodule-cache-${LATEST_GIT_TAG}
paths:
- .cache/submodule_archives
policy: pull
check_pre_commit:
extends:
- .check_pre_commit_template
rules:
- if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/) && $CI_PIPELINE_SOURCE == "push"'
when: never
- when: on_success
cache:
- key: pre_commit-cache-${LATEST_GIT_TAG}
paths:
- .cache/pre-commit
policy: pull
- key: submodule-cache-${LATEST_GIT_TAG}
paths:
- .cache/submodule_archives
policy: pull

(diff too large to display)
@ -6,8 +6,6 @@ clang_tidy_check:
artifacts:
paths:
- clang_tidy_reports/
when: always
expire_in: 1 day
variables:
IDF_TOOLCHAIN: clang
script:
@ -20,17 +18,17 @@ check_pylint:
extends:
- .pre_check_template
- .rules:patterns:python-files
needs:
- pipeline_variables
artifacts:
when: always
reports:
codequality: pylint.json
expire_in: 1 week
script:
- |
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
export files=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py files ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | grep ".py$");
export files=$(echo "$GIT_DIFF_OUTPUT" | grep ".py$" | xargs);
else
export files=$(find . -iname "*.py" -print);
export files=$(git ls-files "*.py" | xargs);
fi
- if [ -z "$files" ]; then echo "No python files found"; exit 0; fi
- run_cmd pylint --exit-zero --load-plugins=pylint_gitlab --output-format=gitlab-codeclimate:pylint.json $files
@ -51,8 +49,10 @@ check_pylint:
.sonar_scan_template:
stage: build
extends: .pre_check_template
image:
name: $SONARQUBE_SCANNER_IMAGE
# full clone, since this image does not support the fetch --shallow-since cutoff
# shiny runners are used for full clone
tags: [build, shiny]
image: $SONARQUBE_SCANNER_IMAGE
before_script:
- source tools/ci/utils.sh
- export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
@ -66,12 +66,10 @@ check_pylint:
- export SONAR_SCANNER_OPTS="-Xmx2048m"
variables:
GIT_DEPTH: 0
REPORT_PATTERN: clang_tidy_reports/*.txt
REPORT_PATTERN: clang_tidy_reports/**/*.txt
artifacts:
when: always
paths:
- $REPORT_PATTERN
expire_in: 1 week
dependencies: # This is not a hard dependency relationship; it can be skipped when only python files changed, so we do not use "needs" here.
- clang_tidy_check
@ -79,10 +77,10 @@ code_quality_check:
extends:
- .sonar_scan_template
- .rules:patterns:static-code-analysis-preview
allow_failure: true # since now it's using exit code to indicate the code analysis result,
allow_failure: true # it's using exit code to indicate the code analysis result,
# we don't want to block CI when critical issues are found
script:
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits ${CI_COMMIT_REF_NAME} | tr '\n' ',')
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits --src-branch ${CI_COMMIT_REF_NAME} | tr '\n' ',')
# test if this branch has a merge request; if not, exit 0
- test -n "$CI_MERGE_REQUEST_IID" || exit 0
- test -n "$CI_MERGE_REQUEST_COMMITS" || exit 0
@ -102,7 +100,7 @@ code_quality_report:
extends:
- .sonar_scan_template
- .rules:protected
allow_failure: true # since now it's using exit code to indicate the code analysis result,
allow_failure: true # it's using exit code to indicate the code analysis result,
# we don't want to block CI when critical issues are found
script:
- sonar-scanner

(diff too large to display)
@ -0,0 +1,92 @@
# Host tests
.host_test_win_template:
extends: .rules:test:host_test
stage: host_test
image: $ESP_ENV_IMAGE
tags:
- windows-build
dependencies: # set dependencies to null to avoid missing artifacts issue
# run host_test jobs immediately, only after upload cache
needs:
- job: upload-pip-cache
optional: true
artifacts: false
- job: upload-submodules-cache
optional: true
artifacts: false
before_script: []
after_script: []
test_cli_installer_win:
extends:
- .host_test_win_template
- .rules:labels:windows_pytest_build_system
artifacts:
when: on_failure
paths:
- tools/tools.new.json
- tools/test_idf_tools/test_python_env_logs.txt
variables:
IDF_PATH: "$CI_PROJECT_DIR"
script:
# Tools must be downloaded for testing
- python ${IDF_PATH}\tools\idf_tools.py download required qemu-riscv32 qemu-xtensa
- cd ${IDF_PATH}\tools\test_idf_tools
- python -m pip install jsonschema
- python .\test_idf_tools.py
- python .\test_idf_tools_python_env.py
test_tools_win:
extends:
- .host_test_win_template
- .rules:labels:windows_pytest_build_system
artifacts:
paths:
- ${IDF_PATH}/*.out
- ${IDF_PATH}/XUNIT_*.xml
reports:
junit: ${IDF_PATH}/XUNIT_*.xml
variables:
LC_ALL: C.UTF-8
PYTHONPATH: "$PYTHONPATH;$IDF_PATH\\tools;$IDF_PATH\\tools\\esp_app_trace;$IDF_PATH\\components\\partition_table;$IDF_PATH\\tools\\ci\\python_packages"
script:
- python -m pip install jsonschema
- .\install.ps1 --enable-ci --enable-pytest
- .\export.ps1
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
- cd ${IDF_PATH}/tools/test_idf_py
- pytest --noconftest test_idf_py.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY.xml
- pytest --noconftest test_hints.py --junitxml=${IDF_PATH}/XUNIT_HINTS.xml
# Build tests
.test_build_system_template_win:
stage: host_test
variables:
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
IDF_CCACHE_ENABLE: "1"
PYTHONPATH: "$PYTHONPATH;$IDF_PATH\\tools;$IDF_PATH\\tools\\esp_app_trace;$IDF_PATH\\components\\partition_table;$IDF_PATH\\tools\\ci\\python_packages"
before_script: []
after_script: []
timeout: 4 hours
script:
- .\install.ps1 --enable-ci --enable-pytest
- . .\export.ps1
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
- cd ${IDF_PATH}\tools\test_build_system
- pytest --parallel-count ${CI_NODE_TOTAL} --parallel-index ${CI_NODE_INDEX} --junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml
pytest_build_system_win:
extends:
- .test_build_system_template_win
- .rules:labels:windows_pytest_build_system
parallel: 2
needs: []
tags:
- windows-build
artifacts:
paths:
- XUNIT_RESULT.xml
- test_build_system
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml

@ -9,13 +9,13 @@
upload-pip-cache:
extends:
- .upload_cache_template
- .before_script_minimal
- .rules:patterns:python-cache
- .before_script:minimal
- .rules:upload-python-cache
tags:
- $GEO
- cache
cache:
key: pip-cache
key: pip-cache-${LATEST_GIT_TAG}
paths:
- .cache/pip
policy: push
@ -29,13 +29,13 @@ upload-pip-cache:
upload-submodules-cache:
extends:
- .upload_cache_template
- .before_script_minimal
- .rules:patterns:submodule
- .before_script:minimal
- .rules:upload-submodule-cache
tags:
- $GEO
- cache
cache:
key: submodule-cache
key: submodule-cache-${LATEST_GIT_TAG}
paths:
- .cache/submodule_archives
policy: push
@ -48,27 +48,3 @@ upload-submodules-cache:
parallel:
matrix:
- GEO: [ 'shiny', 'brew' ]
upload-danger-npm-cache:
stage: upload_cache
image: node:18.15.0-alpine3.16
extends:
- .rules:patterns:dangerjs
tags:
- $GEO
- cache
cache:
key:
files:
- .gitlab/dangerjs/package-lock.json
paths:
- .gitlab/dangerjs/node_modules/
policy: push
before_script:
- echo "Skip before scripts ...."
script:
- cd .gitlab/dangerjs
- npm install --no-progress --no-update-notifier
parallel:
matrix:
- GEO: [ 'shiny', 'brew' ]

@ -1,172 +0,0 @@
const {
minimumSummaryChars,
maximumSummaryChars,
maximumBodyLineChars,
allowedTypes,
} = require("./mrCommitsConstants.js");
const { gptStandardModelTokens } = require("./mrCommitsConstants.js");
const { ChatPromptTemplate } = require("langchain/prompts");
const { SystemMessagePromptTemplate } = require("langchain/prompts");
const { LLMChain } = require("langchain/chains");
const { ChatOpenAI } = require("langchain/chat_models/openai");
const openAiTokenCount = require("openai-gpt-token-counter");
module.exports = async function () {
let outputDangerMessage = `\n\nPerhaps you could use an AI-generated suggestion for your commit message. Here is one `;
let mrDiff = await getMrGitDiff(danger.git.modified_files);
const mrCommitMessages = getCommitMessages(danger.gitlab.commits);
const inputPrompt = getInputPrompt();
const inputLlmTokens = getInputLlmTokens(
inputPrompt,
mrDiff,
mrCommitMessages
);
console.log(`Input tokens for LLM: ${inputLlmTokens}`);
if (inputLlmTokens >= gptStandardModelTokens) {
mrDiff = ""; // If the input mrDiff is larger than 16k model, don't use mrDiff, use only current commit messages
outputDangerMessage += `(based only on your current commit messages, git-diff of this MR is too big (${inputLlmTokens} tokens) for the AI models):\n\n`;
} else {
outputDangerMessage += `(based on your MR git-diff and your current commit messages):\n\n`;
}
// Generate AI commit message
let generatedCommitMessage = "";
try {
const rawCommitMessage = await createAiGitMessage(
inputPrompt,
mrDiff,
mrCommitMessages
);
generatedCommitMessage = postProcessCommitMessage(rawCommitMessage);
} catch (error) {
console.error("Error in generating AI commit message: ", error);
outputDangerMessage +=
"\nCould not generate commit message due to an error.\n";
}
// Append closing statements ("Closes https://github.com/espressif/esp-idf/issues/XXX") to the generated commit message
let closingStatements = extractClosingStatements(mrCommitMessages);
if (closingStatements.length > 0) {
generatedCommitMessage += "\n\n" + closingStatements;
}
// Add the generated git message, format to the markdown code block
outputDangerMessage += `\n\`\`\`\n${generatedCommitMessage}\n\`\`\`\n`;
outputDangerMessage +=
"\n**NOTE: AI-generated suggestions may not always be correct, please review the suggestion before using it.**"; // Add disclaimer
return outputDangerMessage;
};
async function getMrGitDiff(mrModifiedFiles) {
const fileDiffs = await Promise.all(
mrModifiedFiles.map((file) => danger.git.diffForFile(file))
);
return fileDiffs.map((fileDiff) => fileDiff.diff.trim()).join(" ");
}
function getCommitMessages(mrCommits) {
return mrCommits.map((commit) => commit.message);
}
function getInputPrompt() {
return `You are a helpful assistant that creates suggestions for single git commit message, that user can use to describe all the changes in their merge request.
Use git diff: {mrDiff} and users current commit messages: {mrCommitMessages} to get the changes made in the commit.
Output should be git commit message following the conventional commit format.
Output only git commit message in desired format, without comments and other text.
Do not include the closing statements ("Closes https://....") in the output.
Here are the strict rules you must follow:
- Avoid mentioning any JIRA tickets (e.g., "Closes JIRA-123").
- Be specific. Don't use vague terms (e.g., "some checks", "add new ones", "few changes").
- The commit message structure should be: <type><(scope/component)>: <summary>
- Types allowed: ${allowedTypes.join(", ")}
- If 'scope/component' is used, it must start with a lowercase letter.
- The 'summary' must NOT end with a period.
- The 'summary' must be between ${minimumSummaryChars} and ${maximumSummaryChars} characters long.
If a 'body' of commit message is used:
- Each line must be no longer than ${maximumBodyLineChars} characters.
- It must be separated from the 'summary' by a blank line.
Examples of correct commit messages:
- With scope and body:
fix(freertos): Fix startup timeout issue
This is a text of commit message body...
- adds support for wifi6
- adds validations for logging script
- Without scope and body:
ci: added target test job for ESP32-Wifi6`;
}
function getInputLlmTokens(inputPrompt, mrDiff, mrCommitMessages) {
const mrCommitMessagesTokens = openAiTokenCount(mrCommitMessages.join(" "));
const gitDiffTokens = openAiTokenCount(mrDiff);
const promptTokens = openAiTokenCount(inputPrompt);
return mrCommitMessagesTokens + gitDiffTokens + promptTokens;
}
async function createAiGitMessage(inputPrompt, mrDiff, mrCommitMessages) {
const chat = new ChatOpenAI({ engine: "gpt-3.5-turbo", temperature: 0 });
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
SystemMessagePromptTemplate.fromTemplate(inputPrompt),
]);
const chain = new LLMChain({ prompt: chatPrompt, llm: chat });
const response = await chain.call({
mrDiff: mrDiff,
mrCommitMessages: mrCommitMessages,
});
return response.text;
}
function postProcessCommitMessage(rawCommitMessage) {
// Split the result into lines
let lines = rawCommitMessage.split("\n");
// Format each line
for (let i = 0; i < lines.length; i++) {
let line = lines[i].trim();
// If the line is longer than maximumBodyLineChars, split it into multiple lines
if (line.length > maximumBodyLineChars) {
let newLines = [];
while (line.length > maximumBodyLineChars) {
let lastSpaceIndex = line.lastIndexOf(
" ",
maximumBodyLineChars
);
newLines.push(line.substring(0, lastSpaceIndex));
line = line.substring(lastSpaceIndex + 1);
}
newLines.push(line);
lines[i] = newLines.join("\n");
}
}
// Join the lines back into a single string with a newline between each one
return lines.join("\n");
}
function extractClosingStatements(mrCommitMessages) {
let closingStatements = [];
mrCommitMessages.forEach((message) => {
const lines = message.split("\n");
lines.forEach((line) => {
if (line.startsWith("Closes")) {
closingStatements.push(line);
}
});
});
return closingStatements.join("\n");
}

@ -1,56 +0,0 @@
let outputStatuses = [];
/**
* Logs the status of a rule with padded formatting and stores it in the `outputStatuses` array.
* If the rule already exists in the array, its status is updated.
* @param message The name of the rule
* @param status The output (exit) status of the rule
*/
function recordRuleExitStatus(message, status) {
// Check if the rule already exists in the array
const existingRecord = outputStatuses.find(
(rule) => rule.message === message
);
if (existingRecord) {
// Update the status of the existing rule
existingRecord.status = status;
} else {
// If the rule doesn't exist, add it to the array
outputStatuses.push({ message, status });
}
}
/**
* Displays all the rule output statuses stored in the `outputStatuses` array.
* Filters out any empty lines, sorts them alphabetically, and prints the statuses
* with a header and separator.
* These statuses are later displayed in CI job tracelog.
*/
function displayAllOutputStatuses() {
const lineLength = 100;
const sortedStatuses = outputStatuses.sort((a, b) =>
a.message.localeCompare(b.message)
);
const formattedLines = sortedStatuses.map((statusObj) => {
const paddingLength =
lineLength - statusObj.message.length - statusObj.status.length;
const paddedMessage = statusObj.message.padEnd(
statusObj.message.length + paddingLength,
"."
);
return `${paddedMessage} ${statusObj.status}`;
});
console.log(
"DangerJS checks (rules) output states:\n" + "=".repeat(lineLength + 2)
);
console.log(formattedLines.join("\n"));
console.log("=".repeat(lineLength + 2));
}
module.exports = {
displayAllOutputStatuses,
recordRuleExitStatus,
};

@ -1,51 +0,0 @@
const { displayAllOutputStatuses } = require("./configParameters.js");
/*
* Modules with checks are stored in ".gitlab/dangerjs/<module_name>". To import them, use path relative to "dangerfile.js"
*/
async function runChecks() {
// Checks for merge request title
require("./mrTitleNoDraftOrWip.js")();
// Checks for merge request description
require("./mrDescriptionLongEnough.js")();
require("./mrDescriptionReleaseNotes.js")();
await require("./mrDescriptionJiraLinks.js")();
// Checks for documentation
await require("./mrDocsTranslation.js")();
// Checks for MR commits
require("./mrCommitsTooManyCommits.js")();
await require("./mrCommitsCommitMessage.js")();
require("./mrCommitsEmail.js")();
// Checks for MR code
require("./mrSizeTooLarge.js")();
// Checks for MR area labels
await require("./mrAreaLabels.js")();
// Checks for Source branch name
require("./mrSourceBranchName.js")();
// Show DangerJS individual checks statuses - visible in CI job tracelog
displayAllOutputStatuses();
// Add success log if no issues
if (
results.fails.length === 0 &&
results.warnings.length === 0 &&
results.messages.length === 0
) {
return message("🎉 Good Job! All checks are passing!");
}
}
runChecks();
// Add retry link
const retryLink = `${process.env.DANGER_GITLAB_HOST}/${process.env.CI_PROJECT_PATH}/-/jobs/${process.env.CI_JOB_ID}`;
markdown(
`***\n#### :repeat: You can enforce automatic MR checks by retrying the [DangerJS job](${retryLink})\n***`
);

@ -1,27 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check if MR has area labels (light blue labels)
*
* @dangerjs WARN
*/
module.exports = async function () {
const ruleName = "Merge request area labels";
const projectId = 103; // ESP-IDF
const areaLabelColor = /^#d2ebfa$/i; // match color code (case-insensitive)
const projectLabels = await danger.gitlab.api.Labels.all(projectId); // Get all project labels
const areaLabels = projectLabels
.filter((label) => areaLabelColor.test(label.color))
.map((label) => label.name); // Filter only area labels
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
if (!mrLabels.some((label) => areaLabels.includes(label))) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
`Please add some [area labels](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/labels) to this MR.`
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};
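
The rule above recognizes area labels purely by their light blue color code, not by name. A standalone sketch of that filter (the label data is invented):

```js
// Hypothetical run of the color-based area-label filter used above.
const areaLabelColor = /^#d2ebfa$/i; // area labels are light blue
const projectLabels = [
    { name: "Area: BT/BLE", color: "#D2EBFA" }, // matches (case-insensitive)
    { name: "Status: Reviewing", color: "#428bca" }, // does not match
];
const areaLabels = projectLabels
    .filter((label) => areaLabelColor.test(label.color))
    .map((label) => label.name);
console.log(areaLabels); // [ "Area: BT/BLE" ]
```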


@ -1,165 +0,0 @@
const {
minimumSummaryChars,
maximumSummaryChars,
maximumBodyLineChars,
allowedTypes,
} = require("./mrCommitsConstants.js");
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check that commit messages are based on the Espressif ESP-IDF project's rules for git commit messages.
*
* @dangerjs WARN
*/
module.exports = async function () {
const ruleName = "Commit messages style";
const mrCommits = danger.gitlab.commits;
const lint = require("@commitlint/lint").default;
const lintingRules = {
// rule definition: [(0-1 = off/on), (always/never = must be/mustn't be), (value)]
"body-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the body line
"footer-leading-blank": [1, "always"], // Always have a blank line before the footer section
"footer-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the footer line
"subject-max-length": [1, "always", maximumSummaryChars], // Max length of the "Summary"
"subject-min-length": [1, "always", minimumSummaryChars], // Min length of the "Summary"
"scope-case": [1, "always", "lower-case"], // "scope/component" must start with lower-case
"subject-full-stop": [1, "never", "."], // "Summary" must not end with a full stop (period)
"subject-empty": [1, "never"], // "Summary" is mandatory
"type-case": [1, "always", "lower-case"], // "type/action" must start with lower-case
"type-empty": [1, "never"], // "type/action" is mandatory
"type-enum": [1, "always", allowedTypes], // "type/action" must be one of the allowed types
"body-leading-blank": [1, "always"], // Always have a blank line before the body section
};
// Switch for AI suggestions (generated for poor commit messages)
let generateAISuggestion = false;
// Search for the messages in each commit
let issuesAllCommitMessages = [];
for (const commit of mrCommits) {
const commitMessage = commit.message;
const commitMessageTitle = commit.title;
let issuesSingleCommitMessage = [];
let reportSingleCommitMessage = "";
// Check if the commit message contains any Jira ticket references
const jiraTicketRegex = /[A-Z0-9]+-[0-9]+/g;
const jiraTicketMatches = commitMessage.match(jiraTicketRegex);
if (jiraTicketMatches) {
const jiraTicketNames = jiraTicketMatches.join(", ");
issuesSingleCommitMessage.push(
`- probably contains Jira ticket reference (\`${jiraTicketNames}\`). Please remove Jira tickets from commit messages.`
);
}
// Lint commit messages with @commitlint (Conventional Commits style)
const result = await lint(commit.message, lintingRules);
for (const warning of result.warnings) {
// Custom messages for each rule with terminology used by Espressif conventional commits guide
switch (warning.name) {
case "subject-max-length":
issuesSingleCommitMessage.push(
`- *summary* appears to be too long`
);
break;
case "type-empty":
issuesSingleCommitMessage.push(
`- *type/action* looks empty`
);
break;
case "type-case":
issuesSingleCommitMessage.push(
`- *type/action* should start with a lowercase letter`
);
break;
case "scope-empty":
issuesSingleCommitMessage.push(
`- *scope/component* looks empty`
);
break;
case "scope-case":
issuesSingleCommitMessage.push(
`- *scope/component* should be lowercase without whitespace, allowed special characters are \`_\` \`/\` \`.\` \`,\` \`*\` \`-\``
);
break;
case "subject-empty":
issuesSingleCommitMessage.push(`- *summary* looks empty`);
generateAISuggestion = true;
break;
case "subject-min-length":
issuesSingleCommitMessage.push(
`- *summary* looks too short`
);
generateAISuggestion = true;
break;
case "subject-case":
issuesSingleCommitMessage.push(
`- *summary* should start with a capital letter`
);
break;
case "subject-full-stop":
issuesSingleCommitMessage.push(
`- *summary* should not end with a period (full stop)`
);
break;
case "type-enum":
issuesSingleCommitMessage.push(
`- *type/action* should be one of [${allowedTypes
.map((type) => `\`${type}\``)
.join(", ")}]`
);
break;
default:
issuesSingleCommitMessage.push(`- ${warning.message}`);
}
}
if (issuesSingleCommitMessage.length) {
reportSingleCommitMessage = `- the commit message \`"${commitMessageTitle}"\`:\n${issuesSingleCommitMessage
.map((message) => ` ${message}`) // Indent each issue by 2 spaces
.join("\n")}`;
issuesAllCommitMessages.push(reportSingleCommitMessage);
}
}
// Create report
if (issuesAllCommitMessages.length) {
issuesAllCommitMessages.sort();
const basicTips = [
`- correct format of commit message should be: \`<type/action>(<scope/component>): <summary>\`, for example \`fix(esp32): Fixed startup timeout issue\``,
`- allowed types are: \`${allowedTypes}\``,
`- sufficiently descriptive message summary should be between ${minimumSummaryChars} and ${maximumSummaryChars} characters and start with an uppercase letter`,
`- avoid Jira references in commit messages (unavailable/irrelevant for our customers)`,
`- follow this [commit messages guide](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/dev-proc/Commit-messages)`,
];
let dangerMessage = `\n**Some issues found for the commit messages in this MR:**\n${issuesAllCommitMessages.join(
"\n"
)}
\n***
\n**Please consider updating these commit messages** - here are some basic tips:\n${basicTips.join(
"\n"
)}
\n \`TIP:\` You can install commit-msg pre-commit hook (\`pre-commit install -t pre-commit -t commit-msg\`) to run this check when committing.
\n***
`;
if (generateAISuggestion) {
// Create an AI-generated suggestion for the git commit message, based on the git diff and the current commit messages
const AImessageSuggestion =
await require("./aiGenerateGitMessage.js")();
dangerMessage += AImessageSuggestion;
}
recordRuleExitStatus(ruleName, "Failed");
return warn(dangerMessage);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};
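
The linting itself is delegated to `@commitlint/lint`; severity `1` in a rule tuple produces warnings (collected above) rather than hard errors. A minimal sketch of that call in isolation (the sample message is invented, and the package is assumed to be installed):

```js
// Minimal sketch of the @commitlint/lint call used by the rule above.
const lint = require("@commitlint/lint").default;

(async () => {
    const result = await lint("fix(esp32): Fixed startup timeout issue.", {
        "subject-full-stop": [1, "never", "."], // severity 1 => warning, not error
        "type-empty": [1, "never"],
    });
    // The trailing period triggers "subject-full-stop"; the type ("fix") is present.
    console.log(result.warnings.map((w) => w.name)); // [ "subject-full-stop" ]
})();
```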


@ -1,16 +0,0 @@
module.exports = {
gptStandardModelTokens: 4096,
minimumSummaryChars: 20,
maximumSummaryChars: 72,
maximumBodyLineChars: 100,
allowedTypes: [
"change",
"ci",
"docs",
"feat",
"fix",
"refactor",
"remove",
"revert",
],
};


@ -1,23 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check if the author is accidentally making a commit using a personal email
*
* @dangerjs INFO
*/
module.exports = function () {
const ruleName = 'Commits from outside Espressif';
const mrCommitAuthorEmails = danger.gitlab.commits.map(commit => commit.author_email);
const mrCommitCommitterEmails = danger.gitlab.commits.map(commit => commit.committer_email);
const emailPattern = /@espressif\.com$/; // anchored, so e.g. "user@espressif.com.example" is still reported
const filteredEmails = [...mrCommitAuthorEmails, ...mrCommitCommitterEmails].filter((email) => !emailPattern.test(email));
if (filteredEmails.length) {
recordRuleExitStatus(ruleName, "Failed");
return message(
`Some of the commits were authored or committed by developers outside Espressif: ${filteredEmails.join(', ')}. Please check if this is expected.`
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, 'Passed');
};
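
A quick illustration of why the pattern should be anchored (all addresses are invented):

```js
// Hypothetical check of the e-mail filter in isolation.
const emailPattern = /@espressif\.com$/;
const emails = [
    "dev@espressif.com", // internal -> filtered out
    "someone@example.com", // external -> reported
    "spoof@espressif.com.example", // would slip through an unanchored pattern
];
console.log(emails.filter((email) => !emailPattern.test(email)));
// [ "someone@example.com", "spoof@espressif.com.example" ]
```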


@ -1,22 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
 * Check that the MR does not have an excessive number of commits (otherwise suggest squashing)
*
* @dangerjs INFO
*/
module.exports = function () {
const ruleName = 'Number of commits in merge request';
const tooManyCommitThreshold = 2; // above this number of commits, squashing is suggested
const mrCommits = danger.gitlab.commits;
if (mrCommits.length > tooManyCommitThreshold) {
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
return message(
`You might consider squashing your ${mrCommits.length} commits (to simplify the branch history).`
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, 'Passed');
};


@ -1,238 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/** Check that there are valid JIRA links in MR description.
*
* This check extracts the "Related" section from the MR description and
* searches for JIRA ticket references in the format "Closes [JIRA ticket key]".
*
* It then extracts the closing GitHub links from the corresponding JIRA tickets and
* checks if the linked GitHub issues are still in open state.
*
* Finally, it checks if the required GitHub closing links are present in the MR's commit messages.
*
*/
module.exports = async function () {
const ruleName = 'Jira ticket references';
const axios = require("axios");
const mrDescription = danger.gitlab.mr.description;
const mrCommitMessages = danger.gitlab.commits.map(
(commit) => commit.message
);
const jiraTicketRegex = /[A-Z0-9]+-[0-9]+/;
let partMessages = []; // Create an empty array for records of individual issues
// Parse section "Related" from MR Description
const sectionRelated = extractSectionRelated(mrDescription);
if (
!sectionRelated.header || // No section Related in MR description or ...
!jiraTicketRegex.test(sectionRelated.content) // no Jira links in section Related
) {
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
return message(
"Please consider adding references to JIRA issues in the `Related` section of the MR description."
);
}
// Get closing (only) JIRA tickets
const jiraTickets = findClosingJiraTickets(sectionRelated.content);
for (const ticket of jiraTickets) {
ticket.jiraUIUrl = `https://jira.espressif.com:8443/browse/${ticket.ticketName}`;
if (!ticket.correctFormat) {
partMessages.push(
`- closing ticket \`${ticket.record}\` seems to be in the wrong format (or inaccessible to Jira DangerBot). The correct format is for example \`- Closes JIRA-123\`.`
);
}
// Get closing GitHub issue links from JIRA tickets
const closingGithubLink = await getGitHubClosingLink(ticket.ticketName);
if (closingGithubLink) {
ticket.closingGithubLink = closingGithubLink;
} else if (closingGithubLink === null) {
partMessages.push(
`- the Jira issue number [\`${ticket.ticketName}\`](${ticket.jiraUIUrl}) seems to be invalid (please check if the ticket number is correct)`
);
continue; // Handle unreachable JIRA tickets; skip the following checks
} else {
continue; // Jira ticket has no GitHub closing link; skip the following checks
}
// Get still open GitHub issues
const githubIssueStatusOpen = await isGithubIssueOpen(
ticket.closingGithubLink
);
ticket.isOpen = githubIssueStatusOpen;
if (githubIssueStatusOpen === null) {
// Handle unreachable GitHub issues
partMessages.push(
`- the GitHub issue [\`${ticket.closingGithubLink}\`](${ticket.closingGithubLink}) does not seem to exist on GitHub (referenced from JIRA ticket [\`${ticket.ticketName}\`](${ticket.jiraUIUrl}) )`
);
continue; // skip the following checks
}
// Search in commit message if there are all GitHub closing links (from Related section) for still open GH issues
if (ticket.isOpen) {
if (
!mrCommitMessages.some((item) =>
item.includes(`Closes ${ticket.closingGithubLink}`)
)
) {
partMessages.push(
`- please add \`Closes ${ticket.closingGithubLink}\` to the commit message`
);
}
}
}
// Create report / DangerJS check feedback if issues with Jira links were found
if (partMessages.length) {
return createReport(); // createReport() records the "Failed" status; returning prevents it from being overwritten below
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, 'Passed');
// ---------------------------------------------------------------
/**
* This function takes in a string mrDescription which contains a Markdown-formatted text
* related to a Merge Request (MR) in a GitLab repository. It searches for a section titled "Related"
* and extracts the content of that section. If the section is not found, it returns an object
* indicating that the header and content are null. If the section is found but empty, it returns
* an object indicating that the header is present but the content is null. If the section is found
* with content, it returns an object indicating that the header is present and the content of the
* "Related" section.
*
* @param {string} mrDescription - The Markdown-formatted text related to the Merge Request.
* @returns {{
* header: string | boolean | null,
* content: string | null
* }} - An object containing the header and content of the "Related" section, if present.
*/
function extractSectionRelated(mrDescription) {
const regexSectionRelated = /## Related([\s\S]*?)(?=## |$)/;
const sectionRelated = mrDescription.match(regexSectionRelated);
if (!sectionRelated) {
return { header: null, content: null }; // Section "Related" is missing
}
const content = sectionRelated[1].replace(/(\r\n|\n|\r)/gm, ""); // Remove empty lines
if (!content.length) {
return { header: true, content: null }; // Section "Related" is present, but empty
}
return { header: true, content: sectionRelated[1] }; // Found section "Related" with content
}
/**
* Finds all JIRA tickets that are being closed in the given "Related" section content.
* The function searches for lines that start with "- Closes" and checks whether they match
* the format "- Closes [uppercase letters/digits]-[numbers]".
* @param {string} sectionRelatedcontent - A string that contains lines with mentions of JIRA tickets
* @returns {Array} An array of objects with "record", "ticketName" and "correctFormat" properties
*/
function findClosingJiraTickets(sectionRelatedcontent) {
let closingTickets = [];
const lines = sectionRelatedcontent.split("\n");
for (const line of lines) {
if (!line.startsWith("- Closes")) {
continue; // Not closing-type ticket, skip
}
const correctJiraClosingLinkFormat = new RegExp(
`^- Closes ${jiraTicketRegex.source}$`
);
const matchedJiraTicket = line.match(jiraTicketRegex);
if (matchedJiraTicket) {
if (!correctJiraClosingLinkFormat.test(line)) {
closingTickets.push({
record: line,
ticketName: matchedJiraTicket[0],
correctFormat: false,
});
} else {
closingTickets.push({
record: line,
ticketName: matchedJiraTicket[0],
correctFormat: true,
});
}
}
}
return closingTickets;
}
/**
* This function takes a JIRA issue key and retrieves the description from JIRA's API.
* It then searches the description for a GitHub closing link in the format "Closes https://github.com/owner/repo/issues/123".
* If no closing link is found it returns false; if the ticket cannot be retrieved it returns null.
* @param {string} jiraIssueKey - The key of the JIRA issue to search for the GitHub closing link.
* @returns {Promise<string|false|null>} - A promise that resolves to the GitHub closing link if found,
* false if the ticket has no closing link, or null if the ticket cannot be retrieved.
*/
async function getGitHubClosingLink(jiraIssueKey) {
let jiraDescription = "";
// Get JIRA ticket description content
try {
const response = await axios({
url: `https://jira.espressif.com:8443/rest/api/latest/issue/${jiraIssueKey}`,
auth: {
username: process.env.DANGER_JIRA_USER,
password: process.env.DANGER_JIRA_PASSWORD,
},
});
jiraDescription = response.data.fields.description
? response.data.fields.description
: ""; // if the Jira ticket has an unfilled Description, the ".description" property is missing in API response - in that case set "jiraDescription" to an empty string
} catch (error) {
return null;
}
// Find GitHub closing link in description
const regexClosingGhLink =
/Closes\s+(https:\/\/github.com\/\S+\/\S+\/issues\/\d+)/;
const closingGithubLink = jiraDescription.match(regexClosingGhLink);
if (closingGithubLink) {
return closingGithubLink[1];
} else {
return false; // Jira issue has no GitHub closing link in description
}
}
/**
* Check if a GitHub issue linked in a merge request is still open.
*
* @param {string} link - The link to the GitHub issue.
* @returns {Promise<boolean|null>} A promise that resolves to true/false for an open/closed issue,
* or null if the issue cannot be fetched.
*/
async function isGithubIssueOpen(link) {
const parsedUrl = new URL(link);
const [owner, repo] = parsedUrl.pathname.split("/").slice(1, 3);
const issueNumber = parsedUrl.pathname.split("/").slice(-1)[0];
try {
const response = await axios.get(
`https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`
);
return response.data.state === "open"; // return True if GitHub issue is open
} catch (error) {
return null; // GET request to issue fails
}
}
function createReport() {
partMessages.sort();
let dangerMessage = `Some issues found for the related JIRA tickets in this MR:\n${partMessages.join(
"\n"
)}`;
recordRuleExitStatus(ruleName, "Failed");
return warn(dangerMessage);
}
};
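
A sketch of what the "Related" section parsing above extracts (the description text is invented):

```js
// Hypothetical MR description run through the regexes used above.
const demoDescription = [
    "Some intro text.",
    "## Related",
    "- Closes IDF-1234",
    "- Closes some ticket described in free text",
    "## Release notes",
].join("\n");
const section = demoDescription.match(/## Related([\s\S]*?)(?=## |$)/);
console.log(section[1].trim());
// "- Closes IDF-1234"        -> ticketName "IDF-1234", correctFormat: true
// "- Closes some ticket ..." -> no [A-Z0-9]+-[0-9]+ match, so it is ignored
```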


@ -1,24 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
 * Check if the MR has a sufficiently detailed description.
*
* @dangerjs WARN
*/
module.exports = function () {
const ruleName = "Merge request sufficient description";
const mrDescription = danger.gitlab.mr.description;
const descriptionChunk = mrDescription.match(/^([^#]*)/)[1].trim(); // Extract all text before the first section header (i.e., the text before the "## Release notes")
const shortMrDescriptionThreshold = 50; // Description is considered too short below this number of characters
if (descriptionChunk.length < shortMrDescriptionThreshold) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
"The MR description looks very brief, please check if more details can be added."
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};
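
The length check only counts the free-text part before the first Markdown header. A sketch with an invented description:

```js
// Hypothetical description run through the length check above.
const demoDescription = "Fix typo.\n## Release notes\n- [docs] Fixed a typo";
const descriptionChunk = demoDescription.match(/^([^#]*)/)[1].trim(); // "Fix typo."
console.log(descriptionChunk.length < 50); // true -> considered too brief -> warn
```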


@ -1,99 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check if MR Description contains mandatory section "Release notes"
*
* Extracts the content of the "Release notes" section from the GitLab merge request description.
*
 * @dangerjs WARN (if the section is missing, empty, or in the wrong markdown format)
*/
module.exports = function () {
const ruleName = 'Merge request Release Notes section';
const mrDescription = danger.gitlab.mr.description;
const wiki_link = `${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/rfc/How-to-write-release-notes-properly`;
const regexSectionReleaseNotes = /## Release notes([\s\S]*?)(?=## |$)/;
const regexValidEntry = /^\s*[-*+]\s+.+/;
const regexNoReleaseNotes = /no release note/i;
const sectionReleaseNotes = mrDescription.match(regexSectionReleaseNotes);
if (!sectionReleaseNotes) {
recordRuleExitStatus(ruleName, "Failed");
return warn(`The \`Release Notes\` section seems to be missing. Please check if the section header in the MR description is present and in the correct markdown format ("## Release notes").\n\nSee [Release Notes Format Rules](${wiki_link}).`);
}
const releaseNotesLines = sectionReleaseNotes[1].replace(/<!--[\s\S]*?-->/g, '')
const lines = releaseNotesLines.split("\n").filter(s => s.trim().length > 0);
let valid_entries_found = 0;
let no_release_notes_found = false;
let violations = [];
lines.forEach((line) => {
if (line.match(regexValidEntry)) {
valid_entries_found++;
const error_msg = check_entry(line);
if (error_msg) {
violations.push(error_msg);
}
} else if (line.match(regexNoReleaseNotes)) {
no_release_notes_found = true;
}
});
let error_output = [];
if (violations.length > 0) {
error_output = [...error_output, 'Invalid release note entries:', violations.join('\n')];
}
if (no_release_notes_found) {
if (valid_entries_found > 0) {
error_output.push('A `No release notes` comment shows up even though there are valid entries. Remove the bullets before comments in the release notes section.');
}
} else {
if (!valid_entries_found) {
error_output.push('The `Release Notes` section seems to have no valid entries. Add bullets before valid entries, or add a `No release notes` comment to suppress this error if you intend to have no release notes.');
}
}
if (error_output.length > 0) {
// Paragraphs joined by double `\n`s.
error_output = [...error_output, `See [Release Notes Format Guide](${wiki_link}).`].join('\n\n');
recordRuleExitStatus(ruleName, "Failed");
return warn(error_output);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, 'Passed');
};
function check_entry(entry) {
const entry_str = `- \`${entry}\``;
const indent = " ";
if (entry.match(/no\s+release\s+note/i)) {
return [entry_str, `${indent}- \`No release notes\` comment shouldn't start with a bullet.`].join('\n');
}
const regex = /^(\s*)[-*+]\s+\[([^\]]+)\]\s+(.*)$/;
const match = regex.exec(entry);
if (!match) {
return [entry_str, `${indent}- Please specify the [area] to which the change belongs (see guide). If this line is just a comment, remove the bullet.`].join('\n');
}
// area is in match[2]
const description = match[3].trim();
let violations = [];
if (match[1]) {
violations.push(`${indent}- Release note entry should start from the beginning of line. (Nested release note not allowed.)`);
}
if (!/^[A-Z0-9]/.test(description)) {
violations.push(`${indent}- Release note statement should start with a capital letter or digit.`);
}
if (violations.length > 0) {
return [entry_str, ...violations].join('\n');
}
return null;
}
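
A few invented entries fed to `check_entry()` above illustrate its failure modes:

```js
// Hypothetical release-notes entries (all invented) run through check_entry().
console.log(check_entry("- [wifi] Fixed a reconnect race"));
// -> null (valid: bulleted, has an [area], summary starts with a capital letter)
console.log(check_entry("- Fixed a reconnect race"));
// -> asks to specify the [area] (no [...] block after the bullet)
console.log(check_entry("    - [wifi] fixed a reconnect race"));
// -> nested entry, and the summary does not start with a capital letter or digit
```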


@ -1,280 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check the documentation files in this MR.
*
* Generate an object with all docs/ files found in this MR with paths to their EN/CN versions.
*
* For common files (both language versions exist in this MR), compare the lines of both files.
* Ignore if the CN file is only a single line file with an "include" reference to the EN version.
*
 * For files that only have a CN version in this MR, add a note to the report that an EN file also needs to be created.
*
* For a file that only has an EN version in this MR, try loading its CN version from the target Gitlab branch.
* If its CN version doesn't exist in the repository or it does exist,
* but its contents are larger than just an "include" link to the EN version (it's a full-size file),
* add a message to the report
*
* Create a compiled report with the docs/ files issues found and set its severity (WARN/INFO).
* Severity is based on the presence of "needs translation: ??" labels in this MR
*
* @dangerjs WARN (if docs translation issues in the MR)
* @dangerjs INFO (if docs translation issues in the MR and the user has already added translation labels).
* Adding translation labels "needs translation: XX" automatically notifies the Documentation team
*
* @dangerjs WARN (if there are no docs issues in MR, but translation labels have been added anyway)
*
*/
module.exports = async function () {
const ruleName = 'Documentation translation';
let partMessages = []; // Create an empty array for records of individual issues
const pathProject = "espressif/esp-idf";
const regexIncludeLink = /\.\.\sinclude::\s((\.\.\/)+)en\//;
const allMrFiles = [
...danger.git.modified_files,
...danger.git.created_files,
...danger.git.deleted_files,
];
const docsFilesMR = parseMrDocsFiles(allMrFiles); // Create single object of all doc files in MR with names, paths and groups
// Both versions (EN and CN) of document found changed in this MR
for (const file of docsFilesMR.bothFilesInMr) {
file.contentEn = await getContentFileInMR(file.fileEnPath); // Get content of English file
file.linesEn = file.contentEn.split("\n").length; // Get number of lines of English file
file.contentCn = await getContentFileInMR(file.fileCnPath); // Get content of Chinese file
file.linesCn = file.contentCn.split("\n").length; // Get number of lines of Chinese file
// Compare number of lines in both versions
if (file.linesEn !== file.linesCn) {
// Check if CN file is only link to EN file
if (!regexIncludeLink.test(file.contentCn)) {
// if not just a link ...
partMessages.push(
`- please synchronize the EN and CN version of \`${file.fileName}\`. [\`${file.fileEnPath}\`](${file.fileUrlRepoEN}) has ${file.linesEn} lines; [\`${file.fileCnPath}\`](${file.fileUrlRepoCN}) has ${file.linesCn} lines.`
);
}
}
}
// Only Chinese version of document found changed in this MR
for (const file of docsFilesMR.onlyCnFilesInMr) {
partMessages.push(
`- file \`${file.fileEnPath}\` doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileEnPath}\` to this MR.`
);
}
// Only English version of document found in this MR
for (const file of docsFilesMR.onlyEnFilesInMr) {
const targetBranch = danger.gitlab.mr.target_branch;
file.contentCn = await getContentFileInGitlab(
file.fileCnPath,
targetBranch
); // Try to fetch CN file from target branch of Gitlab repository and store content
if (file.contentCn) {
// File found on target branch in Gitlab repository
if (!regexIncludeLink.test(file.contentCn)) {
// File on Gitlab master is NOT just an ..include:: link to ENG version
file.fileUrlRepoMasterCN = `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${targetBranch}/${file.fileCnPath}`;
partMessages.push(
`- file \`${file.fileCnPath}\` was not updated in this MR, but a full document (not just a link to the EN version) was found unchanged in the target branch of the GitLab repository [\`${file.fileCnPath}\`](${file.fileUrlRepoMasterCN}). Please update \`${file.fileCnPath}\` in this MR.`
);
}
} else {
// File failed to fetch, probably does not exist in the target branch
partMessages.push(
`- file \`${file.fileCnPath}\` probably doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileCnPath}\` to this MR.`
);
}
}
// Create a report with the documentation issues found in this MR
// (createReport() records the rule status itself, including "Passed" when there is nothing to report)
return createReport();
/**
* Generates an object that represents the relationships between files in two different languages found in this MR.
*
* @param {string[]} docsFilesEN - An array of file paths for documents in English.
* @param {string[]} docsFilesCN - An array of file paths for documents in Chinese.
* @returns {Object} An object with the following properties:
* - bothFilesInMr: An array of objects that represent files that found in MR in both languages. Each object has the following properties:
* - fileName: The name of the file.
* - fileEnPath: The path to the file in English.
* - fileCnPath: The path to the file in Chinese.
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
* - onlyCnFilesInMr: An array of objects that represent files that only found in MR in English. Each object has the following properties:
* - fileName: The name of the file.
* - fileEnPath: The path to the file in English.
* - fileCnPath: The FUTURE path to the file in Chinese.
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
* - onlyEnFilesInMr: An array of objects that represent files that only found in MR in Chinese. Each object has the following properties:
* - fileName: The name of the file.
* - fileEnPath: The FUTURE path to the file in English.
* - fileCnPath: The path to the file in Chinese.
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
*/
function parseMrDocsFiles(allMrFiles) {
const path = require("path");
const mrBranch = danger.gitlab.mr.source_branch;
const docsEnFilesMrPath = allMrFiles.filter((file) =>
file.startsWith("docs/en")
); // Filter all English doc files in MR
const docsCnFilesMrPath = allMrFiles.filter((file) =>
file.startsWith("docs/zh_CN")
); // Filter all Chinese doc files in MR
const docsEnFileNames = docsEnFilesMrPath.map((filePath) =>
path.basename(filePath)
); // Get (base) file names for English docs
const docsCnFileNames = docsCnFilesMrPath.map((filePath) =>
path.basename(filePath)
); // Get (base) file names for Chinese docs
const bothFileNames = docsEnFileNames.filter((fileName) =>
docsCnFileNames.includes(fileName)
); // Get file names that are common to both English and Chinese docs
const onlyEnFileNames = docsEnFileNames.filter(
(fileName) => !docsCnFileNames.includes(fileName)
); // Get file names that are only present in English version
const onlyCnFileNames = docsCnFileNames.filter(
(fileName) => !docsEnFileNames.includes(fileName)
); // Get file names that are only present in Chinese version
return {
bothFilesInMr: bothFileNames.map((fileName) => {
const fileEnPath =
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
const fileCnPath =
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
return {
fileName,
fileEnPath,
fileCnPath,
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
};
}),
onlyEnFilesInMr: onlyEnFileNames.map((fileName) => {
const fileEnPath =
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
const fileCnPath = fileEnPath.replace("en", "zh_CN"); // Generate future CN file path
return {
fileName,
fileEnPath,
fileCnPath,
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
};
}),
onlyCnFilesInMr: onlyCnFileNames.map((fileName) => {
const fileCnPath =
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
const fileEnPath = fileCnPath.replace("zh_CN", "en"); // Generate future EN file path
return {
fileName,
fileEnPath,
fileCnPath,
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
};
}),
};
}
/**
* Retrieves the contents of a file from GitLab using the GitLab API.
*
* @param {string} filePath - The path of the file to retrieve.
* @param {string} branch - The branch where the file is located.
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
*/
async function getContentFileInGitlab(filePath, branch) {
const axios = require("axios");
const encFilePath = encodeURIComponent(filePath);
const encBranch = encodeURIComponent(branch);
const urlApi = `${process.env.DANGER_GITLAB_API_BASE_URL}/projects/${danger.gitlab.mr.project_id}/repository/files/${encFilePath}/raw?ref=${encBranch}`;
try {
const response = await axios.get(urlApi, {
headers: {
"Private-Token": process.env.DANGER_GITLAB_API_TOKEN,
},
});
return response.data.trim(); // Trim trailing new line
} catch (error) {
return null;
}
}
/**
* Retrieves the contents of a file in a DangerJS merge request object.
*
* @param {string} filePath - The path of the file to retrieve.
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
*/
async function getContentFileInMR(filePath) {
try {
const content = await danger.git.diffForFile(filePath);
const fileContentAfter = content.after.trim(); // Trim trailing new lines
return fileContentAfter;
} catch (error) {
console.error(`Error while getting file content MR: ${error}`);
return null;
}
}
/**
* Creates a compiled report for found documentation issues in the current MR and alerts the Documentation team if there are any "needs translation" labels present.
*
* Report if documentation labels have been added by mistake.
*/
function createReport() {
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
const regexTranslationLabel = /needs translation:/i;
const translationLabelsPresent = mrLabels.some((label) =>
regexTranslationLabel.test(label)
); // Check if any of MR labels are "needs translation: XX"
// No docs issues found in MR, but translation labels have been added anyway
if (!partMessages.length && translationLabelsPresent) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
`Please remove the \`needs translation: XX\` labels. For documents that need to be translated from scratch, the Doc team will translate them in the future. At the current stage, we only focus on updating existing EN and CN translations to keep them in sync.`
);
}
// Docs issues found in this MR
partMessages.sort();
let dangerMessage = `Some of the documentation files in this MR seem to have translations issues:\n${partMessages.join(
"\n"
)}\n`;
if (partMessages.length) {
if (!translationLabelsPresent) {
dangerMessage += `
\nWhen synchronizing the EN and CN versions, please follow the [Documentation Code](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/contribute/documenting-code.html#standardize-document-format). The total number of lines of the EN and CN versions should be the same.\n
\nIf you have difficulty in providing translation, you can contact Documentation team by adding <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> labels into this MR and retrying Danger CI job. The documentation team will be automatically notified and will help you with the translations before the merge.\n`;
recordRuleExitStatus(ruleName, "Failed");
return warn(dangerMessage); // no "needs translation: XX" labels in MR; report issues as warn
} else {
dangerMessage += `\nTranslation labels <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> were added - this will automatically notify the Documentation team to help you with translation issues.`;
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
return message(dangerMessage); // "needs translation: XX" labels were found in MR and Docs team was notified; report issues as info
}
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
}
};
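
Two small helpers drive most of the decisions above: the "include link" regex that detects CN files which merely reference the EN version, and the path substitution that derives the counterpart file path. A sketch (paths and content invented):

```js
// Hypothetical illustration of two checks used by the rule above.
const regexIncludeLink = /\.\.\sinclude::\s((\.\.\/)+)en\//;
console.log(regexIncludeLink.test(".. include:: ../en/api-guides/wifi.rst"));
// true -> the CN file is only a link to the EN doc, so differing line counts are fine

const fileEnPath = "docs/en/api-guides/ble.rst";
console.log(fileEnPath.replace("en", "zh_CN"));
// "docs/zh_CN/api-guides/ble.rst" -> expected CN path for an EN-only file
```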


@ -1,22 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
* Check if MR is too large (more than 1000 lines of changes)
*
* @dangerjs INFO
*/
module.exports = async function () {
const ruleName = "Merge request size (number of changed lines)";
const bigMrLinesOfCodeThreshold = 1000;
const totalLines = await danger.git.linesOfCode();
if (totalLines > bigMrLinesOfCodeThreshold) {
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
return message(
`This MR seems to be quite large (total lines of code: ${totalLines}), you might consider splitting it into smaller MRs`
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};


@ -1,31 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
 * Throw Danger WARN if the branch name contains more than one slash or uppercase letters
 *
 * @dangerjs WARN
*/
module.exports = function () {
const ruleName = "Source branch name";
const sourceBranch = danger.gitlab.mr.source_branch;
// Check if the source branch name contains more than one slash
const slashCount = (sourceBranch.match(/\//g) || []).length;
if (slashCount > 1) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
`The source branch name \`${sourceBranch}\` contains more than one slash. This can cause trouble with git sync. Please rename the branch.`
);
}
// Check if the source branch name contains any uppercase letters
if (sourceBranch !== sourceBranch.toLowerCase()) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
`The source branch name \`${sourceBranch}\` contains uppercase letters. This can cause trouble on case-insensitive file systems (macOS). Please use only lowercase letters.`
);
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};
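
Both branch-name checks are simple string tests; a sketch with an invented branch name:

```js
// Hypothetical branch name run through the checks above.
const sourceBranch = "feature/wifi/Scan-Fix";
console.log((sourceBranch.match(/\//g) || []).length > 1); // true -> more than one slash -> warn
console.log(sourceBranch !== sourceBranch.toLowerCase()); // true -> contains uppercase letters -> warn
```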


@ -1,31 +0,0 @@
const { recordRuleExitStatus } = require("./configParameters.js");
/**
 * Check if the MR title contains a "WIP" prefix ("WIP: ...", "[WIP]", "(WIP)", etc.).
*
* @dangerjs WARN
*/
module.exports = function () {
const ruleName = 'Merge request not in Draft or WIP state';
const mrTitle = danger.gitlab.mr.title;
const regexes = [
{ prefix: "WIP", regex: /^WIP:/i },
{ prefix: "W.I.P", regex: /^W\.I\.P/i },
{ prefix: "[WIP]", regex: /^\[WIP/i },
{ prefix: "[W.I.P]", regex: /^\[W\.I\.P/i },
{ prefix: "(WIP)", regex: /^\(WIP/i },
{ prefix: "(W.I.P)", regex: /^\(W\.I\.P/i },
];
for (const item of regexes) {
if (item.regex.test(mrTitle)) {
recordRuleExitStatus(ruleName, "Failed");
return warn(
`Please remove the \`${item.prefix}\` prefix from the MR title before merging this MR.`
);
}
}
// At this point, the rule has passed
recordRuleExitStatus(ruleName, "Passed");
};

(The diff for this file is too large to display.)


@ -1,12 +0,0 @@
{
"name": "dangerjs-esp-idf",
"description": "Merge request automatic linter",
"main": "dangerfile.js",
"dependencies": {
"danger": "^11.2.3",
"axios": "^1.3.3",
"langchain": "^0.0.53",
"openai-gpt-token-counter": "^1.0.3",
"@commitlint/lint": "^13.1.0"
}
}


@ -42,7 +42,7 @@ _For other small/non-public changes, which are not expected to be in the release
_Don't touch the subsection titles below, they will be parsed by scripts._
## Release notes (Mandatory)
## Release notes <!-- Mandatory -->
_Changes made in this MR that should go into the **Release Notes** should be listed here. Please use **past tense** and *specify the area (see maintainers page of IDF internal wiki)*. If there is a subscope, include it and separate with slash (`/`). Minor changes can go to the descriptions above without a release notes entry._

.gitmodules vendored

@ -49,12 +49,12 @@
[submodule "components/json/cJSON"]
path = components/json/cJSON
url = ../../DaveGamble/cJSON.git
sbom-version = 1.7.16
sbom-version = 1.7.17
sbom-cpe = cpe:2.3:a:cjson_project:cjson:{}:*:*:*:*:*:*:*
sbom-supplier = Person: Dave Gamble
sbom-url = https://github.com/DaveGamble/cJSON
sbom-description = Ultralightweight JSON parser in ANSI C
sbom-hash = cb8693b058ba302f4829ec6d03f609ac6f848546
sbom-hash = 87d8f0961a01bf09bef98ff89bae9fdec42181ee
[submodule "components/mbedtls/mbedtls"]
path = components/mbedtls/mbedtls
@ -81,11 +81,11 @@
[submodule "components/unity/unity"]
path = components/unity/unity
url = ../../ThrowTheSwitch/Unity.git
sbom-version = v2.4.3-51-g7d2bf62b7e6a
sbom-version = v2.6.0-RC1
sbom-supplier = Organization: ThrowTheSwitch community <http://www.throwtheswitch.org>
sbom-url = https://github.com/ThrowTheSwitch/Unity
sbom-description = Simple Unit Testing for C
sbom-hash = 7d2bf62b7e6afaf38153041a9d53c21aeeca9a25
sbom-hash = bf560290f6020737eafaa8b5cbd2177c3956c03f
[submodule "components/bt/host/nimble/nimble"]
path = components/bt/host/nimble/nimble
@ -120,10 +120,6 @@
path = components/openthread/lib
url = ../../espressif/esp-thread-lib.git
[submodule "components/ieee802154/lib"]
path = components/ieee802154/lib
url = ../../espressif/esp-ieee802154-lib.git
[submodule "components/bt/controller/lib_esp32h2/esp32h2-bt-lib"]
path = components/bt/controller/lib_esp32h2/esp32h2-bt-lib
url = ../../espressif/esp32h2-bt-lib.git
@ -143,3 +139,7 @@
[submodule "components/esp_coex/lib"]
path = components/esp_coex/lib
url = ../../espressif/esp-coex-lib.git
[submodule "components/bt/esp_ble_mesh/lib/lib"]
path = components/bt/esp_ble_mesh/lib/lib
url = ../../espressif/esp-ble-mesh-lib.git


@ -1,9 +1,11 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
default_stages: [commit]
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
rev: v4.5.0
hooks:
- id: trailing-whitespace
# note: whitespace exclusions use multiline regex, see https://pre-commit.com/#regular-expressions
@ -39,11 +41,12 @@ repos:
hooks:
- id: flake8
args: ['--config=.flake8', '--tee', '--benchmark']
- repo: https://github.com/pycqa/isort
rev: 5.12.0 # python 3.8 compatible
- repo: https://github.com/asottile/reorder-python-imports
rev: v3.12.0
hooks:
- id: isort
name: isort (python)
- id: reorder-python-imports
name: Reorder Python imports
args: [--py38-plus]
exclude: >
(?x)^(
.*_pb2.py
@ -62,11 +65,6 @@ repos:
language: python
pass_filenames: false
always_run: true
- id: check-kconfigs
name: Validate Kconfig files
entry: tools/ci/check_kconfigs.py
language: python
files: '^Kconfig$|Kconfig.*$'
- id: check-deprecated-kconfigs-options
name: Check if any Kconfig Options Deprecated
entry: tools/ci/check_deprecated_kconfigs.py
@ -87,20 +85,13 @@ repos:
always_run: true
files: '\.gitlab/CODEOWNERS'
pass_filenames: false
- id: check-rules-yml
name: Check rules.yml all rules have at lease one job applied, all rules needed exist
entry: tools/ci/check_rules_yml.py
language: python
files: '\.gitlab/ci/.+\.yml|\.gitlab-ci.yml|\.gitmodules'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- id: check-generated-rules
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
entry: tools/ci/generate_rules.py
language: python
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml'
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml|.gitlab-ci.yml'
pass_filenames: false
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- id: mypy-check
@ -111,6 +102,7 @@ repos:
- 'mypy-extensions==0.4.3'
- 'types-setuptools==57.4.14'
- 'types-PyYAML==0.1.9'
- 'types-requests'
exclude: >
(?x)^(
.*_pb2.py
@ -157,14 +149,20 @@ repos:
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- idf_build_apps~=1.0
- id: sort-build-test-rules-ymls
name: sort .build-test-rules.yml files
entry: tools/ci/check_build_test_rules.py sort-yaml
- idf-build-apps~=2.0
- id: sort-yaml-files
name: sort yaml files
entry: tools/ci/sort_yaml.py
language: python
files: '\.build-test-rules\.yml'
files: '\.build-test-rules\.yml$|known_generate_test_child_pipeline_warnings\.yml$'
additional_dependencies:
- ruamel.yaml
- id: sort-yaml-test
name: sort yaml test
entry: python -m unittest tools/ci/sort_yaml.py
language: python
files: 'tools/ci/sort_yaml\.py$'
additional_dependencies:
- PyYAML == 5.3.1
- ruamel.yaml
- id: check-build-test-rules-path-exists
name: check path in .build-test-rules.yml exists
@ -175,18 +173,20 @@ repos:
always_run: true
pass_filenames: false
require_serial: true
- id: submodule-sbom-hash-check
name: Check if sbom-hash values for submodules in .gitmodules match submodules checkout hash in git tree
entry: python tools/test_sbom/test_submodules.py
language: python
always_run: true
pass_filenames: false
- id: cleanup-ignore-lists
name: Remove non-existing patterns from ignore lists
entry: tools/ci/cleanup_ignore_lists.py
language: python
always_run: true
require_serial: true
- id: gitlab-yaml-linter
name: Check gitlab yaml files
entry: tools/ci/gitlab_yaml_linter.py
language: python
files: '\.gitlab-ci\.yml|\.gitlab/ci/.+\.yml'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:
@ -198,12 +198,12 @@ repos:
- id: check-copyright
args: ['--ignore', 'tools/ci/check_copyright_ignore.txt', '--config', 'tools/ci/check_copyright_config.yaml']
- repo: https://github.com/espressif/conventional-precommit-linter
rev: v1.2.1
rev: v1.6.0
hooks:
- id: conventional-precommit-linter
stages: [commit-msg]
- repo: https://github.com/espressif/astyle_py.git
rev: v1.0.2
rev: v1.0.5
hooks:
- id: astyle_py
# If you are modifying astyle version, update tools/format.sh as well
@ -219,3 +219,18 @@ repos:
name: shellcheck dash (export.sh)
args: ['--shell', 'dash', '-x']
files: 'export.sh'
- repo: https://github.com/espressif/esp-idf-sbom.git
rev: v0.13.0
hooks:
- id: validate-sbom-manifest
stages: [post-commit]
- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v0.9.1
hooks:
- id: 'sphinx-lint'
name: Lint rST files in docs folder using Sphinx Lint
files: ^(docs/en|docs/zh_CN)/.*\.(rst|inc)$
- repo: https://github.com/espressif/esp-idf-kconfig.git
rev: v2.1.0
hooks:
- id: check-kconfig-files


@ -15,7 +15,7 @@ python:
install:
- requirements: docs/requirements.txt
# We need to list all the submodules included in documenation build by DOxygen
# We need to list all the submodules included in documentation build by Doxygen
submodules:
include:
- components/mqtt/esp-mqtt

.vale.ini 100644

@ -0,0 +1,119 @@
###################
### Vale Config ###
###################
# This is a Vale linter configuration file.
# - Repo: esp-idf
# - Based on Default config: v0-1-1
# It lists all necessary parameters to configure Vale for your project.
# For official documentation on all config settings, see
# https://vale.sh/docs/topics/config
##############
### Global ###
##############
# This section lists core settings applying to Vale itself.
# Specify path to external resources (e.g., styles and vocab files).
# The path value may be absolute or relative to this configuration file.
StylesPath = .vale/styles
# Specify the minimum alert severity that Vale will report.
MinAlertLevel = suggestion # "suggestion", "warning", or "error"
# Specify vocabulary for special treatment.
# Create a folder in <StylesPath>/Vocab/<name>/ and add its name here
# The folder should contain two files:
# - accept.txt -- lists words with accepted case-sensitive spelling
# - reject.txt -- lists words whose occurrences throw an error
# Vocab = Espressif
# Specify the packages to import into your project.
# A package is a zip file containing a number of rules (style) written in YAML.
# For a list of official packages, see Package Hub at https://vale.sh/hub/
# For official documentation on packages, see
# https://vale.sh/docs/topics/packages/
# Before linting, navigate to your project and run `vale sync` to download
# the official packages specified below.
# Packages = Package1, Package2, \
# https://example.com/path/to/package/Package.zip
Packages = Google, Microsoft, RedHat, \
https://dl.espressif.com/dl/esp-vale-config/Espressif-latest.zip
###############
### Formats ###
###############
# This section enables association of "unknown" formats with the ones
# supported by Vale. For official documentation on supported formats, see
# https://vale.sh/docs/topics/scoping/
[formats]
# For example, treat MDX files as Markdown files.
# mdx = md
################################
### Format-specific settings ###
################################
# This section lists the settings that apply to specific file formats
# based on their glob pattern.
# Settings provided under a more specific glob pattern,
# such as [*.{md,txt}] will override those in [*].
[*.{md,rst}]
# Enable styles to activate all rules included in them.
# BasedOnStyles = Style1, Style2
BasedOnStyles = Vale, Espressif-latest
### Deactivate individual rules ###
### in enabled styles.
# Style1.Rule1 = NO
Vale.Repetition = NO
Vale.Spelling = NO
Espressif-latest.Admonitions = NO
Espressif-latest.Contractions = NO
Espressif-latest.Monospace = NO
Espressif-latest.PascalCamelCase = NO
### Change default severity level ###
### of an activated rule.
# Choose between "suggestion", "warning", or "error".
# Style1.Rule2 = error
### Activate individual rules ###
### in non-enabled styles stored in <StylesPath>.
# Style1.Rule = YES
Google.Gender = YES
Google.GenderBias = YES
Google.Slang = YES
Google.Spacing = YES
Microsoft.DateNumbers = YES
Microsoft.Ellipses = YES
Microsoft.FirstPerson = YES
Microsoft.Hyphens = YES
Microsoft.Ordinal = YES
Microsoft.OxfordComma = YES
Microsoft.Percentages = YES
Microsoft.RangeTime = YES
Microsoft.Semicolon = YES
Microsoft.SentenceLength = YES
Microsoft.Suspended = YES
Microsoft.Units = YES
Microsoft.URLFormat = YES
Microsoft.We = YES
Microsoft.Wordiness = YES
RedHat.Contractions = YES
RedHat.RepeatedWords = YES


@ -22,6 +22,9 @@ if(NOT BOOTLOADER_BUILD)
endif()
elseif(CONFIG_COMPILER_OPTIMIZATION_DEBUG)
list(APPEND compile_options "-Og")
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
endif()
elseif(CONFIG_COMPILER_OPTIMIZATION_NONE)
list(APPEND compile_options "-O0")
elseif(CONFIG_COMPILER_OPTIMIZATION_PERF)
@ -37,6 +40,9 @@ else() # BOOTLOADER_BUILD
endif()
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_DEBUG)
list(APPEND compile_options "-Og")
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
endif()
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_NONE)
list(APPEND compile_options "-O0")
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_PERF)
@ -115,8 +121,10 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
list(APPEND compile_options "-Wno-c2x-extensions")
# warning on xMPU_SETTINGS for esp32s2 has size 0 for C and 1 for C++
list(APPEND compile_options "-Wno-extern-c-compat")
# warning: implicit truncation from 'int' to a one-bit wide bit-field changes value from 1 to -1
list(APPEND compile_options "-Wno-single-bit-bitfield-constant-conversion")
if(NOT (CONFIG_IDF_TARGET_LINUX AND CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin"))
# warning: implicit truncation from 'int' to a one-bit wide bit-field changes value from 1 to -1
list(APPEND compile_options "-Wno-single-bit-bitfield-constant-conversion")
endif()
endif()
# More warnings may exist in unit tests and example projects.
@ -186,6 +194,13 @@ if(CONFIG_COMPILER_DISABLE_GCC12_WARNINGS)
"-Wno-use-after-free")
endif()
if(CONFIG_COMPILER_DISABLE_GCC13_WARNINGS)
list(APPEND compile_options "-Wno-xor-used-as-pow")
list(APPEND c_compile_options "-Wno-enum-int-mismatch")
list(APPEND cxx_compile_options "-Wno-self-move"
"-Wno-dangling-reference")
endif()
# GCC-specific options
if(CMAKE_C_COMPILER_ID STREQUAL "GNU")
list(APPEND compile_options "-fstrict-volatile-bitfields"


@ -52,9 +52,9 @@ Supported since ESP-IDF v4.2.
| Release branch | Recommended | Required |
|------------------------|-------------|----------|
| release/v4.2 | v4.2.3 | v4.2.3 |
| release/v4.3 | v4.3.3 | v4.3.3 |
| release/v4.4 | v4.4.6 | v4.4.1 |
| release/v4.2 | v4.2.3 | v4.2 |
| release/v4.3 | v4.3.3 | v4.3 |
| release/v4.4 | v4.4.6 | v4.4 |
| release/v5.0 | v5.0.4 | v5.0 |
| release/v5.1 | v5.1.2 | v5.1 |
| release/v5.2 and above | v5.2 | v5.2 |
@ -71,14 +71,10 @@ To be added.
### ESP32-S3
#### v0.1
#### v0.1, v0.2
Supported since ESP-IDF v4.4.
#### v0.2
To be added.
### ESP32-C2 & ESP8684
#### v1.0
@ -93,6 +89,17 @@ To be added.
To be added.
### ESP32-C6
#### v0.0, v0.1
Supported since ESP-IDF v5.1.
### ESP32-H2
#### v0.1, v0.2
Supported since ESP-IDF v5.1.
## What If the ESP-IDF Version Is Lower than the `Required` Version?


@ -52,9 +52,9 @@
| 发布分支 | 推荐版本 | 需求版本 |
|------------------------|-------------|----------|
| release/v4.2 | v4.2.3 | v4.2.3 |
| release/v4.3 | v4.3.3 | v4.3.3 |
| release/v4.4 | v4.4.6 | v4.4.1 |
| release/v4.2 | v4.2.3 | v4.2 |
| release/v4.3 | v4.3.3 | v4.3 |
| release/v4.4 | v4.4.6 | v4.4 |
| release/v5.0 | v5.0.4 | v5.0 |
| release/v5.1 | v5.1.2 | v5.1 |
| release/v5.2 及以上 | v5.2 | v5.2 |
@ -71,14 +71,10 @@
### ESP32-S3
#### v0.1
#### v0.1, v0.2
从 ESP-IDF v4.4 开始支持。
#### v0.2
待更新。
### ESP32-C2 & ESP8684
#### v1.0
@ -93,6 +89,18 @@
待更新。
### ESP32-C6
#### v0.0, v0.1
从 ESP-IDF v5.1 开始支持。
### ESP32-H2
#### v0.1, v0.2
从 ESP-IDF v5.1 开始支持。
## 如果 ESP-IDF 版本低于 `需求版本` 会出现什么情况?

Kconfig

@ -22,7 +22,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
config IDF_ENV_BRINGUP
bool
default "y" if IDF_TARGET_ESP32P4
help
- This option is ONLY used when doing new chip bringup.
- This option will only enable necessary hw / sw settings for running
@ -33,6 +32,10 @@ mainmenu "Espressif IoT Development Framework Configuration"
bool
default y if "$(IDF_CI_BUILD)" = "y" || "$(IDF_CI_BUILD)" = 1
config IDF_DOC_BUILD
bool
default y if "$(IDF_DOC_BUILD)" = "y" || "$(IDF_DOC_BUILD)" = 1
config IDF_TOOLCHAIN
# This option records the IDF target when sdkconfig is generated the first time.
# It is not updated if environment variable $IDF_TOOLCHAIN changes later, and
@ -110,6 +113,27 @@ mainmenu "Espressif IoT Development Framework Configuration"
select FREERTOS_UNICORE
select IDF_TARGET_ARCH_RISCV
config IDF_TARGET_ESP32C5
bool
default "y" if IDF_TARGET="esp32c5"
select FREERTOS_UNICORE
select IDF_TARGET_ARCH_RISCV
choice IDF_TARGET_ESP32C5_VERSION
prompt "ESP32-C5 version"
depends on IDF_TARGET_ESP32C5
default IDF_TARGET_ESP32C5_BETA3_VERSION
help
ESP32-C5 will support two versions for a period.
This option is for internal use only.
Select the one that matches your chip model.
config IDF_TARGET_ESP32C5_BETA3_VERSION
bool
prompt "ESP32-C5 beta3"
select ESPTOOLPY_NO_STUB
endchoice
config IDF_TARGET_ESP32P4
bool
default "y" if IDF_TARGET="esp32p4"
@ -135,6 +159,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
default 0x000D if IDF_TARGET_ESP32C6
default 0x0010 if IDF_TARGET_ESP32H2
default 0x0012 if IDF_TARGET_ESP32P4
default 0x0011 if IDF_TARGET_ESP32C5
default 0xFFFF
@ -525,6 +550,13 @@ mainmenu "Espressif IoT Development Framework Configuration"
Enable this option if use GCC 12 or newer, and want to disable warnings which don't appear with
GCC 11.
config COMPILER_DISABLE_GCC13_WARNINGS
bool "Disable new warnings introduced in GCC 13"
default "n"
help
Enable this option if use GCC 13 or newer, and want to disable warnings which don't appear with
GCC 12.
config COMPILER_DUMP_RTL_FILES
bool "Dump RTL files during compilation"
help
@ -576,7 +608,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
Current experimental feature list:
- CONFIG_ESPTOOLPY_FLASHFREQ_120M
- CONFIG_SPIRAM_SPEED_120M
- CONFIG_SPI_FLASH_32BIT_ADDR_ENABLE
- CONFIG_FREERTOS_USE_KERNEL_10_5_1
- CONFIG_ESPTOOLPY_FLASHFREQ_120M && CONFIG_ESPTOOLPY_FLASH_SAMPLE_MODE_DTR
- CONFIG_SPIRAM_SPEED_120M && CONFIG_SPIRAM_MODE_OCT
- CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
- CONFIG_MBEDTLS_USE_CRYPTO_ROM_IMPL


@ -1,8 +1,19 @@
idf_build_get_property(target IDF_TARGET)
if(${target} STREQUAL "linux")
return() # This component is not supported by the POSIX/Linux simulator
endif()
set(srcs
"app_trace.c"
"app_trace_util.c"
"host_file_io.c")
if(CONFIG_ESP_DEBUG_STUBS_ENABLE)
list(APPEND srcs
"debug_stubs.c")
endif()
if(CONFIG_APPTRACE_GCOV_ENABLE)
if("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU")
list(APPEND srcs
@ -57,8 +68,7 @@ endif()
idf_component_register(SRCS "${srcs}"
INCLUDE_DIRS "${include_dirs}"
PRIV_INCLUDE_DIRS "${priv_include_dirs}"
# Requires "driver" for GPTimer in "SEGGER_SYSVIEW_Config_FreeRTOS.c"
PRIV_REQUIRES soc driver
PRIV_REQUIRES esp_driver_gptimer esp_driver_gpio esp_driver_uart
REQUIRES esp_timer
LDFRAGMENTS linker.lf)


@ -231,7 +231,7 @@ menu "Application Level Tracing"
choice APPTRACE_SV_CPU
prompt "CPU to trace"
depends on APPTRACE_SV_DEST_UART && !FREERTOS_UNICORE
depends on APPTRACE_SV_DEST_UART && !ESP_SYSTEM_SINGLE_CORE_MODE
default APPTRACE_SV_DEST_CPU_0
help
Define the CPU to trace by SystemView.
@ -252,8 +252,8 @@ menu "Application Level Tracing"
choice APPTRACE_SV_TS_SOURCE
prompt "Timer to use as timestamp source"
depends on APPTRACE_SV_ENABLE
default APPTRACE_SV_TS_SOURCE_CCOUNT if FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
default APPTRACE_SV_TS_SOURCE_GPTIMER if !FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
default APPTRACE_SV_TS_SOURCE_CCOUNT if ESP_SYSTEM_SINGLE_CORE_MODE && !PM_ENABLE && !IDF_TARGET_ESP32C3
default APPTRACE_SV_TS_SOURCE_GPTIMER if !ESP_SYSTEM_SINGLE_CORE_MODE && !PM_ENABLE && !IDF_TARGET_ESP32C3
default APPTRACE_SV_TS_SOURCE_ESP_TIMER if PM_ENABLE || IDF_TARGET_ESP32C3
help
SystemView needs to use a hardware timer as the source of timestamps
@ -261,7 +261,7 @@ menu "Application Level Tracing"
config APPTRACE_SV_TS_SOURCE_CCOUNT
bool "CPU cycle counter (CCOUNT)"
depends on FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
depends on ESP_SYSTEM_SINGLE_CORE_MODE && !PM_ENABLE && !IDF_TARGET_ESP32C3
config APPTRACE_SV_TS_SOURCE_GPTIMER
bool "General Purpose Timer (Timer Group)"


@ -1,7 +1,7 @@
/*
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2017-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
#include <string.h>
@ -77,7 +77,7 @@ esp_err_t esp_apptrace_init(void)
return ESP_OK;
}
ESP_SYSTEM_INIT_FN(esp_apptrace_init, ESP_SYSTEM_INIT_ALL_CORES, 115)
ESP_SYSTEM_INIT_FN(esp_apptrace_init, SECONDARY, ESP_SYSTEM_INIT_ALL_CORES, 115)
{
return esp_apptrace_init();
}


@ -1,7 +1,7 @@
/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
#include <sys/param.h>


@ -1,7 +1,7 @@
/*
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2017-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
//
#include "freertos/FreeRTOS.h"


@ -8,7 +8,8 @@
// Currently one stub is used for GCOV functionality
//
#include "esp_private/dbg_stubs.h"
#include "esp_private/startup_internal.h"
#include "dbg_stubs.h"
#include "esp_attr.h"
/*
@ -27,7 +28,7 @@ const static char *TAG = "esp_dbg_stubs";
#define ESP_DBG_STUBS_CODE_BUF_SIZE 32
#define ESP_DBG_STUBS_STACK_MIN_SIZE 2048
#define DBG_STUB_TRAMP_ATTR IRAM_ATTR
#define DBG_STUB_TRAMP_ATTR IRAM_ATTR
static struct {
uint32_t tramp_addr;
@ -46,7 +47,7 @@ extern void esp_dbg_stubs_ll_init(void *stub_table_addr);
static void * esp_dbg_stubs_data_alloc(uint32_t size)
{
ESP_LOGV(TAG, "%s %"PRIu32, __func__, size);
void *p = malloc(size);
void *p = malloc(size);
ESP_LOGV(TAG, "%s EXIT %p", __func__, p);
return p;
}
@ -92,3 +93,9 @@ esp_err_t esp_dbg_stub_entry_get(esp_dbg_stub_id_t id, uint32_t *entry)
return ESP_OK;
}
ESP_SYSTEM_INIT_FN(init_dbg_stubs, SECONDARY, BIT(0), 140)
{
esp_dbg_stubs_init();
return ESP_OK;
}


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -14,7 +14,7 @@
#include "soc/timer_periph.h"
#include "esp_app_trace.h"
#include "esp_freertos_hooks.h"
#include "esp_private/dbg_stubs.h"
#include "dbg_stubs.h"
#include "esp_ipc.h"
#include "hal/wdt_hal.h"
#if CONFIG_IDF_TARGET_ESP32
@ -173,6 +173,13 @@ long gcov_rtio_ftell(void *stream)
return ret;
}
int gcov_rtio_feof(void *stream)
{
int ret = esp_apptrace_feof(ESP_APPTRACE_DEST_TRAX, stream);
ESP_EARLY_LOGV(TAG, "%s(%p) = %d", __FUNCTION__, stream, ret);
return ret;
}
void gcov_rtio_setbuf(void *arg1 __attribute__ ((unused)), void *arg2 __attribute__ ((unused)))
{
return;


@ -5,3 +5,4 @@ fread gcov_rtio_fread
fseek gcov_rtio_fseek
ftell gcov_rtio_ftell
setbuf gcov_rtio_setbuf
feof gcov_rtio_feof


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -29,6 +29,7 @@ const static char *TAG = "esp_host_file_io";
#define ESP_APPTRACE_FILE_CMD_FSEEK 0x4
#define ESP_APPTRACE_FILE_CMD_FTELL 0x5
#define ESP_APPTRACE_FILE_CMD_STOP 0x6 // indicates that there are no files to transfer
#define ESP_APPTRACE_FILE_CMD_FEOF 0x7
/** File operation header */
typedef struct {
@ -68,6 +69,11 @@ typedef struct {
void * file;
} esp_apptrace_fseek_args_t;
/** Helper structure for feof */
typedef struct {
void *file;
} esp_apptrace_feof_args_t;
/** Helper structure for ftell */
typedef struct {
void *file;
@ -228,8 +234,11 @@ size_t esp_apptrace_fwrite(esp_apptrace_dest_t dest, const void *ptr, size_t siz
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
return 0;
}
return resp/size; // return the number of items written
/* OpenOCD writes it like this:
* fwrite(buf, size, 1, file);
* So, if 1 was returned, it means the fwrite succeeded.
*/
return resp == 1 ? nmemb : 0;
}
static void esp_apptrace_fread_args_prepare(uint8_t *buf, void *priv)
@ -275,6 +284,10 @@ size_t esp_apptrace_fread(esp_apptrace_dest_t dest, void *ptr, size_t size, size
ESP_EARLY_LOGE(TAG, "Failed to read file data (%d)!", ret);
return 0;
}
/* OpenOCD reads it like this:
* fread(buf, 1, size, file);
* So, the total number of bytes read is returned.
*/
return resp/size; // return the number of items read
}
@ -354,4 +367,34 @@ int esp_apptrace_fstop(esp_apptrace_dest_t dest)
return ret;
}
static void esp_apptrace_feof_args_prepare(uint8_t *buf, void *priv)
{
esp_apptrace_feof_args_t *args = priv;
memcpy(buf, &args->file, sizeof(args->file));
}
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream)
{
esp_apptrace_feof_args_t cmd_args;
cmd_args.file = stream;
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FEOF, esp_apptrace_feof_args_prepare,
&cmd_args, sizeof(cmd_args));
if (ret != ESP_OK) {
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
return EOF;
}
// now read the answer
int resp;
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
if (ret != ESP_OK) {
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
return EOF;
}
return resp;
}
#endif


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -255,6 +255,17 @@ int esp_apptrace_ftell(esp_apptrace_dest_t dest, void *stream);
*/
int esp_apptrace_fstop(esp_apptrace_dest_t dest);
/**
* @brief Test end-of-file indicator on a stream.
* This function has the same semantics as 'feof', except for the first argument.
*
* @param dest Indicates HW interface to use.
* @param stream File handle returned by esp_apptrace_fopen.
*
* @return Non-zero if the end-of-file indicator is set for the stream. See feof for details.
*/
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream);
/**
* @brief Triggers gcov info dump.
* This function waits for the host to connect to target before dumping data.
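Since the new `esp_apptrace_feof()` mirrors `feof()` apart from the destination argument, here is a hedged usage sketch; the file name and buffer size are arbitrary, and the fopen/fread/fclose calls are the existing host-file-I/O API from this header:

```c
#include "esp_app_trace.h"

/* Sketch: read a host file over JTAG until end-of-file. */
static void read_host_file(void)
{
    void *f = esp_apptrace_fopen(ESP_APPTRACE_DEST_TRAX, "data.bin", "rb");
    if (f == NULL) {
        return;
    }
    char buf[64];
    while (!esp_apptrace_feof(ESP_APPTRACE_DEST_TRAX, f)) {
        size_t n = esp_apptrace_fread(ESP_APPTRACE_DEST_TRAX, buf, 1, sizeof(buf), f);
        if (n == 0) {
            break; /* read error or nothing left */
        }
        /* process buf[0..n) here */
    }
    esp_apptrace_fclose(ESP_APPTRACE_DEST_TRAX, f);
}
```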


@ -15,7 +15,7 @@ entries:
SEGGER_SYSVIEW_FreeRTOS (noflash)
[mapping:app_trace_driver]
archive: libdriver.a
archive: libesp_driver_gptimer.a
entries:
if APPTRACE_SV_TS_SOURCE_GPTIMER = y:
gptimer (noflash)


@ -1,7 +1,7 @@
/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
#include "esp_cpu.h"


@ -1,7 +1,7 @@
/*
* SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2015-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
* SPDX-License-Identifier: Apache-2.0 OR MIT
*/
//
@ -298,7 +298,7 @@ static inline void esp_apptrace_trax_memory_enable(void)
#if CONFIG_IDF_TARGET_ESP32
/* Enable trace memory on PRO CPU */
DPORT_WRITE_PERI_REG(DPORT_PRO_TRACEMEM_ENA_REG, DPORT_PRO_TRACEMEM_ENA_M);
#if CONFIG_FREERTOS_UNICORE == 0
#if CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE == 0
/* Enable trace memory on APP CPU */
DPORT_WRITE_PERI_REG(DPORT_APP_TRACEMEM_ENA_REG, DPORT_APP_TRACEMEM_ENA_M);
#endif


@ -1,16 +1,9 @@
// Copyright 2017 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
/*
* SPDX-FileCopyrightText: 2017-2024 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef ESP_DBG_STUBS_H_
#define ESP_DBG_STUBS_H_
@ -28,8 +21,8 @@ typedef enum {
ESP_DBG_STUB_TABLE_SIZE,
ESP_DBG_STUB_CONTROL_DATA, ///< stubs descriptor entry
ESP_DBG_STUB_ENTRY_FIRST,
ESP_DBG_STUB_ENTRY_GCOV ///< GCOV entry
= ESP_DBG_STUB_ENTRY_FIRST,
ESP_DBG_STUB_ENTRY_GCOV ///< GCOV entry
= ESP_DBG_STUB_ENTRY_FIRST,
ESP_DBG_STUB_ENTRY_CAPABILITIES,
ESP_DBG_STUB_ENTRY_MAX
} esp_dbg_stub_id_t;
@ -49,7 +42,7 @@ void esp_dbg_stubs_init(void);
*
* @note Should be called before any esp_apptrace_xxx call.
*
* @param id Stub ID.
* @param id Stub ID.
* @param entry Stub entry. Usually it is stub entry function address,
but can be any value meaningful for OpenOCD command/code
* such as capabilities
@ -60,7 +53,7 @@ esp_err_t esp_dbg_stub_entry_set(esp_dbg_stub_id_t id, uint32_t entry);
/**
* @brief Retrieves the corresponding stub entry
*
* @param id Stub ID.
* @param id Stub ID.
* @param entry Stub entry. Usually it is stub entry function address,
but can be any value meaningful for OpenOCD command/code
* such as capabilities
@ -73,4 +66,4 @@ esp_err_t esp_dbg_stub_entry_get(esp_dbg_stub_id_t id, uint32_t *entry);
}
#endif
#endif //ESP_DBG_STUBS_H_
#endif // ESP_DBG_STUBS_H_


@ -35,7 +35,7 @@ static uint8_t s_down_buf[SYSVIEW_DOWN_BUF_SIZE];
#if CONFIG_APPTRACE_SV_DEST_UART
#define ESP_APPTRACE_DEST_SYSVIEW ESP_APPTRACE_DEST_UART
#if CONFIG_APPTRACE_SV_DEST_CPU_0 || CONFIG_FREERTOS_UNICORE
#if CONFIG_APPTRACE_SV_DEST_CPU_0 || CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
#define APPTRACE_SV_DEST_CPU 0
#else
#define APPTRACE_SV_DEST_CPU 1
@ -296,7 +296,7 @@ int SEGGER_RTT_ConfigDownBuffer(unsigned BufferIndex, const char* sName, void* p
* linked whenever SystemView is used.
*/
ESP_SYSTEM_INIT_FN(sysview_init, BIT(0), 120)
ESP_SYSTEM_INIT_FN(sysview_init, SECONDARY, BIT(0), 120)
{
SEGGER_SYSVIEW_Conf();
return ESP_OK;


@ -1,4 +0,0 @@
idf_component_register(SRC_DIRS "."
PRIV_INCLUDE_DIRS "."
PRIV_REQUIRES cmock driver)
target_compile_options(${COMPONENT_LIB} PRIVATE "-Wno-format")


@ -0,0 +1,9 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
components/app_trace/test_apps:
depends_components:
- app_trace
- esp_timer
- soc
- driver
- esp_hw_support


@ -0,0 +1,9 @@
cmake_minimum_required(VERSION 3.16)
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
set(COMPONENTS main)
list(PREPEND SDKCONFIG_DEFAULTS
"$ENV{IDF_PATH}/tools/test_apps/configs/sdkconfig.debug_helpers"
"sdkconfig.defaults")
project(app_trace_test)


@ -0,0 +1,16 @@
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-P4 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- | -------- |
# app_trace test
To build and run this test app for app_trace-related tests:
```bash
IDF_TARGET=esp32 idf.py @app_trace build flash monitor
```
To build and run this test app for SystemView-related tests:
```bash
IDF_TARGET=esp32 idf.py @sysview build flash monitor
```
The `@app_trace` and `@sysview` arguments apply additional `idf.py` options from the [app_trace](app_trace) and [sysview](sysview) files.


@ -0,0 +1 @@
-DSDKCONFIG_DEFAULTS="sdkconfig.defaults;sdkconfig.ci.app_trace" -B build/app_trace -DSDKCONFIG=build/app_trace/sdkconfig


@ -0,0 +1,4 @@
idf_component_register(SRCS "test_app_trace_main.c" "test_trace.c"
INCLUDE_DIRS "."
PRIV_REQUIRES app_trace unity esp_driver_gptimer
WHOLE_ARCHIVE)


@ -0,0 +1,48 @@
/*
* SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include "unity.h"
#include "unity_test_runner.h"
#include "esp_heap_caps.h"
#define TEST_MEMORY_LEAK_THRESHOLD_DEFAULT 0
static int leak_threshold = TEST_MEMORY_LEAK_THRESHOLD_DEFAULT;
void set_leak_threshold(int threshold)
{
leak_threshold = threshold;
}
static size_t before_free_8bit;
static size_t before_free_32bit;
static void check_leak(size_t before_free, size_t after_free, const char *type)
{
ssize_t delta = after_free - before_free;
printf("MALLOC_CAP_%s: Before %u bytes free, After %u bytes free (delta %d)\n", type, before_free, after_free, delta);
TEST_ASSERT_MESSAGE(delta >= leak_threshold, "memory leak");
}
void setUp(void)
{
before_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT);
before_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT);
}
void tearDown(void)
{
size_t after_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT);
size_t after_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT);
check_leak(before_free_8bit, after_free_8bit, "8BIT");
check_leak(before_free_32bit, after_free_32bit, "32BIT");
leak_threshold = TEST_MEMORY_LEAK_THRESHOLD_DEFAULT;
}
void app_main(void)
{
printf("Running app_trace component tests\n");
unity_run_menu();
}


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -16,7 +16,7 @@
#include "freertos/FreeRTOS.h"
#include "freertos/semphr.h"
#include "freertos/task.h"
#if CONFIG_APPTRACE_ENABLE == 1
#include "esp_app_trace.h"
#include "esp_app_trace_util.h"
@ -423,7 +423,6 @@ TEST_CASE("App trace test (1 task + 1 crashed timer ISR @ 1 core)", "[trace][ign
esp_apptrace_test(&test_cfg);
}
TEST_CASE("App trace test (1 crashed task)", "[trace][ignore]")
{
esp_apptrace_test_task_arg_t s_test_tasks[1];
@ -717,6 +716,7 @@ typedef struct {
static bool esp_sysview_test_timer_isr(gptimer_handle_t timer, const gptimer_alarm_event_data_t *edata, void *user_ctx)
{
esp_sysviewtrace_timer_arg_t *tim_arg = (esp_sysviewtrace_timer_arg_t *)user_ctx;
(void) tim_arg;
return false;
}
@ -724,7 +724,7 @@ static void esp_sysviewtrace_test_task(void *p)
{
esp_sysviewtrace_task_arg_t *arg = (esp_sysviewtrace_task_arg_t *) p;
volatile uint32_t tmp = 0;
printf("%x: run sysview task\n", (uint32_t)xTaskGetCurrentTaskHandle());
printf("%p: run sysview task\n", xTaskGetCurrentTaskHandle());
if (arg->timer) {
gptimer_alarm_config_t alarm_config = {
@ -744,7 +744,7 @@ static void esp_sysviewtrace_test_task(void *p)
int i = 0;
while (1) {
static uint32_t count;
printf("%d", arg->id);
printf("%" PRIu32, arg->id);
if ((++count % 80) == 0) {
printf("\n");
}
@ -890,22 +890,22 @@ TEST_CASE("SysView trace test 2", "[trace][ignore]")
TEST_ESP_OK(gptimer_new_timer(&timer_config, &tim_arg2.gptimer));
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace0", 2048, &arg1, 3, &thnd, 0);
printf("Created task %x\n", (uint32_t)thnd);
printf("Created task %p\n", thnd);
#if CONFIG_FREERTOS_UNICORE == 0
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace1", 2048, &arg2, 4, &thnd, 1);
#else
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace1", 2048, &arg2, 4, &thnd, 0);
#endif
printf("Created task %x\n", (uint32_t)thnd);
printf("Created task %p\n", thnd);
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync0", 2048, &arg3, 3, &thnd, 0);
printf("Created task %x\n", (uint32_t)thnd);
printf("Created task %p\n", thnd);
#if CONFIG_FREERTOS_UNICORE == 0
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync1", 2048, &arg4, 5, &thnd, 1);
#else
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync1", 2048, &arg4, 5, &thnd, 0);
#endif
printf("Created task %x\n", (uint32_t)thnd);
printf("Created task %p\n", thnd);
xSemaphoreTake(arg1.done, portMAX_DELAY);
vSemaphoreDelete(arg1.done);
@ -924,4 +924,3 @@ TEST_CASE("SysView trace test 2", "[trace][ignore]")
TEST_ESP_OK(gptimer_del_timer(tim_arg2.gptimer));
}
#endif // #if CONFIG_APPTRACE_SV_ENABLE == 0
#endif // #if CONFIG_APPTRACE_ENABLE == 1


@ -0,0 +1 @@
# app_trace is already enabled by sdkconfig.defaults, so no options are needed here


@ -0,0 +1 @@
CONFIG_APPTRACE_SV_ENABLE=y


@ -0,0 +1,2 @@
CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0=n
CONFIG_APPTRACE_DEST_JTAG=y


@ -0,0 +1 @@
-DSDKCONFIG_DEFAULTS="sdkconfig.defaults;sdkconfig.ci.sysview" -B build/sysview -DSDKCONFIG=build/sysview/sdkconfig


@ -1,3 +1,9 @@
idf_build_get_property(target IDF_TARGET)
if(${target} STREQUAL "linux")
return() # This component is not supported by the POSIX/Linux simulator
endif()
idf_component_register(SRCS "esp_ota_ops.c" "esp_ota_app_desc.c"
INCLUDE_DIRS "include"
REQUIRES partition_table bootloader_support esp_app_format esp_bootloader_format esp_partition


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -31,22 +31,6 @@
#include "esp_bootloader_desc.h"
#include "esp_flash.h"
#if CONFIG_IDF_TARGET_ESP32
#include "esp32/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32C2
#include "esp32c2/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32C6
#include "esp32c6/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32H2
#include "esp32h2/rom/secure_boot.h"
#endif
#define SUB_TYPE_ID(i) (i & 0x0F)
/* Partial_data is word aligned so no reallocation is necessary for encrypted flash write */
@ -200,13 +184,18 @@ esp_err_t esp_ota_write(esp_ota_handle_t handle, const void *data, size_t size)
return ESP_ERR_INVALID_ARG;
}
if (size == 0) {
ESP_LOGD(TAG, "write data size is 0");
return ESP_OK;
}
// find ota handle in linked list
for (it = LIST_FIRST(&s_ota_ops_entries_head); it != NULL; it = LIST_NEXT(it, entries)) {
if (it->handle == handle) {
if (it->need_erase) {
// must erase the partition before writing to it
uint32_t first_sector = it->wrote_size / SPI_FLASH_SEC_SIZE;
uint32_t last_sector = (it->wrote_size + size) / SPI_FLASH_SEC_SIZE;
uint32_t first_sector = it->wrote_size / SPI_FLASH_SEC_SIZE; // first affected sector
uint32_t last_sector = (it->wrote_size + size - 1) / SPI_FLASH_SEC_SIZE; // last affected sector
ret = ESP_OK;
if ((it->wrote_size % SPI_FLASH_SEC_SIZE) == 0) {
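The `- 1` added to `last_sector` above is the substance of this hunk: without it, a write that ends exactly on a sector boundary would erase one sector past the written data. A self-contained sketch of the boundary case, with illustrative constants:

```c
#include <stdint.h>
#include <stdio.h>

#define SPI_FLASH_SEC_SIZE 4096

int main(void)
{
    uint32_t wrote_size = 0;
    uint32_t size = 4096; /* write fills exactly one sector */
    uint32_t last_old = (wrote_size + size) / SPI_FLASH_SEC_SIZE;     /* 1: would erase a second sector */
    uint32_t last_new = (wrote_size + size - 1) / SPI_FLASH_SEC_SIZE; /* 0: only the sector actually written */
    printf("old last_sector=%u, new last_sector=%u\n", (unsigned)last_old, (unsigned)last_new);
    return 0;
}
```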
@ -933,9 +922,27 @@ esp_err_t esp_ota_erase_last_boot_app_partition(void)
return ESP_OK;
}
#if SOC_EFUSE_SECURE_BOOT_KEY_DIGESTS > 1 && CONFIG_SECURE_BOOT_V2_ENABLED
esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_index_t index) {
#if SOC_SUPPORT_SECURE_BOOT_REVOKE_KEY && CONFIG_SECURE_BOOT_V2_ENABLED
// Validates the image at "app_pos" with the secure boot digests other than "revoked_key_index"
static bool validate_img(esp_ota_secure_boot_public_key_index_t revoked_key_index, esp_partition_pos_t *app_pos)
{
bool verified = false;
for (int i = 0; i < SOC_EFUSE_SECURE_BOOT_KEY_DIGESTS; i++) {
if (i == revoked_key_index) {
continue;
}
if (esp_secure_boot_verify_with_efuse_digest_index(i, app_pos) == ESP_OK) {
verified = true;
ESP_LOGI(TAG, "Application successfully verified with public key digest %d", i);
break;
}
}
return verified;
}
esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_index_t index)
{
if (!esp_secure_boot_enabled()) {
ESP_LOGE(TAG, "Secure boot v2 has not been enabled.");
return ESP_FAIL;
@ -948,14 +955,21 @@ esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_i
return ESP_ERR_INVALID_ARG;
}
esp_image_sig_public_key_digests_t app_digests = { 0 };
esp_err_t err = esp_secure_boot_get_signature_blocks_for_running_app(true, &app_digests);
if (err != ESP_OK || app_digests.num_digests == 0) {
ESP_LOGE(TAG, "This app is not signed, but check signature on update is enabled in config. It won't be possible to verify any update.");
const esp_partition_t *running_app_part = esp_ota_get_running_partition();
esp_err_t ret = ESP_FAIL;
#ifdef CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE
esp_ota_img_states_t running_app_state;
ret = esp_ota_get_state_partition(running_app_part, &running_app_state);
if (ret != ESP_OK) {
ESP_LOGE(TAG, "esp_ota_get_state_partition returned: %s", esp_err_to_name(ret));
return ESP_FAIL;
}
if (running_app_state != ESP_OTA_IMG_VALID) {
ESP_LOGE(TAG, "The currently running application is not marked as a valid image. Aborting...");
return ESP_FAIL;
}
#endif
esp_err_t ret;
esp_secure_boot_key_digests_t trusted_keys;
ret = esp_secure_boot_read_key_digests(&trusted_keys);
if (ret != ESP_OK) {
@ -964,55 +978,38 @@ esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_i
}
if (trusted_keys.key_digests[index] == NULL) {
ESP_LOGI(TAG, "Trusted Key block(%d) already revoked.", index);
ESP_LOGI(TAG, "Given public key digest(%d) is already revoked.", index);
return ESP_OK;
}
esp_image_sig_public_key_digests_t trusted_digests = { 0 };
for (unsigned i = 0; i < SECURE_BOOT_NUM_BLOCKS; i++) {
if (i == index) {
continue; // omitting - to find if there is a valid key after revoking this digest
}
/* Check if the application can be verified with a key other than the one being revoked */
esp_partition_pos_t running_app_pos = {
.offset = running_app_part->address,
.size = running_app_part->size,
};
if (trusted_keys.key_digests[i] != NULL) {
bool all_zeroes = true;
for (unsigned j = 0; j < ESP_SECURE_BOOT_DIGEST_LEN; j++) {
all_zeroes = all_zeroes && (*(uint8_t *)(trusted_keys.key_digests[i] + j) == 0);
}
if (!all_zeroes) {
memcpy(trusted_digests.key_digests[trusted_digests.num_digests++], (uint8_t *)trusted_keys.key_digests[i], ESP_SECURE_BOOT_DIGEST_LEN);
} else {
ESP_LOGD(TAG, "Empty trusted key block (%d).", i);
}
}
}
bool match = false;
for (unsigned i = 0; i < trusted_digests.num_digests; i++) {
if (match == true) {
break;
}
for (unsigned j = 0; j < app_digests.num_digests; j++) {
if (memcmp(trusted_digests.key_digests[i], app_digests.key_digests[j], ESP_SECURE_BOOT_DIGEST_LEN) == 0) {
ESP_LOGI(TAG, "App key block(%d) matches Trusted key block(%d)[%d -> Next active trusted key block].", j, i, i);
esp_err_t err = esp_efuse_set_digest_revoke(index);
if (err != ESP_OK) {
ESP_LOGE(TAG, "Failed to revoke digest (0x%x).", err);
return ESP_FAIL;
}
ESP_LOGI(TAG, "Revoked signature block %d.", index);
match = true;
break;
}
}
}
if (match == false) {
ESP_LOGE(TAG, "Running app doesn't have another valid secure boot key. Cannot revoke current key(%d).", index);
if (!validate_img(index, &running_app_pos)) {
ESP_LOGE(TAG, "Application cannot be verified with any key other than the one being revoked");
return ESP_FAIL;
}
/* Check if the bootloader can be verified with any key other than the one being revoked */
esp_partition_pos_t bootloader_pos = {
.offset = ESP_BOOTLOADER_OFFSET,
.size = (ESP_PARTITION_TABLE_OFFSET - ESP_BOOTLOADER_OFFSET),
};
if (!validate_img(index, &bootloader_pos)) {
ESP_LOGE(TAG, "Bootloader cannot be verified with any key other than the one being revoked");
return ESP_FAIL;
}
esp_err_t err = esp_efuse_set_digest_revoke(index);
if (err != ESP_OK) {
ESP_LOGE(TAG, "Failed to revoke digest (0x%x).", err);
return ESP_FAIL;
}
ESP_LOGI(TAG, "Revoked signature block %d.", index);
return ESP_OK;
}
#endif


@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -111,7 +111,7 @@ esp_err_t esp_ota_begin(const esp_partition_t* partition, size_t image_size, esp
* @param size Size of data buffer in bytes.
*
* @return
* - ESP_OK: Data was written to flash successfully.
* - ESP_OK: Data was written to flash successfully, or size = 0.
* - ESP_ERR_INVALID_ARG: handle is invalid.
* - ESP_ERR_OTA_VALIDATE_FAILED: First byte of image contains invalid app image magic byte.
* - ESP_ERR_FLASH_OP_TIMEOUT or ESP_ERR_FLASH_OP_FAIL: Flash write failed.
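The size = 0 note above reflects the new early return in `esp_ota_write()`. For context, a hedged sketch of the surrounding begin/write/end flow; `get_next_chunk()` is a hypothetical download source, everything else is the documented esp_ota_ops API:

```c
#include "esp_ota_ops.h"
#include "esp_partition.h"

/* Hypothetical download source: returns bytes read, 0 at end. */
extern int get_next_chunk(void *buf, size_t cap);

esp_err_t run_ota_update(void)
{
    const esp_partition_t *part = esp_ota_get_next_update_partition(NULL);
    esp_ota_handle_t handle;
    esp_err_t err = esp_ota_begin(part, OTA_SIZE_UNKNOWN, &handle);
    if (err != ESP_OK) {
        return err;
    }
    char buf[1024];
    int len;
    while ((len = get_next_chunk(buf, sizeof(buf))) > 0) {
        err = esp_ota_write(handle, buf, len); /* a zero-length write is now a no-op returning ESP_OK */
        if (err != ESP_OK) {
            esp_ota_abort(handle);
            return err;
        }
    }
    err = esp_ota_end(handle);
    if (err != ESP_OK) {
        return err;
    }
    return esp_ota_set_boot_partition(part);
}
```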
@ -350,10 +350,10 @@ typedef enum {
} esp_ota_secure_boot_public_key_index_t;
/**
* @brief Revokes the old signature digest. To be called in the application after the rollback logic.
* @brief Revokes the signature digest denoted by the given index. This should be called in the application only after the rollback logic, otherwise the device may end up in an unrecoverable state.
*
* Relevant for Secure boot v2 on ESP32-S2, ESP32-S3, ESP32-C3, ESP32-C6, ESP32-H2 where up to 3 key digests can be stored (Key \#N-1, Key \#N, Key \#N+1).
* When key \#N-1 used to sign an app is invalidated, an OTA update is to be sent with an app signed with key \#N-1 & Key \#N.
* When a key used to sign an app is invalidated, an OTA update is to be sent with an app signed with at least one of the other two keys that has not already been revoked.
* After successfully booting, the OTA app should call this function to revoke Key \#N-1.
*
* @param index - The index of the signature block to be revoked
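A hedged sketch of the call sequence this doc comment prescribes; the key index is illustrative, and `esp_ota_mark_app_valid_cancel_rollback()` stands in for the project's own rollback logic:

```c
#include "esp_log.h"
#include "esp_ota_ops.h"

static const char *TAG = "ota_key_revoke";

/* Sketch: revoke the retired signing key only after the new image has
 * proven itself and rollback has been cancelled. */
void revoke_old_key_after_update(void)
{
    /* Confirm the running image first (rollback logic). */
    esp_err_t err = esp_ota_mark_app_valid_cancel_rollback();
    if (err != ESP_OK) {
        ESP_LOGE(TAG, "Could not cancel rollback: %s", esp_err_to_name(err));
        return;
    }

    /* Index 0 is illustrative; pass the slot of the key being retired. */
    err = esp_ota_revoke_secure_boot_public_key(SECURE_BOOT_PUBLIC_KEY_INDEX_0);
    if (err != ESP_OK) {
        ESP_LOGE(TAG, "Key revocation failed: %s", esp_err_to_name(err));
    }
}
```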


@ -19,7 +19,7 @@ def run_multiple_stages(dut: Dut, test_case_num: int, stages: int) -> None:
@pytest.mark.supported_targets
@pytest.mark.temp_skip_ci(targets=['esp32c6', 'esp32h2'], reason='c6/h2 support TBD')
@pytest.mark.temp_skip_ci(targets=['esp32c6', 'esp32h2', 'esp32p4'], reason='c6/h2/p4 support TBD') # TODO: IDF-8959
@pytest.mark.generic
def test_app_update(dut: Dut) -> None:
extra_data = dut.parse_test_menu()


@ -1,3 +1,9 @@
idf_build_get_property(target IDF_TARGET)
if(${target} STREQUAL "linux")
return() # This component is not supported by the POSIX/Linux simulator
endif()
idf_component_register(PRIV_REQUIRES partition_table esptool_py)
# Do not generate the flash file when building the bootloader or when in early expansion of the build


@ -6,7 +6,7 @@ menu "Bootloader config"
hex
default 0x1000 if IDF_TARGET_ESP32 || IDF_TARGET_ESP32S2
# the first 2 sectors are reserved for the key manager with AES-XTS (flash encryption) purpose
default 0x2000 if IDF_TARGET_ESP32P4
default 0x2000 if IDF_TARGET_ESP32P4 || IDF_TARGET_ESP32C5
default 0x0
help
Offset address that 2nd bootloader will be flashed to.
@ -23,7 +23,6 @@ menu "Bootloader config"
- The default "Size" setting will add the -0s flag to CFLAGS.
- The "Debug" setting will add the -Og flag to CFLAGS.
- The "Performance" setting will add the -O2 flag to CFLAGS.
- The "None" setting will add the -O0 flag to CFLAGS.
Note that custom optimization levels may be unsupported.
@ -34,7 +33,9 @@ menu "Bootloader config"
config BOOTLOADER_COMPILER_OPTIMIZATION_PERF
bool "Optimize for performance (-O2)"
config BOOTLOADER_COMPILER_OPTIMIZATION_NONE
bool "Debug without optimization (-O0)"
bool "Debug without optimization (-O0) (Deprecated, will be removed in IDF v6.0)"
depends on IDF_TARGET_ARCH_XTENSA || IDF_TARGET_ESP32C3 || IDF_TARGET_ESP32C2
endchoice
choice BOOTLOADER_LOG_LEVEL
@ -66,34 +67,82 @@ menu "Bootloader config"
default 4 if BOOTLOADER_LOG_LEVEL_DEBUG
default 5 if BOOTLOADER_LOG_LEVEL_VERBOSE
config BOOTLOADER_SPI_CUSTOM_WP_PIN
bool "Use custom SPI Flash WP Pin when flash pins set in eFuse (read help)"
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
default y if BOOTLOADER_SPI_WP_PIN != 7 # backwards compatibility, can remove in IDF 5
default n
help
This setting is only used if the SPI flash pins have been overridden by setting the eFuses
SPI_PAD_CONFIG_xxx, and the SPI flash mode is QIO or QOUT.
menu "Serial Flash Configurations"
config BOOTLOADER_SPI_CUSTOM_WP_PIN
bool "Use custom SPI Flash WP Pin when flash pins set in eFuse (read help)"
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
default y if BOOTLOADER_SPI_WP_PIN != 7 # backwards compatibility, can remove in IDF 5
default n
help
This setting is only used if the SPI flash pins have been overridden by setting the eFuses
SPI_PAD_CONFIG_xxx, and the SPI flash mode is QIO or QOUT.
When this is the case, the eFuse config only defines 3 of the 4 Quad I/O data pins. The WP pin (aka
ESP32 pin "SD_DATA_3" or SPI flash pin "IO2") is not specified in eFuse. The same pin is also used
for external SPIRAM if it is enabled.
When this is the case, the eFuse config only defines 3 of the 4 Quad I/O data pins. The WP pin (aka
ESP32 pin "SD_DATA_3" or SPI flash pin "IO2") is not specified in eFuse. The same pin is also used
for external SPIRAM if it is enabled.
If this config item is set to N (default), the correct WP pin will be automatically used for any
Espressif chip or module with integrated flash. If a custom setting is needed, set this config item to
Y and specify the GPIO number connected to the WP.
If this config item is set to N (default), the correct WP pin will be automatically used for any
Espressif chip or module with integrated flash. If a custom setting is needed, set this config item to
Y and specify the GPIO number connected to the WP.
config BOOTLOADER_SPI_WP_PIN
int "Custom SPI Flash WP Pin"
range 0 33
default 7
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
#depends on BOOTLOADER_SPI_CUSTOM_WP_PIN # backwards compatibility, can uncomment in IDF 5
help
The option "Use custom SPI Flash WP Pin" must be set or this value is ignored
config BOOTLOADER_SPI_WP_PIN
int "Custom SPI Flash WP Pin"
range 0 33
default 7
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
#depends on BOOTLOADER_SPI_CUSTOM_WP_PIN # backwards compatibility, can uncomment in IDF 5
help
The option "Use custom SPI Flash WP Pin" must be set or this value is ignored
If burning a customized set of SPI flash pins in eFuse and using QIO or QOUT mode for flash, set this
value to the GPIO number of the SPI flash WP pin.
If burning a customized set of SPI flash pins in eFuse and using QIO or QOUT mode for flash, set this
value to the GPIO number of the SPI flash WP pin.
config BOOTLOADER_FLASH_DC_AWARE
bool "Allow app adjust Dummy Cycle bits in SPI Flash for higher frequency (READ HELP FIRST)"
help
This will force the 2nd bootloader to be loaded in DOUT mode, and will restore the Dummy Cycle
setting by resetting the flash.
config BOOTLOADER_FLASH_XMC_SUPPORT
bool "Enable the support for flash chips of XMC (READ DOCS FIRST)"
default y
help
Perform the startup flow recommended by XMC. Please consult XMC for the details of this flow.
XMC chips will be forbidden to be used when this option is disabled.
DON'T DISABLE THIS UNLESS YOU KNOW WHAT YOU ARE DOING.
comment "Features below require specific hardware (READ DOCS FIRST!)"
config BOOTLOADER_FLASH_32BIT_ADDR
bool
default y if ESPTOOLPY_FLASHSIZE_32MB || ESPTOOLPY_FLASHSIZE_64MB || ESPTOOLPY_FLASHSIZE_128MB
default n
help
This is a helper config for 32-bit address flash. Invisible to users.
config BOOTLOADER_FLASH_NEEDS_32BIT_FEAT
bool
default y if BOOTLOADER_FLASH_32BIT_ADDR && !ESPTOOLPY_OCT_FLASH
help
This is a helper config for 32-bit address flash. Invisible to users.
config BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
bool "Enable cache access to 32-bit-address (over 16MB) range of SPI Flash (READ DOCS FIRST)"
depends on BOOTLOADER_FLASH_NEEDS_32BIT_FEAT && IDF_TARGET_ESP32S3 && IDF_EXPERIMENTAL_FEATURES
default n
help
Enabling this option allows the CPU to access the 32-bit-address flash range beyond 16 MB.
1. This option is only valid for 4-line flash. Octal flash doesn't need this.
2. This option is experimental, which means it cannot be used stably with all flash chips. For more
information, please contact Espressif Business support.
config BOOTLOADER_CACHE_32BIT_ADDR_OCTAL_FLASH
bool
default y if ESPTOOLPY_OCT_FLASH && BOOTLOADER_FLASH_32BIT_ADDR
default n
endmenu
choice BOOTLOADER_VDDSDIO_BOOST
bool "VDDSDIO LDO voltage"
@ -244,8 +293,11 @@ menu "Bootloader config"
If this option is not set (default), then rtc_wdt will be disabled by ESP-IDF before calling
the app_main() function.
Use function rtc_wdt_feed() for resetting counter of rtc_wdt.
Use function rtc_wdt_disable() for disabling rtc_wdt.
Use function wdt_hal_feed() for resetting counter of RTC_WDT.
For esp32/s2 you can also use rtc_wdt_feed().
Use function wdt_hal_disable() for disabling RTC_WDT.
For esp32/s2 you can also use rtc_wdt_disable().
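Since the help text now points at the wdt_hal API, a hedged sketch of feeding and then disabling the RTC watchdog from application code; it assumes the generic `hal/wdt_hal.h` interface, with `RWDT_HAL_CONTEXT_DEFAULT()` as its context initializer:

```c
#include "hal/wdt_hal.h"

/* Sketch: feed, then disable, the RTC watchdog from user code,
 * as the help text above suggests. */
static void rtc_wdt_feed_then_disable(void)
{
    wdt_hal_context_t rwdt_ctx = RWDT_HAL_CONTEXT_DEFAULT();
    wdt_hal_write_protect_disable(&rwdt_ctx);
    wdt_hal_feed(&rwdt_ctx);    /* reset the watchdog counter */
    wdt_hal_disable(&rwdt_ctx); /* or stop it entirely */
    wdt_hal_write_protect_enable(&rwdt_ctx);
}
```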
config BOOTLOADER_WDT_TIME_MS
int "Timeout for RTC watchdog (ms)"
@ -428,16 +480,6 @@ menu "Bootloader config"
- "Reserve RTC FAST memory for custom purposes"
- "GPIO triggers factory reset"
config BOOTLOADER_FLASH_XMC_SUPPORT
bool "Enable the support for flash chips of XMC (READ HELP FIRST)"
default y
depends on !IDF_ENV_BRINGUP
help
Perform the startup flow recommended by XMC. Please consult XMC for the details of this flow.
XMC chips will be forbidden to be used, when this option is disabled.
DON'T DISABLE THIS UNLESS YOU KNOW WHAT YOU ARE DOING.
endmenu # Bootloader
@ -794,7 +836,6 @@ menu "Security features"
bool "Enable flash encryption on boot (READ DOCS FIRST)"
default N
select SPI_FLASH_ENABLE_ENCRYPTED_READ_WRITE
select NVS_ENCRYPTION
help
If this option is set, flash contents will be encrypted by the bootloader on first boot.
@ -949,6 +990,14 @@ menu "Security features"
Revoking unused digest slots ensures that no trusted keys can be added later by an attacker.
If set, it means that you plan to use the unused digest slots later.
Note that if you plan to enable secure boot during the first boot-up, the bootloader will intentionally
revoke the unused digest slots while enabling secure boot, even if the above config is enabled, because
keeping the unused key slots un-revoked would be a security hazard.
If you need to avoid this revocation in a development workflow, you should enable secure boot
externally (host-based mechanism) rather than enabling it during boot-up, so that the bootloader
does not need to enable secure boot and its revocation strategy is avoided.
config SECURE_FLASH_UART_BOOTLOADER_ALLOW_ENC
bool "Leave UART bootloader encryption enabled"
depends on SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT
@ -1033,6 +1082,22 @@ menu "Security features"
DIS_DOWNLOAD_MANUAL_ENCRYPT, DIS_USB_JTAG, DIS_USB_SERIAL_JTAG, STRAP_JTAG_SEL, USB_PHY_SEL.
endmenu # Potentially Insecure
config SECURE_FLASH_ENCRYPT_ONLY_IMAGE_LEN_IN_APP_PART
bool "Encrypt only the app image that is present in the partition of type app"
depends on SECURE_FLASH_ENC_ENABLED && !SECURE_FLASH_REQUIRE_ALREADY_ENABLED
default y
help
If set (default), optimise encryption time for the partition of type APP
by encrypting only the app image that is present in the partition,
instead of the whole partition.
The image length used for encryption is derived from the image metadata, which
includes the size of the app image, checksum, hash and also the signature sector
when secure boot is enabled.
If not set, the whole partition of type APP would be encrypted,
which increases the encryption time but might be useful if there
is any custom data appended to the firmware image.
config SECURE_FLASH_CHECK_ENC_EN_IN_APP
bool "Check Flash Encryption enabled on app startup"
depends on SECURE_FLASH_ENC_ENABLED


@ -23,3 +23,7 @@ CONFIG_FLASH_ENCRYPTION_UART_BOOTLOADER_ALLOW_CACHE CONFIG_SECURE_FLASH_
# Secure Boot Scheme
CONFIG_SECURE_BOOT_ENABLED CONFIG_SECURE_BOOT_V1_ENABLED
CONFIG_SPI_FLASH_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
CONFIG_SPI_FLASH_QUAD_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
CONFIG_SPI_FLASH_OCTAL_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_OCTAL_FLASH

Some files were not shown because too many files have changed in this diff.