diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7ea17b70f..0076045b6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,10 @@ name: Build native Zephyr samples -on: [push, pull_request] +on: + push: + branches: + - main + pull_request: jobs: build: diff --git a/.github/workflows/leave_pr_comment.yml b/.github/workflows/leave_pr_comment.yml new file mode 100644 index 000000000..8469627a3 --- /dev/null +++ b/.github/workflows/leave_pr_comment.yml @@ -0,0 +1,60 @@ +# SPDX-License-Identifier: Apache-2.0 + +name: Leave PR comment + +on: + workflow_run: + workflows: ["Package, test and upload core"] + types: + - completed + +permissions: + contents: read + pull-requests: write + +jobs: + comment-on-pr: + runs-on: ubuntu-latest + steps: + - name: Download artifact + uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11 + with: + workflow: package-core.yml + run_id: ${{ github.event.workflow_run.id }} + name: comment-request + if_no_artifact_found: ignore + + - name: Load PR number + run: | + echo "PR_NUM=$(cat pr_number || true)" >> $GITHUB_ENV + + - name: Check PR number + id: check-pr + uses: carpentries/actions/check-valid-pr@2e20fd5ee53b691e27455ce7ca3b16ea885140e8 # v0.15.0 + with: + pr: ${{ env.PR_NUM }} + sha: ${{ github.event.workflow_run.head_sha }} + + - name: Validate PR number + if: ${{ steps.check-pr.outputs.VALID != 'true' }} + run: | + echo "ABORT: PR number validation failed!" + exit 1 + + - name: Update PR comment + if: ${{ steps.check-pr.outputs.VALID == 'true' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} + run: | + BODY="$(cat comment_body || echo '# :fire: CI failure processing logs!')" + + # delete existing comment, if present, then add a new one + gh pr comment ${PR_NUM} --delete-last --yes || true + gh pr comment ${PR_NUM} --body "${BODY}" + + - name: Clean up intermediate artifacts + if: ${{ steps.check-pr.outputs.VALID == 'true' }} + uses: geekyeggo/delete-artifact@v5.1.0 + with: + name: comment-request diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index cc88a5f89..0a4a16b51 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -1,8 +1,13 @@ name: Package, test and upload core on: - - push - - pull_request + push: + branches: + - main + pull_request: + +env: + BRANCH_NAME: origin/${{ github.head_ref || github.ref_name }} jobs: @@ -13,6 +18,7 @@ jobs: CORE_TAG: ${{ env.CORE_TAG }} CORE_HASH: ${{ env.CORE_HASH }} ALL_BOARD_DATA: ${{ env.ALL_BOARD_DATA }} + ALL_BOARD_FQBNS: ${{ env.ALL_BOARD_FQBNS }} ARTIFACTS: ${{ env.ARTIFACTS }} SUB_ARCHES: ${{ env.SUB_ARCHES }} steps: @@ -21,7 +27,7 @@ jobs: run: | sudo apt-get remove --purge man-db -y # skips the mandb triggers sudo apt-get update - sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build ccache + sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build - uses: actions/checkout@v4 with: @@ -33,14 +39,15 @@ jobs: - name: Initialize Zephyr environment run: | yes | ./extra/bootstrap.sh -o=--filter=tree:0 - echo "CORE_HASH=$(git describe --always)" >> "$GITHUB_ENV" + echo "CORE_HASH=$(git describe --always $BRANCH_NAME)" >> "$GITHUB_ENV" echo "ALL_BOARD_DATA=$(extra/get_board_details.sh | jq -c 'sort_by(.variant)')" >> "$GITHUB_ENV" echo "## Building \`$(extra/get_core_version.sh)\`" >> "$GITHUB_STEP_SUMMARY" - name: Map output packages # needs the 
above env vars to be usable run: | - echo "CORE_TAG=$(git describe --tags --exact-match 2>/dev/null || echo $CORE_HASH)" >> "$GITHUB_ENV" + echo "CORE_TAG=$(git describe --tags --exact-match $BRANCH_NAME 2>/dev/null || echo $CORE_HASH)" >> "$GITHUB_ENV" + echo "ALL_BOARD_FQBNS=$(jq -c 'map((. + {link_mode: "static"}), (. + {link_mode: "dynamic"}))' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" echo "ARTIFACTS=$(jq -c '["zephyr"] + (map(.artifact) | unique)' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" echo "SUB_ARCHES=$(jq -c 'map(.subarch) | unique' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" @@ -60,13 +67,15 @@ jobs: path: arduino-api.tar.zstd build-board: - name: Build loader for ${{ matrix.board }} + # NOTE: this name is hardcoded in ci_inspect_logs.py + name: Build for ${{ matrix.board }} runs-on: ubuntu-latest needs: - build-env env: CCACHE_IGNOREOPTIONS: -specs=* - OUTPUT_ARTIFACT: binaries-${{ matrix.board }}-${{ needs.build-env.outputs.CORE_HASH }} + ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }} + REPORT: reports/zephyr-${{ matrix.variant }} strategy: matrix: include: @@ -80,40 +89,73 @@ jobs: - name: Restore build environment run: | + sudo apt-get remove --purge man-db -y # skips the mandb triggers + sudo apt-get update + sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build ccache (cd ~ && tar --use-compress-program=unzstd -xpf build-env.tar.zstd && rm build-env.tar.zstd) - name: ccache uses: hendrikmuhs/ccache-action@v1.2 with: verbose: 1 + key: ${{ github.job }}-${{ matrix.board }} - name: Build loader shell: bash run: | - if ! ./extra/build.sh ${{ matrix.board }} 2> >(tee error.log) ; then + mkdir -p reports + if ! ./extra/build.sh ${{ matrix.board }} 1> >(tee $REPORT.stdout) 2> >(tee $REPORT.stderr) ; then echo "### :x: ${{ matrix.board }} (\`${{ matrix.variant }}\`) build errors" > $GITHUB_STEP_SUMMARY echo >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - cat error.log >> $GITHUB_STEP_SUMMARY + cat $REPORT.stderr >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY exit 1 fi - - name: Package board binaries + # look for warnings (errors are a happy path!) 
+ grep -i "warning:" $REPORT.stdout > $REPORT.warnings || true + + # extract the memory usage table (from the header to the first non-% line) + # override the size of the Flash with the size of the loader partition + # and add the size of the sketch partition (not reported by Zephyr) + LOADER_SIZE=$(( $(cat variants/${{ matrix.variant }}/syms-static.ld | grep '_loader_max_size' | cut -d '=' -f 2 | tr -d ') ;') )) + SKETCH_SIZE=$(( $(cat variants/${{ matrix.variant }}/syms-static.ld | grep '_sketch_max_size' | cut -d '=' -f 2 | tr -d ') ;') )) + cat $REPORT.stdout | sed -n '/^Memory region/,/^[^%]*$/p' | head -n -1 \ + | awk 'BEGIN {split("B KB MB GB", u); for(i in u) m[u[i]]=1024^(i-1)} /:/ {print "[\"" $1 "\"," $2*m[$3] "," $4*m[$5] "]"}' \ + | sort \ + | jq -s "map((select(.[0] == \"FLASH:\") | .[2]) |= ${LOADER_SIZE}) + [ [ \"SKETCH:\", 0, ${SKETCH_SIZE} ] ]" > $REPORT.meminfo + jq + + - name: Package board artifacts if: ${{ !cancelled() }} run: | + cp firmwares/zephyr-${{ matrix.variant }}.config $REPORT.config + echo "REPORT_FILES<> $GITHUB_ENV + ls reports/* >> $GITHUB_ENV + echo "EOF" >> $GITHUB_ENV + + # Archive built binaries (and build dir on failure) + [ "${{ job.status }}" == "failure" ] && FULL_BUILD_DIR="build/${{ matrix.variant }}/" tar chf - \ firmwares/*${{ matrix.variant }}* \ variants/${{ matrix.variant }}/ \ - ${{ (job.status == 'failure') && format('build/{0}/', matrix.variant) }} \ - | zstd > ${OUTPUT_ARTIFACT}.tar.zstd + ${FULL_BUILD_DIR} \ + | zstd > binaries-${ARTIFACT_TAG}.tar.zstd - name: Archive board binaries if: ${{ !cancelled() }} uses: actions/upload-artifact@v4 with: - name: ${{ format('{0}{1}', (job.status == 'failure') && 'failed-' || '', env.OUTPUT_ARTIFACT) }} - path: ${{ env.OUTPUT_ARTIFACT }}.tar.zstd + name: ${{ format('{0}binaries-{1}', (job.status == 'failure') && 'failed-' || '', env.ARTIFACT_TAG) }} + path: binaries-${{ env.ARTIFACT_TAG }}.tar.zstd + + - name: Archive build reports + if: ${{ !cancelled() }} + uses: actions/upload-artifact@v4 + with: + name: build-report-${{ env.ARTIFACT_TAG }} + path: ${{ env.REPORT_FILES }} package-core: name: Package ${{ matrix.artifact }} @@ -122,6 +164,7 @@ jobs: - build-env - build-board env: + ALL_BOARD_DATA: ${{ needs.build-env.outputs.ALL_BOARD_DATA }} CORE_ARTIFACT: ArduinoCore-${{ matrix.artifact }}-${{ needs.build-env.outputs.CORE_HASH }} CORE_TAG: ${{ needs.build-env.outputs.CORE_TAG }} strategy: @@ -156,6 +199,7 @@ jobs: tar --use-compress-program=unzstd -xpf $f done ./extra/package_core.sh ${{ matrix.artifact }} ${CORE_TAG} distrib/${CORE_ARTIFACT}.tar.bz2 + [ ${{ matrix.artifact }} == "zephyr" ] && tar jcf distrib/${CORE_ARTIFACT}-spdx.tar.bz2 variants/*/spdx/ || true - uses: actions/upload-artifact@v4 if: ${{ success() || failure() }} @@ -163,12 +207,17 @@ jobs: name: ${{ env.CORE_ARTIFACT }} path: distrib/${{ env.CORE_ARTIFACT }}.tar.bz2 + - uses: actions/upload-artifact@v4 + if: ${{ matrix.artifact == 'zephyr' }} + with: + name: ${{ env.CORE_ARTIFACT }}-spdx + path: distrib/${{ env.CORE_ARTIFACT }}-spdx.tar.bz2 + cleanup-build: name: Clean up intermediates runs-on: ubuntu-latest needs: - package-core - if: always() steps: - uses: geekyeggo/delete-artifact@v5.1.0 with: @@ -179,7 +228,8 @@ jobs: failOnError: false test-core: - name: Test ${{ matrix.subarch }}:${{ matrix.board }} + # NOTE: this name is hardcoded in ci_inspect_logs.py + name: Test ${{ matrix.board }}:${{ matrix.link_mode }} runs-on: ubuntu-latest needs: - build-env @@ -187,14 +237,23 @@ jobs: strategy: matrix: include: - ${{ 
fromJSON( needs.build-env.outputs.ALL_BOARD_DATA ) }} + ${{ fromJSON( needs.build-env.outputs.ALL_BOARD_FQBNS ) }} fail-fast: false env: PLAT: arduino:${{ matrix.subarch }} - FQBN: arduino:${{ matrix.subarch }}:${{ matrix.board }} + FQBN: arduino:${{ matrix.subarch }}:${{ matrix.board }}:link_mode=${{ matrix.link_mode }} CORE_ARTIFACT: ArduinoCore-${{ matrix.artifact }}-${{ needs.build-env.outputs.CORE_HASH }} + ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }}-${{ matrix.link_mode }} if: ${{ !cancelled() && needs.build-env.result == 'success' }} steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + sparse-checkout: | + extra/ci_test_list.sh + extra/artifacts/ + - uses: actions/download-artifact@v4 with: name: ${{ env.CORE_ARTIFACT }} @@ -204,22 +263,24 @@ jobs: tar xf ${CORE_ARTIFACT}.tar.bz2 # will create ArduinoCore-zephyr/ echo "REPORT_FILE=$(echo ${FQBN} | tr ':' '-').json" >> $GITHUB_ENV - - name: Create Blink sketch + - name: Get test sketches run: | - mkdir Blink/ - wget -nv https://raw.githubusercontent.com/arduino/arduino-examples/refs/heads/main/examples/01.Basics/Blink/Blink.ino -P Blink/ + # sets ALL_TESTS and ALL_LIBRARIES env vars + extra/ci_test_list.sh ${{ matrix.artifact }} ${{ matrix.variant }} - - name: Compile Blink for ${{ matrix.board }} - uses: pillo79/compile-sketches@main + - name: Compile tests for ${{ matrix.board }} + uses: pillo79/compile-sketches@4a1dead03155c17ddedc373699d7aa80a78a6c7d # next with: fqbn: ${{ env.FQBN }} platforms: | - # Use Board Manager to install the latest release of Arduino Zephyr Boards to get the toolchain - - name: "arduino:zephyr" - source-url: "https://downloads.arduino.cc/packages/package_zephyr_index.json" + # Use Board Manager version first, to install the toolchain + - name: ${{ env.PLAT }} - name: ${{ env.PLAT }} source-path: "ArduinoCore-zephyr" - sketch-paths: Blink + sketch-paths: | + ${{ env.ALL_TESTS }} + libraries: | + ${{ env.ALL_LIBRARIES }} cli-compile-flags: | - '--build-property' - 'compiler.c.extra_flags=-Wno-type-limits -Wno-missing-field-initializers' @@ -227,37 +288,23 @@ jobs: - 'compiler.cpp.extra_flags=-Wno-type-limits -Wno-missing-field-initializers' verbose: 'false' enable-deltas-report: 'false' - enable-warnings-report: 'true' - enable-warnings-log: 'true' - - - name: Get job ID - id: job_id - if: ${{ success() || failure() }} - uses: actions/github-script@main - with: - script: | - const { data: workflow_run } = await github.rest.actions.listJobsForWorkflowRun({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.runId - }); - const job_name = `Test ${{ matrix.subarch }}:${{ matrix.board }}` - return workflow_run.jobs.find((job) => job.name === job_name).id; + enable-issues-report: 'true' + always-succeed: 'true' - name: Prepare log if: ${{ success() || failure() }} run: | + [ ! 
-f sketches-reports/${REPORT_FILE} ] && mkdir -p sketches-reports && echo "{}" > sketches-reports/${REPORT_FILE} sed -i -e 's!/home/runner/.arduino15/packages/arduino/hardware/zephyr/[^/]*/!!g' sketches-reports/${REPORT_FILE} - cat sketches-reports/${REPORT_FILE} | jq -cr ".boards[0].sketches[0] += { job_id: ${{ steps.job_id.outputs.result }} }" > ${REPORT_FILE} && mv ${REPORT_FILE} sketches-reports/ - uses: actions/upload-artifact@v4 if: ${{ success() || failure() }} with: - name: test-report-${{ needs.build-env.outputs.CORE_TAG }}-${{ matrix.board }} + name: test-report-${{ env.ARTIFACT_TAG }} path: sketches-reports/* - collect-logs: - name: Collect logs + inspect-logs: + name: Analyze logs runs-on: ubuntu-latest needs: - build-env @@ -266,67 +313,66 @@ jobs: if: ${{ !cancelled() && needs.build-env.result == 'success' }} env: ALL_BOARD_DATA: ${{ needs.build-env.outputs.ALL_BOARD_DATA }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + fetch-tags: true + - uses: actions/download-artifact@v4 with: path: . - pattern: test-report-* + pattern: "*-report-*" merge-multiple: true - run: | - ARTIFACTS=$(jq -cr 'map(.artifact) | unique | .[]' <<< ${ALL_BOARD_DATA}) # this avoids the 'zephyr' artifact - for artifact in $ARTIFACTS ; do - echo "### \`$artifact\` test results:" >> "$GITHUB_STEP_SUMMARY" - jq -c "map(select(.artifact == \"$artifact\")) | .[]" <<< ${ALL_BOARD_DATA} | while read -r BOARD_DATA; do - BOARD=$(echo $BOARD_DATA | jq -cr '.board') - VARIANT=$(echo $BOARD_DATA | jq -cr '.variant') - SUBARCH=$(echo $BOARD_DATA | jq -cr '.subarch') - FQBN="arduino:$SUBARCH:$BOARD" - REPORT_FILE="$(echo $FQBN | tr ':' '-').json" - if [ ! -f $REPORT_FILE ]; then - echo "* :x: $BOARD (\`$VARIANT\`) - No report found?" >> "$GITHUB_STEP_SUMMARY" - else - REPORT=$(jq -cr '.boards[0].sketches[0]' $REPORT_FILE) - JOB_ID=$(echo $REPORT | jq -cr '.job_id') - JOB_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${JOB_ID}#step:5:2" - if ! $(echo $REPORT | jq -cr '.compilation_success') ; then - echo "* :x: [$BOARD]($JOB_URL) (\`$VARIANT\`) - Build failed" >> "$GITHUB_STEP_SUMMARY" - else - WARNINGS=$(echo $REPORT | jq -cr '.warnings.current.absolute // 0') - if [ $WARNINGS -eq 0 ]; then - echo "* :white_check_mark: $BOARD (\`$VARIANT\`) - Build successful" >> "$GITHUB_STEP_SUMMARY" - else - echo >> "$GITHUB_STEP_SUMMARY" - echo "
   :warning: $BOARD ($VARIANT) - $WARNINGS warnings:" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - echo $REPORT | jq -cr '.warnings_log[]' >> "$GITHUB_STEP_SUMMARY" - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - echo "
" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - fi - fi - fi - done - done + export WORKFLOW_JOBS=$(gh run view ${{ github.run_id }} --attempt ${{ github.run_attempt }} --json jobs --jq '.jobs') + extra/ci_inspect_logs.py result summary full_log + + cat summary >> $GITHUB_STEP_SUMMARY + cat full_log >> $GITHUB_STEP_SUMMARY + + if [ "${{ github.event_name }}" == "pull_request" ]; then + mkdir -p comment-request + echo "${{ github.event.pull_request.number }}" > comment-request/pr_number + echo -e "## Built \`$(extra/get_core_version.sh)\`\n" > comment-request/comment_body + echo >> comment-request/comment_body + cat summary | sed -e 's!\${\\color{\S*}\(.*\)}\$!\1!g' -e 's!\\%!%!g' >> comment-request/comment_body + fi + + - name: Archive comment information + uses: actions/upload-artifact@v4 + if: ${{ github.event_name == 'pull_request' }} + with: + name: comment-request + path: comment-request/ + retention-days: 1 - name: Clean up intermediate artifacts uses: geekyeggo/delete-artifact@v5.1.0 with: - name: test-report-* + name: | + build-report-* + test-report-* failOnError: false + - run: | + [ $(cat result) == 'PASSED' ] # otherwise CI test failed + verify-core: runs-on: ubuntu-latest - if: cancelled() || contains(needs.*.result, 'failure') needs: - build-env - package-core - - test-core + - inspect-logs + if: ${{ !cancelled() }} steps: - - name: Notify failure - run: exit 1 + - name: CI run result + run: | + exit ${{ contains(needs.*.result, 'failure') && '1' || '0' }} publish-core: name: Publish core @@ -335,7 +381,7 @@ jobs: needs: - build-env - package-core - - test-core + - inspect-logs environment: production permissions: id-token: write @@ -365,7 +411,7 @@ jobs: needs: - build-env - package-core - - test-core + - inspect-logs env: CORE_TAG: ${{ needs.build-env.outputs.CORE_TAG }} CORE_HASH: ${{ needs.build-env.outputs.CORE_HASH }} diff --git a/cores/arduino/abi.cpp b/cores/arduino/abi.cpp index 872c8d083..e2116516d 100644 --- a/cores/arduino/abi.cpp +++ b/cores/arduino/abi.cpp @@ -6,17 +6,6 @@ #include -namespace std { -void __throw_length_error(const char *__s __attribute__((unused))) { -} - -void __throw_bad_alloc() { -} - -void __throw_bad_function_call() { -} -}; // namespace std - extern "C" { void *__dso_handle = (void *)&__dso_handle; diff --git a/extra/artifacts/_common.test_setup.sh b/extra/artifacts/_common.test_setup.sh new file mode 100644 index 000000000..79fa43f21 --- /dev/null +++ b/extra/artifacts/_common.test_setup.sh @@ -0,0 +1,28 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. +# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip [ ...] +# - get_latest_release [ ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. 
+ +if [ "$ARTIFACT" == "zephyr_contrib" ] ; then + # Minimal safety test for Zephyr contrib boards + get_branch_tip examples arduino/arduino-examples main \ + examples/01.Basics/Blink +else + # Get a few core Arduino examples + get_branch_tip examples arduino/arduino-examples main \ + examples/01.Basics/Blink \ + examples/01.Basics/AnalogReadSerial \ + examples/04.Communication/SerialPassthrough \ + + # Smoke test for C++ features + get_latest_release libraries arduino-libraries/Arduino_JSON \ + + # Smoke test for SPI API compatibilty + # get_branch_tip libraries PaulStoffregen/SerialFlash master +fi diff --git a/extra/artifacts/zephyr_contrib.exc b/extra/artifacts/zephyr_contrib.exc new file mode 100644 index 000000000..794d52fc4 --- /dev/null +++ b/extra/artifacts/zephyr_contrib.exc @@ -0,0 +1,3 @@ +libraries/Camera/ +libraries/Storage/ +libraries/Zephyr_SDRAM/ diff --git a/extra/artifacts/zephyr_main.test_setup.sh b/extra/artifacts/zephyr_main.test_setup.sh new file mode 100644 index 000000000..5f2424927 --- /dev/null +++ b/extra/artifacts/zephyr_main.test_setup.sh @@ -0,0 +1,20 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. +# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip [ ...] +# - get_latest_release [ ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. + +# ArduinoBLE +get_branch_tip libraries arduino-libraries/ArduinoBLE master \ + examples/Central/LedControl \ + examples/Central/Scan \ + examples/Peripheral/Advertising/EnhancedAdvertising \ + examples/Peripheral/ButtonLED \ + +# Arduino_SecureElement +get_latest_release libraries arduino-libraries/Arduino_SecureElement diff --git a/extra/artifacts/zephyr_unoq.exc b/extra/artifacts/zephyr_unoq.exc new file mode 100644 index 000000000..eb3870852 --- /dev/null +++ b/extra/artifacts/zephyr_unoq.exc @@ -0,0 +1,5 @@ +libraries/Camera/ +libraries/Ethernet/ +libraries/Storage/ +libraries/WiFi/ +libraries/Zephyr_SDRAM/ diff --git a/extra/artifacts/zephyr_unoq.only b/extra/artifacts/zephyr_unoq.only new file mode 100644 index 000000000..af7b603ee --- /dev/null +++ b/extra/artifacts/zephyr_unoq.only @@ -0,0 +1,3 @@ +libraries/Arduino_LED_Matrix/ +libraries/Arduino_RouterBridge/ +libraries/Arduino_RPClite/ diff --git a/extra/artifacts/zephyr_unoq.test_setup.sh b/extra/artifacts/zephyr_unoq.test_setup.sh new file mode 100644 index 000000000..5af16b32e --- /dev/null +++ b/extra/artifacts/zephyr_unoq.test_setup.sh @@ -0,0 +1,13 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. +# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip [ ...] +# - get_latest_release [ ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. 
+ +# ArduinoBLE +get_branch_tip libraries arduino-libraries/ArduinoBLE master diff --git a/extra/build.sh b/extra/build.sh index 2b2bac7d5..75aead604 100755 --- a/extra/build.sh +++ b/extra/build.sh @@ -73,7 +73,9 @@ fi BUILD_DIR=build/${variant} VARIANT_DIR=variants/${variant} rm -rf ${BUILD_DIR} +west spdx --init -d ${BUILD_DIR} west build -d ${BUILD_DIR} -b ${target} loader -t llext-edk ${args} +west spdx --analyze-includes -d ${BUILD_DIR} -s ${VARIANT_DIR}/spdx # Extract the generated EDK tarball and copy it to the variant directory mkdir -p ${VARIANT_DIR} firmwares @@ -105,6 +107,7 @@ extra/gen_provides.py "${BUILD_DIR}/zephyr/zephyr.elf" -LF \ "+kheap__system_heap" \ "*sketch_base_addr=_sketch_start" \ "*sketch_max_size=_sketch_max_size" \ + "*loader_max_size=_loader_max_size" \ "malloc=__wrap_malloc" \ "free=__wrap_free" \ "realloc=__wrap_realloc" \ diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py new file mode 100755 index 000000000..7f0a501d3 --- /dev/null +++ b/extra/ci_inspect_logs.py @@ -0,0 +1,644 @@ +#!/usr/bin/env python3 + +from collections import defaultdict +import json +import os +import re +import sys + +SKIP = -1 # Test was not performed +PASS = 0 # (PASS) Compiled successfully +WARNING = 1 # (PASS) Compiled with warnings +EXPECTED_ERROR = 2 # (PASS*) Compilation failed with expected errors +ERROR = 3 # (FAIL) Compilation failed with errors +FAILURE = 4 # Test run failed to complete + +TEST_LEGEND = [ + "Test passed successfully, with no warnings or errors.", + "Test completed with some warnings; no errors detected.", + "Test completed with errors, but all are known/expected.", + "Test completed with unexpected errors.", + "Test run failed to complete.", + "Test was skipped." # -1 +] + +TEST_STATUS = [ + ":green_circle:", + ":yellow_circle:", + ":no_entry_sign:", + ":red_circle:", + ":fire:", + ":new_moon:" # -1 +] + +BOARD_STATUS = [ + ":white_check_mark:", + ":white_check_mark:*", + ":heavy_check_mark:*", + ":x:", + ":fire:", + ":new_moon:" # -1 +] + +# Loader build status data structure +class LoaderEntry: + def __init__(self, artifact, board, variant, job_link): + self.artifact = artifact + self.board = board + self.variant = variant + self.job_link = job_link + + self.warnings = self.read_warnings() # list of warning messages + self.config = self.read_config() # set of Kconfig settings + self.meminfo = self.read_meminfo() # memory usage report + + if not (self.config and self.meminfo): + self.status = FAILURE + elif self.warnings: + self.status = WARNING + else: + self.status = PASS + + def read_config(self): + # get board's config settings + report_file = f"zephyr-{self.variant}.config" + try: + with open(report_file, 'r') as f: + configs = {} + for line in f: + if line.startswith('#') or '=' not in line: + continue + sym, val = line.strip().split('=', 1) + if val.startswith('"'): + configs[sym] = val.strip('"') + elif val=='y': + configs[sym] = 1 + else: + configs[sym] = eval(val) + return configs + except Exception as e: + return {} + + def read_warnings(self): + # get list of board warnings (may be empty) + report_file = f"zephyr-{self.variant}.warnings" + try: + with open(report_file, 'r') as f: + return [ line.strip() for line in f if line.strip() ] + except Exception as e: + return [] + + def read_meminfo(self): + # get board's memory report + report_file = f"zephyr-{self.variant}.meminfo" + try: + with open(report_file, 'r') as f: + report_data = json.load(f) + meminfo = { region.replace(':',''): [used, total] for region, used, total in 
report_data } + meminfo.pop('IDT_LIST', None) + return meminfo + except Exception as e: + return {} + +# Single test data structure +class TestEntry: + def __init__(self, artifact, board, sketch, link_mode, excepted, status, issues, job_link): + self.artifact = artifact + self.board = board + self.sketch = sketch + self.link_mode = link_mode + self.excepted = excepted + self.status = EXPECTED_ERROR if excepted and status == ERROR else status + self.issues = issues + self.job_link = job_link + self.invalid_exception = excepted and status in (PASS, WARNING) + + # (1.................) (2....) (3................) (4.) + match = re.search(r'(libraries|examples)/([^/]+)/(examples/|extras/)?(.*)', sketch) + if match: + self.group = match.group(2) + self.name = match.group(4) + else: + self.group = "" + self.name = sketch + +# Summary data structure +class TestGroup: + def __init__(self): + # Sets to track unique board, sketch and (group,name) tuples + self.boards = set() + self.sketches = set() + self.group_names = set() + # Counts of test results by status + self.counts = { status : 0 for status in [PASS, WARNING, EXPECTED_ERROR, ERROR, FAILURE] } + # Overall status of the group + self.status = SKIP + # List of individual TestEntry objects by feature + self.tests = [] + self.tests_with_issues = [] + self.tests_with_invalid_exceptions = [] + + def track(self, test_entry): + """ + Update this group with a new test entry + """ + self.tests.append(test_entry) + if test_entry.issues: + self.tests_with_issues.append(test_entry) + if test_entry.invalid_exception: + self.tests_with_invalid_exceptions.append(test_entry) + + self.counts[test_entry.status] += 1 + self.status = max(self.status, test_entry.status) + self.boards.add(test_entry.board) + self.sketches.add(test_entry.sketch) + self.group_names.add((test_entry.group, test_entry.name)) + +# Global Data Structures +# ---------------------- + +# Loader build results, one per board +BOARD_LOADERS = {} # { board: LoaderEntry() } + +# Test results grouped by artifact, board, and artifact/sketch +# (grouping sketch results across different artifacts is really confusing) +ARTIFACT_TESTS = defaultdict(TestGroup) # { artifact: TestGroup() } +BOARD_TESTS = defaultdict(TestGroup) # { board: TestGroup() } +SKETCH_TESTS = defaultdict(lambda: defaultdict(TestGroup)) # { artifact: { sketch: TestGroup() } } + +def log_test(artifact, board, sketch, link_mode, exceptions, status, issues, job_link=None): + """ + Logs individual test results into the global test tracking structures. + """ + + # Ensure issues is a list + if isinstance(issues, str): + issues = [ issues ] + + # Create the test entry + excepted = any(pattern.match(sketch) for pattern in exceptions) + test_entry = TestEntry(artifact, board, sketch, link_mode, excepted, status, issues, job_link) + + # Track in global structures + ARTIFACT_TESTS[artifact].track(test_entry) + BOARD_TESTS[board].track(test_entry) + SKETCH_TESTS[artifact][sketch].track(test_entry) + +def print_summary(): + """ + Prints the summary section of the report, including overall status and a recap table. + """ + + if ci_run_passed: + title = f"# [CI run]({JOB_LINK_STEM}#user-content-summary) PASSED :green_circle:\n" + else: + failed_boards = [ f"{BOARD_STATUS[res.status]} `{board}`" for board, res in BOARD_TESTS.items() if res.status in (ERROR, FAILURE) ] + title = f"# [CI run]({JOB_LINK_STEM}#user-content-summary) FAILED: {', '.join(failed_boards)}\n" + f_print("\n") + f_print(title) + + # Print the recap table, one line per board. 
+ # 8 columns: + # - Artifact name (multi-row for boards under the same artifact) + # - Board name + # - Core compilation status (ok, number of warnings) + # - Overall sketch compilation status for the core + # - Used RAM percent + # - Sketches tested + # - Sketches with warnings + # - Failed sketches + f_print("\n") + + for artifact in ARTIFACTS: + artifact_boards = sorted(ARTIFACT_TESTS[artifact].boards) + artifact_status = ARTIFACT_TESTS[artifact].status + + first_row_text = f"" + for board in artifact_boards: + # Artifact name (multi-row) + f_print(f"{first_row_text}") + first_row_text = "" + + # Board name + res = BOARD_LOADERS[board] + if res.job_link: + f_print(f"") + continue + + pin = f"{len(res.warnings)} :label:" if res.status == WARNING else ":green_book:" + f_print(f"") + + # Sketch build status + message on failure + res = BOARD_TESTS[board] + f_print(f"") + if res.status == FAILURE: + f_print(f"") + continue + + # Memory usage + f_print(f"") + + # Test count summary + tests_str = len(res.tests) or "-" + warnings_str = res.counts[WARNING] or "-" + errors_str = f"{res.counts[ERROR]}" if res.counts[ERROR] else "-" + if res.counts[EXPECTED_ERROR]: + if errors_str == "-": # only expected errors + errors_str = f"({res.counts[EXPECTED_ERROR]}*)" + else: # both actual and expected errors + errors_str += f" (+{res.counts[EXPECTED_ERROR]}*)" + f_print(f"") + f_print("
ArtifactBoardCoreTestsRAMSketchesWarningsErrors
{BOARD_STATUS[artifact_status]} {artifact}
{board}") + else: + f_print(f"{board}") + + # Core build status + message on failure + if res.status == FAILURE: + f_print(f"{BOARD_STATUS[FAILURE]}Core build failed!
{pin}{BOARD_STATUS[res.status]}") + f_print("
".join(f"{test.issues[0]} (full log)" for test in res.tests)) + f_print("
\n\n{color_entry(BOARD_LOADERS[board].meminfo['RAM'], False)}\n\n{tests_str}{warnings_str}{errors_str}
\n") + + # Print the legend + f_print("
Legend") + f_print("

") + for status in FAILURE, ERROR, EXPECTED_ERROR, WARNING, PASS, SKIP: + f_print(f"") + f_print(f"") + f_print(f"") + f_print("
BoardTestStatus description
{BOARD_STATUS[status]}{TEST_STATUS[status]}{TEST_LEGEND[status]}
\n") + + # Print artifact error warnings + for artifact in ARTIFACTS: + artifact_boards = sorted(ARTIFACT_TESTS[artifact].boards) + failed_boards = [ f"`{board}`" for board in artifact_boards if BOARD_TESTS[board].status in (ERROR, FAILURE) ] + + if failed_boards: + f_print("> [!CAUTION]") + f_print(f"> `{artifact}` is blocked due to failures on {', '.join(failed_boards)}!\n") + +def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: True): + """ + Prints a matrix of test results for a given artifact and its boards. The + sketch_filter function determines which sketches to include. No table is printed + if no sketches pass the filter. + """ + + # Build the header row, which includes board names and board statuses. + # Headers have a link to the board's CI job, if available. + header_row = f"{artifact} {title}" + for board in artifact_boards: + res = BOARD_TESTS[board] + header_col = f"{board}
{BOARD_STATUS[res.status]}" + header_row += f"{header_col}" + header_row += "" + + # Group sketches by library + sketch_groups = defaultdict(list) + for sketch in ARTIFACT_TESTS[artifact].sketches: + res = SKETCH_TESTS[artifact][sketch] + if not sketch_filter(res): + continue + + for group, name in res.group_names: + sketch_groups[group].append((name, res)) + + # Build the data rows, grouping libraries together. Each row corresponds to + # a sketch, each cell to the test result icon of that sketch on that board. + data_rows = [] + invalid_exceptions = defaultdict(set) + for group in sorted(sketch_groups.keys()): + if group: + data_rows.append(f"{group}{group}") + #data_rows.append(f"{group}{''.join('---' for x in artifact_boards)}") + for sample, res in sorted(sketch_groups[group]): + row_data = f"{TEST_STATUS[res.status]}" + # If there are issues, make the sketch name a link to the detailed logs below + name_link = f"{sample}" + if res.tests_with_issues: + sketch = next(iter(res.sketches)) # only one + sketch_id = sketch.replace('/', '_').replace(' ', '_').replace('-', '_') + name_link = f"{name_link}" + row_data += f"{name_link}" + + for board in artifact_boards: + # there are multiple tests per sketch&board due to FQBN variations + status = max( [ t.status for t in res.tests if t.board == board ], default=SKIP) + invalid = any( [ t.invalid_exception for t in res.tests if t.board == board ] ) + if invalid and status in (PASS, WARNING): + status_icon = ":interrobang:" + invalid_exceptions[board].add(sketch) + else: + status_icon = TEST_STATUS[status] + row_data += f"{status_icon}" + + row_data += "" + data_rows.append(row_data) + + if not data_rows: + return + + # Print the table + f_print("") + f_print(header_row) + for row in data_rows: + f_print(row) + f_print("
\n") + + if invalid_exceptions: + f_print(f"
{artifact} :interrobang: unnecessary known_example_issues.txt entries") + f_print("

") + for board in sorted(invalid_exceptions.keys()): + sketches = sorted(invalid_exceptions[board]) + first_row_text = f"" + for sketch in sketches: + f_print(f"{first_row_text}") + first_row_text = "" + f_print("
BoardSketch
{board}
{sketch}
\n") + + # Print detailed logs for sketches with issues + for group in sorted(sketch_groups.keys()): + for sample, res in sorted(sketch_groups[group]): + if not res.tests_with_issues: + continue + + # Header and anchor for the detailed logs + sketch = next(iter(res.sketches)) # only one + sketch_id = sketch.replace('/', '_').replace(' ', '_').replace('-', '_') + f_print(f"") + f_print(f"
{artifact} logs for {TEST_STATUS[res.status]} {group} {sample}") + f_print("

") + + # Test logs by board + for test in sorted(res.tests_with_issues, key=lambda x: x.status, reverse=True): + test_text = f"{test.board}:{test.link_mode}" + if test.job_link: + test_text += f" (full log)" + f_print(f"") + + f_print("
{TEST_STATUS[test.status]} {test_text}
\n\n```") + for line in test.issues: + f_print(line) + f_print("```\n
\n") + +REGIONS_BY_SOC = defaultdict(set) # { soc: set(regions) } + +BASE_COLOR = 0x20 +DELTA_COLOR = 0xff-2*BASE_COLOR + +# percent is in range [0, 1] where 0 is good, 1 is bad +def get_percent(values): + if not values: + return 0.0 + return values[0] / values[1] + +def color_cmd(percent): + color_amt = int(DELTA_COLOR * percent) + return f"\\color{{#{BASE_COLOR + color_amt:02x}{0xff - color_amt:02x}{BASE_COLOR:02x}}}" + +def color_entry(values, full=True): + if not values: + return "" + + percent = get_percent(values) + if full: + return f"${{{color_cmd(percent)}\\frac{{{values[0]}}}{{{values[1]}}}\\space({percent*100:0.1f}\\\\%)}}$" + else: + return f"{'' if percent < 0.85 else ':warning:'} ${{{color_cmd(percent)}{percent*100:0.1f}\\\\%}}$" + +def print_mem_report(artifact, artifact_boards): + + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("") + f_print("") + + for soc, board in sorted((ALL_BOARD_DATA[board]['soc'], board) for board in artifact_boards): + max_pct = max([ get_percent(BOARD_LOADERS[board].meminfo[r]) for r in ('FLASH', 'RAM') ]) + icon = ':warning:' if max_pct > 0.85 else '' + board_str = board.replace('_', '\\\\_') + + row = [ + icon, + f"${{{color_cmd(max_pct)}\\texttt{{{board_str}}}}}$", + f"{soc}", + color_entry(BOARD_LOADERS[board].meminfo['FLASH']), + color_entry(BOARD_LOADERS[board].meminfo['RAM']), + f"${{{ BOARD_LOADERS[board].meminfo['SKETCH'][1] }}}$", + f"${{{ BOARD_LOADERS[board].config.get('CONFIG_HEAP_MEM_POOL_SIZE', 0) }}}$", + f"${{{ BOARD_LOADERS[board].config['CONFIG_SRAM_SIZE']*1024 - BOARD_LOADERS[board].meminfo['RAM'][0] }}}$", + f"${{{ BOARD_LOADERS[board].config['CONFIG_LLEXT_HEAP_SIZE']*1024 }}}$", + f"${{{ BOARD_LOADERS[board].config.get('CONFIG_MBEDTLS_HEAP_SIZE', '-') }}}$" + ] + + f_print("") + col_aligns = ['center', 'left', 'center', 'right', 'right', 'right', 'right', 'right', 'right', 'right'] + for index, cell in enumerate(row): + f_print(f"") + f_print("") + f_print("
BoardSoCFLASHRAMUser
sketch
User heapsOS heaps
SYSLIBCLLEXTMBEDTLS
\n\n{cell}\n\n
") + + extra_data_present = False + for soc in sorted(list(set([ ALL_BOARD_DATA[board]['soc'] for board in artifact_boards ]))): + soc_boards = [ board for board in artifact_boards if ALL_BOARD_DATA[board]['soc'] == soc ] + sorted_regions = sorted(r for r in REGIONS_BY_SOC[soc] if r not in ('FLASH', 'RAM', 'SKETCH')) + if not sorted_regions: + continue + + if not extra_data_present: + f_print("
SoC-specific data

\n") + extra_data_present = True + + f_print(f"") + for r in sorted_regions: + f_print(f"") + f_print("") + for board in sorted(soc_boards): + f_print(f"") + for r in sorted_regions: + if r in BOARD_LOADERS[board].meminfo: + f_print(f"") + else: + f_print(f"") + f_print("") + f_print("
{soc}Board{r}
{board}\n\n{color_entry(BOARD_LOADERS[board].meminfo[r])}\n\n
\n") + # f_print() + # for c in ('CONFIG_HEAP_MEM_POOL_SIZE', 'CONFIG_LLEXT_HEAP_SIZE', 'CONFIG_MBEDTLS_HEAP_SIZE'): + # if c in BOARD_LOADERS[board].config: + # f_print(f"{c:>25} {BOARD_LOADERS[board].config[c]:8}") + # f_print("") + + if extra_data_present: + f_print("
") + +# Main Logic +# ---------- + +# Environment Variable Checks +ALL_BOARD_DATA_STR = os.environ.get('ALL_BOARD_DATA') +WORKFLOW_JOBS_STR = os.environ.get('WORKFLOW_JOBS') +GITHUB_REPOSITORY = os.environ.get('GITHUB_REPOSITORY') +GITHUB_RUN_ID = os.environ.get('GITHUB_RUN_ID') +JOB_LINK_STEM = f"https://github.com/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}" + +if not ALL_BOARD_DATA_STR or not GITHUB_REPOSITORY or not GITHUB_RUN_ID: + print("Not in a Github CI run, cannot proceed.") + sys.exit(0) + +if not len(sys.argv) in (1, 4): + print("Usage: ci_inspect_logs.py [ ]") + sys.exit(1) + +if len(sys.argv) == 4: + results_file = sys.argv[1] + summary_file = sys.argv[2] + full_report_file = sys.argv[3] +else: + results_file = "/dev/null" + summary_file = full_report_file = "/dev/stdout" + +ALL_BOARD_DATA = json.loads(ALL_BOARD_DATA_STR) +ALL_BOARD_DATA = { b['board']: b for b in ALL_BOARD_DATA } + +JOB_URLS = json.loads(WORKFLOW_JOBS_STR) +JOB_URLS = { j['name']: j['url'] for j in JOB_URLS } + +for board_data in ALL_BOARD_DATA.values(): + # Extract common fields + artifact = board_data['artifact'] + board = board_data['board'] + variant = board_data['variant'] + subarch = board_data['subarch'] + + # Get job link for this build + job_link = JOB_URLS.get(f"Build for {board}") + if job_link: + job_link += "#step:5:1" + + BOARD_LOADERS[board] = LoaderEntry(artifact, board, variant, job_link) + if BOARD_LOADERS[board].status == FAILURE: + log_test(artifact, board, 'CI test', '', [], FAILURE, "Core data could not be read.") + continue + + soc = BOARD_LOADERS[board].config['CONFIG_SOC'] + board_data['soc'] = soc + REGIONS_BY_SOC[soc].update(BOARD_LOADERS[board].meminfo.keys()) + + # Get list of expected errors for this board/variant + exceptions = [] + if os.path.exists(f"variants/{variant}/known_example_issues.txt"): + with open(f"variants/{variant}/known_example_issues.txt", 'r') as f: + for line in f: + sketch_pattern = line.split('#')[0].strip() + if sketch_pattern: + exceptions.append(re.compile(f"^(ArduinoCore-zephyr/)?{sketch_pattern}")) + + # Get raw data from report file + for link_mode in ("static", "dynamic"): + # Get job link for this test + job_link = JOB_URLS.get(f"Test {board}:{link_mode}") + if job_link: + job_link += "#step:6:1" + + # Extract data from the report file + report_file = f"arduino-{subarch}-{board}-link_mode={link_mode}.json" + if not os.path.exists(report_file): + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Report file for {link_mode} not found.", job_link) + continue # Skip to the next board + + try: + with open(report_file, 'r') as f: + report_data = json.load(f) + except Exception as e: + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Error reading {link_mode} report file: {e}", job_link) + continue # Skip to the next board + + reports = report_data.get('boards', [{}])[0].get('sketches', []) + if not reports: + log_test(artifact, board, 'CI test', '', [], FAILURE, "Test report for {link_mode} is empty, check CI log.", job_link) + continue # Skip to the next board + + # Iterate through individual sketch reports + for report in reports: + sketch = report.get('name', 'unknown_sketch') + success = report.get('compilation_success', False) + issues = report.get('issues', []) + + # Replace long absolute paths with '...' for brevity. 
+ sketch_issues = [ re.sub(r'(/.+?)((/[^/]+){3}):', r'...\2:', issue) for issue in issues ] + + if not success: + status = ERROR + elif len(sketch_issues): # Implies warnings/non-critical issues + status = WARNING + else: + status = PASS + + log_test(artifact, board, sketch, link_mode, exceptions, status, sketch_issues, job_link) + +ARTIFACTS = sorted(ARTIFACT_TESTS.keys()) + +# Begin output of the report +# -------------------------- + +ci_run_status = max(res.status for res in ARTIFACT_TESTS.values()) +ci_run_passed = ci_run_status in (PASS, WARNING, EXPECTED_ERROR) + +with open(summary_file, 'w') as f: + f_print = lambda *args, **kwargs: print(*args, file=f, **kwargs) + + print_summary() + +with open(full_report_file, 'w') as f: + f_print = lambda *args, **kwargs: print(*args, file=f, **kwargs) + + # Print the test matrix sections per artifact + for artifact in ARTIFACTS: + artifact_boards = sorted([ board for board in ARTIFACT_TESTS[artifact].boards if BOARD_TESTS[board].status != FAILURE ]) + + if not artifact_boards: + continue + + f_print(f"") + f_print("\n---\n") + + if any(BOARD_LOADERS[board].status != PASS for board in artifact_boards): + summary = f"{artifact} loader build warnings" + f_print(f"
{summary}

\n") + f_print("") + f_print("") + for board in artifact_boards: + if BOARD_LOADERS[board].status == PASS: + continue + f_print(f"") + f_print("
BoardWarnings
{board}
")
+                for warning in BOARD_LOADERS[board].warnings:
+                    f_print(warning)
+                f_print("
\n") + f_print("
\n") + + print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status in (ERROR, EXPECTED_ERROR) or res.tests_with_invalid_exceptions) + + successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] + warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] + if successful_tests: + summary = f"{successful_tests} successful {artifact} tests hidden" + if warning_tests: + summary += f" ({warning_tests} with warnings)" + + f_print(f"
{summary}

\n") + print_test_matrix(artifact, artifact_boards, "tests", sketch_filter=lambda res: res.status in (PASS, WARNING)) + f_print("
\n") + + f_print(f"
Memory usage report for {artifact}

") + print_mem_report(artifact, artifact_boards) + f_print("
") + +with open(results_file, 'w') as f: + if ci_run_passed: + f.write('PASSED\n') diff --git a/extra/ci_test_list.sh b/extra/ci_test_list.sh new file mode 100755 index 000000000..d8fc1f797 --- /dev/null +++ b/extra/ci_test_list.sh @@ -0,0 +1,105 @@ +#!/bin/bash +# +# This script generates a list of all libraries and their dependencies for use +# in GitHub Actions environment variables. It also generates a list of all example +# .ino files, excluding those specified in a variant's skip list. +# +# The core under test should be extracted in the 'ArduinoCore-zephyr' subdirectory. + +if [ "$#" -lt 2 ] ; then + echo "Usage: $0 [...]" + exit 1 +fi + +if [ -z "$GITHUB_ENV" ] || [ ! -d ArduinoCore-zephyr/ ]; then + echo "Not in a Github CI run, cannot proceed." + exit 1 +fi + +ARTIFACT=$1 +VARIANT_DIR="ArduinoCore-zephyr/variants/$2" +shift 2 + +search_for_sketches_in() { + local folder="$1" + find "$folder" -name *.ino 2>/dev/null | sed -e 's/^\.\///' +} + +fetch_and_extract() { + local temp_file=$(mktemp).tar.gz + local temp_dir=$(mktemp -d) + local link="$1" + local inner_folder="${temp_dir}/$2" + local output_folder="$3" + shift 3 + + wget -nv "$link" -O "$temp_file" + tar -xzf "$temp_file" -C "$temp_dir" + + mkdir -p "$(dirname $output_folder)" + mv $inner_folder "$output_folder" + if [ $# -eq 0 ] ; then + # Search entire project for tests + search_for_sketches_in "$output_folder" >> $ALL_TESTS + else + # Search only specified paths for tests + for item in "$@" ; do + search_for_sketches_in "$output_folder/${item}" >> $ALL_TESTS + done + fi + rm -rf $tmpdir +} + +get_latest_release() { + local folder="$1" + local repo="$2" + local project="${repo##*/}" + local url=$(curl -s "https://api.github.com/repos/${repo}/releases/latest" | jq -r '.tarball_url') + shift 2 + + echo "Getting latest release for ${repo}" + + fetch_and_extract "$url" "*-${project}-*" "ArduinoCore-zephyr/${folder}/${project}" "$@" +} + +get_branch_tip() { + local folder="$1" + local repo="$2" + local branch="$3" + local project="${repo##*/}" + local url="https://github.com/${repo}/archive/refs/heads/${branch}.tar.gz" + shift 3 + + echo "Getting branch ${branch} of ${repo}" + + fetch_and_extract "$url" "${project}-${branch}" "ArduinoCore-zephyr/${folder}/${project}" "$@" +} + +ALL_TESTS=$(mktemp) +search_for_sketches_in ArduinoCore-zephyr/libraries/ >> $ALL_TESTS +search_for_sketches_in ArduinoCore-zephyr/examples/ >> $ALL_TESTS + +# Source common and artifact-specific scripts to get additional libraries in +[ -f extra/artifacts/_common.test_setup.sh ] && . extra/artifacts/_common.test_setup.sh +[ -f extra/artifacts/${ARTIFACT}.test_setup.sh ] && . 
extra/artifacts/${ARTIFACT}.test_setup.sh + +echo "ALL_LIBRARIES<> $GITHUB_ENV +find ArduinoCore-zephyr/libraries/ -name library.properties | while read -r propfile; do + # Version constraints are ignored as they are not supported by compile-sketches + grep '^depends=' "$propfile" | cut -d= -f2- | tr ',' '\n' | sed -e 's/\s*(.*)\s*$//' | while read -r dep; do + [ -z "$dep" ] || printf " - name: \"%s\"\n" "$dep" >> $GITHUB_ENV + done +done +printf " - source-path: \"%s\"\n" $(find ArduinoCore-zephyr/libraries/ -maxdepth 1 -mindepth 1 -type d) >> $GITHUB_ENV +echo "EOF" >> $GITHUB_ENV + +if [ -f $VARIANT_DIR/skip_these_examples.txt ] ; then + cat $VARIANT_DIR/skip_these_examples.txt | sed -e 's/\s*#.*//' -e '/^\s*$/d' | while read -r pattern; do + sed -i -e "\\|^\\(ArduinoCore-zephyr/\\)\\?${pattern}|d" $ALL_TESTS + done +fi +echo "ALL_TESTS<> $GITHUB_ENV +cat $ALL_TESTS | while read -r infile; do + printf " - \"%s\"\n" "$(dirname "$infile")" >> $GITHUB_ENV +done +echo "EOF" >> $GITHUB_ENV diff --git a/extra/get_core_version.sh b/extra/get_core_version.sh index 5cfdf6b7c..dcba2945f 100755 --- a/extra/get_core_version.sh +++ b/extra/get_core_version.sh @@ -26,13 +26,22 @@ # If there are no tags at all (for example when run in a fork etc), it defaults # to "9.9.9-+". -VERSION=$(git describe --tags --exact-match 2>/dev/null) +# In CI, BRANCH_NAME points to the branch or tag ref that triggered the +# workflow run (e.g., "refs/heads/main" or "refs/tags/v1.2.3"). Without this, +# git describe would use HEAD, which is a temporary detachead commit. +# If BRANCH_NAME is not set, assume a local clone. Fall back to using HEAD +# and also include --dirty to test for uncommitted changes. + +GIT_TARGET=${BRANCH_NAME:---dirty HEAD} + +VERSION=$(git describe --tags --exact-match $GIT_TARGET 2>/dev/null) if [ -z "$VERSION" ]; then - VERSION=$(git describe --tags --dirty 2>/dev/null | + VERSION=$(git describe --tags $GIT_TARGET 2>/dev/null | + # (1.............) (2...)* sed 's/\.\([[:digit:]]\+\)\(-.*\)*-[[:digit:]]\+-g/ \1 \2 /' | awk '{ if (NF==3) { print $1 "." ($2+1) "-0.dev+" $3 } else { print $1 "." 
$2 $3 "-0.dev+" $4 }}') if [ -z "$VERSION" ]; then - VERSION="9.9.9-$(date '+%Y%m%d-%H%M%S')+$(git describe --always --dirty)" + VERSION="9.9.9-$(date '+%Y%m%d-%H%M%S')+$(git describe --always $GIT_TARGET)" fi fi echo $VERSION diff --git a/extra/package_core.sh b/extra/package_core.sh index 191678bc3..245b7e23e 100755 --- a/extra/package_core.sh +++ b/extra/package_core.sh @@ -49,6 +49,16 @@ for board in $EXCLUDED_BOARDS ; do # remove (even commented) lines for excluded boards sed -i "/^\(\s*#\s*\)\?${board}\./d" $TEMP_BOARDS done +# set proper maximum sizes for included variants +for variant in $INCLUDED_VARIANTS ; do + board=$(echo ${BOARD_DETAILS} | jq -cr "map(select(.variant == \"${variant}\")) | .[0].board") + # maximum sketch size: size of sketch partition (exact limit) + # maximum data size: configured LLEXT heap size (larger bound, real limit is smaller) + CODE_SIZE=$(( $(cat variants/${variant}/syms-static.ld | grep '_sketch_max_size' | cut -d '=' -f 2 | tr -d ');') )) + DATA_SIZE=$(( 1024*$(cat firmwares/zephyr-${variant}.config | grep 'LLEXT_HEAP_SIZE' | cut -d '=' -f 2) )) + sed -i -e "s/^${board}\.upload\.maximum_size=.*/${board}.upload.maximum_size=${CODE_SIZE}/" $TEMP_BOARDS + sed -i -e "s/^${board}\.upload\.maximum_data_size=.*/${board}.upload.maximum_data_size=${DATA_SIZE}/" $TEMP_BOARDS +done # remove multiple empty lines sed -i '/^$/N;/^\n$/D' $TEMP_BOARDS @@ -56,12 +66,12 @@ sed -i '/^$/N;/^\n$/D' $TEMP_BOARDS TEMP_PLATFORM=$(mktemp -p . | sed 's/\.\///') cat platform.txt > ${TEMP_PLATFORM} [ -z "$ARTIFACT_NAME" ] || sed -ie "s/^name=.*/name=${ARTIFACT_NAME}/" ${TEMP_PLATFORM} -sed -ie "s/^version=.*/version=$(extra/get_core_version.sh)/" ${TEMP_PLATFORM} +sed -i -e "s/^version=.*/version=$(extra/get_core_version.sh)/" ${TEMP_PLATFORM} declutter_file() { - # remove comments and empty lines + # remove comments, whitespace at EOL, '/' dir terminators and empty lines [ -f "$1" ] || return 0 - cat "$1" | sed -e 's/\s*#.*//' | grep -v '^\s*$' + cat "$1" | sed -e 's/\s*#.*//' -e 's/\s*$//' -e 's/\/$//' | grep -v '^\s*$' } # create the list of files and directories to include @@ -70,6 +80,7 @@ echo ${TEMP_BOARDS} >> ${TEMP_INC} echo ${TEMP_PLATFORM} >> ${TEMP_INC} declutter_file extra/artifacts/_common.inc >> ${TEMP_INC} declutter_file extra/artifacts/$ARTIFACT.inc >> ${TEMP_INC} +declutter_file extra/artifacts/$ARTIFACT.only >> ${TEMP_INC} for variant in $INCLUDED_VARIANTS ; do echo "- ${variant}" echo "variants/${variant}/" >> ${TEMP_INC} @@ -84,6 +95,9 @@ done TEMP_EXC=$(mktemp -p . 
| sed 's/\.\///') declutter_file extra/artifacts/_common.exc >> ${TEMP_EXC} declutter_file extra/artifacts/$ARTIFACT.exc >> ${TEMP_EXC} +for f in $(ls extra/artifacts/*.only | grep -v "$ARTIFACT.only") ; do + declutter_file $f >> ${TEMP_EXC} +done mkdir -p $(dirname ${OUTPUT_FILE}) tar -cjhf ${OUTPUT_FILE} -X ${TEMP_EXC} -T ${TEMP_INC} \ diff --git a/libraries/SPI/SPI.cpp b/libraries/SPI/SPI.cpp index 1a0b39115..974bdcc9b 100644 --- a/libraries/SPI/SPI.cpp +++ b/libraries/SPI/SPI.cpp @@ -33,8 +33,6 @@ void arduino::ZephyrSPI::transfer(void *buf, size_t count) { } int arduino::ZephyrSPI::transfer(void *buf, size_t len, const struct spi_config *config) { - int ret; - const struct spi_buf tx_buf = {.buf = buf, .len = len}; const struct spi_buf_set tx_buf_set = { .buffers = &tx_buf, @@ -51,9 +49,11 @@ int arduino::ZephyrSPI::transfer(void *buf, size_t len, const struct spi_config } void arduino::ZephyrSPI::usingInterrupt(int interruptNumber) { + ARG_UNUSED(interruptNumber); } void arduino::ZephyrSPI::notUsingInterrupt(int interruptNumber) { + ARG_UNUSED(interruptNumber); } void arduino::ZephyrSPI::beginTransaction(SPISettings settings) { @@ -96,12 +96,12 @@ void arduino::ZephyrSPI::beginTransaction(SPISettings settings) { // Set SPI configuration structure for 8-bit transfers memset(&config, 0, sizeof(struct spi_config)); config.operation = mode | SPI_WORD_SET(8); - config.frequency = max(SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); + config.frequency = max((uint32_t)SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); // Set SPI configuration structure for 16-bit transfers memset(&config16, 0, sizeof(struct spi_config)); config16.operation = mode | SPI_WORD_SET(16); - config16.frequency = max(SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); + config16.frequency = max((uint32_t)SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); } void arduino::ZephyrSPI::endTransaction(void) { diff --git a/loader/main.c b/loader/main.c index 7276f303e..cac8669ac 100644 --- a/loader/main.c +++ b/loader/main.c @@ -23,6 +23,8 @@ LOG_MODULE_REGISTER(sketch); #include #include +#include + #define HEADER_LEN 16 struct sketch_header_v1 { @@ -89,10 +91,23 @@ void llext_entry(void *arg0, void *arg1, void *arg2) { } #endif /* CONFIG_USERSPACE */ +/* Export Flash parameters for use by core building scripts */ __attribute__((retain)) const uintptr_t sketch_base_addr = DT_REG_ADDR(DT_GPARENT(DT_NODELABEL(user_sketch))) + DT_REG_ADDR(DT_NODELABEL(user_sketch)); __attribute__((retain)) const uintptr_t sketch_max_size = DT_REG_SIZE(DT_NODELABEL(user_sketch)); +#if DT_HAS_FIXED_PARTITION_LABEL(image_0) + #define LOADER_MAX_SIZE DT_REG_SIZE(DT_NODE_BY_FIXED_PARTITION_LABEL(image_0)) +#elif CONFIG_FLASH_LOAD_SIZE > 0 + #define LOADER_MAX_SIZE CONFIG_FLASH_LOAD_SIZE +#else + #ifndef CONFIG_FLASH_LOAD_OFFSET + #define CONFIG_FLASH_LOAD_OFFSET 0 + #endif + #define LOADER_MAX_SIZE (DT_REG_SIZE(DT_NODELABEL(flash0)) - CONFIG_FLASH_LOAD_OFFSET) +#endif +__attribute__((retain)) const uintptr_t loader_max_size = LOADER_MAX_SIZE; + static int loader(const struct shell *sh) { const struct flash_area *fa; int rc; diff --git a/loader/prj.conf b/loader/prj.conf index dc40e9533..567b081e3 100644 --- a/loader/prj.conf +++ b/loader/prj.conf @@ -4,6 +4,7 @@ CONFIG_USERSPACE=n CONFIG_ARM_MPU=n +CONFIG_BUILD_OUTPUT_META=y CONFIG_LOG=y CONFIG_LOG_MODE_IMMEDIATE=y diff --git a/platform.txt b/platform.txt index 604cc319e..eb9279e1b 100644 --- a/platform.txt +++ b/platform.txt @@ -40,7 +40,7 @@ 
compiler.zephyr.includes_file={build.variant.path}/includes.txt compiler.zephyr.cflags_file={build.variant.path}/cflags.txt compiler.zephyr.cxxflags_file={build.variant.path}/cxxflags.txt compiler.zephyr.macros="-imacros{build.variant.path}/llext-edk/include/zephyr/include/generated/zephyr/autoconf.h" "-imacros{build.variant.path}/llext-edk/include/zephyr/include/zephyr/toolchain/zephyr_stdint.h" -compiler.zephyr.common_cxxflags=-fdata-sections -ffunction-sections -fno-unwind-tables +compiler.zephyr.common_cxxflags=-fno-exceptions -fno-rtti -fdata-sections -ffunction-sections -fno-unwind-tables compiler.zephyr.common_ldflags=-fno-exceptions -fno-rtti -fno-threadsafe-statics -fno-unwind-tables -fno-use-cxa-atexit -lstdc++ -lsupc++ -lnosys -nostdlib compiler.zephyr.extra_cxxflags= @@ -54,7 +54,7 @@ build.boot_mode=wait upload.extension=elf-zsk.bin build.ldscript.path={runtime.platform.path}/variants/_ldscripts -build.link_command="{compiler.path}{compiler.c.elf.cmd}" "-L{build.path}" "-L{build.variant.path}" {compiler.c.elf.flags} {compiler.c.elf.extra_flags} {build.extra_flags} {build.extra_ldflags} {compiler.zephyr.common_ldflags} --specs=picolibc.specs --specs=nosys.specs {compiler.ldflags} {object_files} -Wl,--start-group "{build.path}/{archive_file}" {compiler.zephyr.extra_ldflags} {compiler.libraries.ldflags} -Wl,--end-group {build.link_args.{build.link_mode}} +build.link_command="{compiler.path}{compiler.c.elf.cmd}" "-L{build.path}" "-L{build.variant.path}" {compiler.c.elf.flags} {compiler.c.elf.extra_flags} {build.extra_flags} {build.extra_ldflags} {compiler.zephyr.common_ldflags} --specs=nano.specs --specs=nosys.specs {compiler.ldflags} {object_files} -Wl,--start-group "{build.path}/{archive_file}" {compiler.zephyr.extra_ldflags} {compiler.libraries.ldflags} -Wl,--end-group {build.link_args.{build.link_mode}} build.check_command-dynamic={build.link_command} {build.link_args.check-dynamic} -o "{build.path}/{build.project_name}_check.tmp" build.check_command-static=/bin/true @@ -63,7 +63,7 @@ build.combine_command={build.link_command} {build.link_args.build-{build.link_mo # link_args.* are included by any link_command depending on the link_mode build.link_args.dynamic=-e main -build.link_args.static=-lc -lm -lgcc -Wl,--wrap=random -Wl,--wrap=calloc -Wl,--wrap=free -Wl,--wrap=malloc -Wl,--wrap=realloc +build.link_args.static=-Wl,--wrap=random -Wl,--wrap=calloc -Wl,--wrap=free -Wl,--wrap=malloc -Wl,--wrap=realloc # link_args.check-* are used to check the build. Only LLEXT needs these to emulate a static build (no -r!). 
build.link_args.check-dynamic="-T{build.variant.path}/syms-dynamic.ld" "-T{build.ldscript.path}/memory-check.ld" "-T{build.ldscript.path}/build-static.ld" diff --git a/variants/_ldscripts/build-dynamic.ld b/variants/_ldscripts/build-dynamic.ld index c5e8dc41e..9b72a544c 100644 --- a/variants/_ldscripts/build-dynamic.ld +++ b/variants/_ldscripts/build-dynamic.ld @@ -19,7 +19,6 @@ SECTIONS { *stdexcept.o *eh_*.o *cow-stdexcept.o - *functexcept.o *cow-string-inst.o } diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay index 6aba6b4b0..109a4ed32 100644 --- a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay +++ b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay @@ -340,10 +340,10 @@ }; +/delete-node/ &slot0_partition; + &flash0 { partitions { - /delete-node/ slot0_partition; - slot0_partition: partition@40000 { label = "image-0"; reg = <0x040000 0xa0000>; diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt new file mode 100644 index 000000000..e49284a81 --- /dev/null +++ b/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/Ethernet diff --git a/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. 
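As implemented in extra/ci_inspect_logs.py above, each non-comment line of known_example_issues.txt is compiled as ^(ArduinoCore-zephyr/)?<pattern> and matched against the sketch paths in the test reports: a matching build failure is downgraded to an expected error, and a matching sketch that unexpectedly passes is flagged as an unnecessary entry. A quick local check of a candidate pattern might look like this (the pattern and sketch path are made up):

```bash
# Made-up pattern and sketch path, mirroring the prefix handling in extra/ci_inspect_logs.py
pattern='libraries/SomeLib/examples/Advanced/.*'
sketch='ArduinoCore-zephyr/libraries/SomeLib/examples/Advanced/Demo/Demo.ino'
echo "$sketch" | grep -qE "^(ArduinoCore-zephyr/)?${pattern}" && echo "match: failure would be reported as expected"
```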
diff --git a/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt b/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt
new file mode 100644
index 000000000..8b64f3484
--- /dev/null
+++ b/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt
@@ -0,0 +1,13 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/Arduino_SecureElement
+libraries/Camera
+libraries/Ethernet
+libraries/Storage
+libraries/WiFi
+libraries/Zephyr_SDRAM
diff --git a/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay b/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay
index 1830d3617..59136b0fa 100644
--- a/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay
+++ b/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay
@@ -1,16 +1,5 @@
-&flash0 {
-    partitions {
-        compatible = "fixed-partitions";
-        #address-cells = <1>;
-        #size-cells = <1>;
-
-        /* Arduino user sketch partition */
-        user_sketch: partition@C4000 {
-            reg = <0x000C4000 0x000B8000>;
-            label = "user";
-        };
-    };
-};
+/* Arduino user sketch partition */
+user_sketch: &slot1_partition {};
 
 / {
     zephyr,user {
diff --git a/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt b/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt
new file mode 100644
index 000000000..b25687e11
--- /dev/null
+++ b/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt
@@ -0,0 +1,10 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
+# no A0 analog pin support
+examples/arduino-examples/examples/01.Basics/AnalogReadSerial
diff --git a/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt b/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt
new file mode 100644
index 000000000..2b4d23cad
--- /dev/null
+++ b/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt
@@ -0,0 +1,16 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+# no Serial1 object
+examples/arduino-examples/examples/04.Communication/SerialPassthrough
+
+libraries/Arduino_SecureElement
+libraries/Camera
+libraries/Ethernet
+libraries/Storage
+libraries/WiFi
+libraries/Zephyr_SDRAM
diff --git a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay
index b9cd0dd2b..da560f03b 100644
--- a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay
+++ b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay
@@ -1,5 +1,15 @@
+/delete-node/ &slot0_partition;
+/delete-node/ &slot1_partition;
+/delete-node/ &scratch_partition;
+/delete-node/ &storage_partition;
+
 &flash0 {
     partitions {
+        slot0_partition: partition@10000 {
+            label = "image-0"; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:154 */
+            reg = < 0x10000 0x60000 >; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:155 */
+        };
+
         user_sketch: partition@70000 {
             label = "user";
             reg = <0x070000 0x10000>;
@@ -38,6 +48,6 @@
         serials = <&uart0>;
         i2cs = <&i2c1>;
-        spis = <&spi1>;
+        spis = <>; /* spi0, conflicts with uart0 */
     };
 };
diff --git a/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt b/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt b/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt
new file mode 100644
index 000000000..2b4d23cad
--- /dev/null
+++ b/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt
@@ -0,0 +1,16 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+# no Serial1 object
+examples/arduino-examples/examples/04.Communication/SerialPassthrough
+
+libraries/Arduino_SecureElement
+libraries/Camera
+libraries/Ethernet
+libraries/Storage
+libraries/WiFi
+libraries/Zephyr_SDRAM
diff --git a/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt
new file mode 100644
index 000000000..8b392234b
--- /dev/null
+++ b/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt
@@ -0,0 +1,12 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
+# bug at core link time
+libraries/Ethernet/examples/UDPSendReceiveString
+libraries/Ethernet/examples/UdpNtpClient
+libraries/Storage/examples/FlashFormat
diff --git a/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt
new file mode 100644
index 000000000..8394dcf0b
--- /dev/null
+++ b/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt
@@ -0,0 +1,9 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/Camera
+libraries/Zephyr_SDRAM
diff --git a/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt
new file mode 100644
index 000000000..9b7ef5387
--- /dev/null
+++ b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt
@@ -0,0 +1,14 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
+# needs porting the SE05X library from mbed
+libraries/Arduino_SecureElement/
+
+# bug at core link time
+libraries/Ethernet/examples/UDPSendReceiveString
+libraries/Ethernet/examples/UdpNtpClient
diff --git a/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt
new file mode 100644
index 000000000..a0e556dde
--- /dev/null
+++ b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/Camera
diff --git a/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt
new file mode 100644
index 000000000..8b392234b
--- /dev/null
+++ b/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt
@@ -0,0 +1,12 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
+# bug at core link time
+libraries/Ethernet/examples/UDPSendReceiveString
+libraries/Ethernet/examples/UdpNtpClient
+libraries/Storage/examples/FlashFormat
diff --git a/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt
new file mode 100644
index 000000000..50059711b
--- /dev/null
+++ b/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt
@@ -0,0 +1,7 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
diff --git a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay
index b37c40282..98d9552b9 100644
--- a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay
+++ b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay
@@ -54,13 +54,28 @@
 };
 */
+/delete-node/ &slot0_partition;
+/delete-node/ &slot1_partition;
+/delete-node/ &scratch_partition;
+/delete-node/ &storage_partition;
+
 
 &flash0 {
     partitions {
-        bootanimation: partition@90000 {
-            reg = <0x090000 DT_SIZE_K(64)>;
+        slot0_partition: partition@10000 {
+            label = "image-0";
+            reg = < 0x10000 DT_SIZE_K(768) >;
+        };
+
+        bootanimation: partition@d0000 {
+            reg = < 0xd0000 DT_SIZE_K(192) >;
         };
-        user_sketch: partition@f0000 {
-            reg = <0x0F0000 DT_SIZE_K(64)>;
+
+        user_sketch: partition@100000 {
+            reg = < 0x100000 DT_SIZE_K(768) >;
+        };
+
+        storage_partition: partition@1c0000 {
+            reg = < 0x1c0000 DT_SIZE_K(256) >;
         };
     };
 };
diff --git a/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt b/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt b/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt
new file mode 100644
index 000000000..078ff2e8d
--- /dev/null
+++ b/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt
@@ -0,0 +1,9 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/ArduinoBLE
+libraries/Arduino_RPClite/extras/integration_test
diff --git a/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt b/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt b/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt
new file mode 100644
index 000000000..6ea1139b4
--- /dev/null
+++ b/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/
diff --git a/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt b/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt b/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt
new file mode 100644
index 000000000..6ea1139b4
--- /dev/null
+++ b/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/
diff --git a/variants/frdm_rw612_rw612/known_example_issues.txt b/variants/frdm_rw612_rw612/known_example_issues.txt
new file mode 100644
index 000000000..f80a141a5
--- /dev/null
+++ b/variants/frdm_rw612_rw612/known_example_issues.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are known to be currently failing for
+# this specific variant.
+#
+# Each line in this file is treated as a regular expression and will be matched
+# against the path of each sketch found in this repo. If a match is found, the
+# sketch compilation result will be ignored.
+
diff --git a/variants/frdm_rw612_rw612/skip_these_examples.txt b/variants/frdm_rw612_rw612/skip_these_examples.txt
new file mode 100644
index 000000000..6ea1139b4
--- /dev/null
+++ b/variants/frdm_rw612_rw612/skip_these_examples.txt
@@ -0,0 +1,8 @@
+# This file contains a list of examples that are shipped with the core (or used
+# by continuous integration tests), but are not applicable for this variant,
+# for example because it uses hardware features not present on the CPU or board.
+#
+# Each line in this file should contain the path to an example to exclude,
+# relative to the root of the repository.
+
+libraries/