mirror of https://github.com/littlefs-project/littlefs.git
synced 2026-03-27 13:39:57 +00:00
Added post-release script, cleaned up workflows
This addresses an outstanding maintainer annoyance: updating dependencies
to bring in new versions on each littlefs release.

But instead of adding a bunch of scripts to the tail end of the release
workflow, the post-release script just triggers a single
"repository_dispatch" event in the newly created littlefs.post-release
repo. From there any number of post-release workflows can be run. This
indirection should let the post-release scripts move much quicker than
littlefs itself, which helps offset how fragile these sorts of scripts are.

---

Also finished cleaning up the workflows now that they are mostly working.
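For illustration, a minimal sketch of the kind of workflow the
littlefs.post-release repo could run on the receiving end of this
dispatch (hypothetical: the real post-release workflows live in that
repo and are not part of this commit; only the event_type and
client_payload fields below come from the dispatch in this change):

    name: post-release
    on:
      repository_dispatch:
        types: [post-release]

    jobs:
      # example consumer: just report which release triggered us
      post-release:
        runs-on: ubuntu-18.04
        steps:
          - run: |
              echo "post-release for ${{github.event.client_payload.repo}}" \
                "version ${{github.event.client_payload.version}}"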
26 .github/workflows/post-release.yml vendored Normal file
@@ -0,0 +1,26 @@
+name: post-release
+on:
+  release:
+    branches: [master]
+    types: [released]
+
+jobs:
+  post-release:
+    runs-on: ubuntu-18.04
+    steps:
+      # trigger post-release in dependency repo, this indirection allows the
+      # dependency repo to be updated often without affecting this repo. At
+      # the time of this comment, the dependency repo is responsible for
+      # creating PRs for other dependent repos post-release.
+      - name: trigger-post-release
+        continue-on-error: true
+        run: |
+          curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+            "$GITHUB_API_URL/repos/${{secrets.POST_RELEASE_REPO}}/dispatches" \
+            -d "$(jq -n '{
+              event_type: "post-release",
+              client_payload: {
+                repo: env.GITHUB_REPOSITORY,
+                version: "${{github.event.release.tag_name}}"}}' \
+              | tee /dev/stderr)"
+
127 .github/workflows/release.yml vendored
@@ -7,14 +7,13 @@ on:
 
 jobs:
   release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
 
     # need to manually check for a couple things
     # - tests passed?
     # - we are the most recent commit on master?
-    if: |
-      github.event.workflow_run.conclusion == 'success' &&
-      github.event.workflow_run.head_sha == github.sha
+    if: ${{github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.head_sha == github.sha}}
 
     steps:
       - uses: actions/checkout@v2
@@ -71,33 +70,78 @@ jobs:
       # try to find results from tests
       - name: collect-results
         run: |
-          [ -e results/code-thumb.csv ] && \
-            ./scripts/code.py -u results/code-thumb.csv -s \
-            | awk 'NR==2 {printf "Code size,%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-readonly.csv ] && \
-            ./scripts/code.py -u results/code-thumb-readonly.csv -s \
-            | awk 'NR==2 {printf "Code size (readonly),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-threadsafe.csv ] && \
-            ./scripts/code.py -u results/code-thumb-threadsafe.csv -s \
-            | awk 'NR==2 {printf "Code size (threadsafe),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-migrate.csv ] && \
-            ./scripts/code.py -u results/code-thumb-migrate.csv -s \
-            | awk 'NR==2 {printf "Code size (migrate),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/coverage.csv ] && \
-            ./scripts/coverage.py -u results/coverage.csv -s \
-            | awk 'NR==2 {printf "Coverage,%.1f%% of %d lines\n",$4,$3}' \
-            >> results.csv
+          # previous results to compare against?
+          [ -n "$LFS_PREV_VERSION" ] && curl -sS \
+            "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/`
+              `status/$LFS_PREV_VERSION" \
+            | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]' \
+            >> prev-results.json \
+            || true
+
+          # unfortunately these each have their own format
+          [ -e results/code-thumb.csv ] && ( \
+            export PREV="$(jq -re '
+              select(.context == "results / code").description
+              | capture("Code size is (?<result>[0-9]+)").result' \
+              prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb.csv -s | awk '
+              NR==2 {printf "Code size,%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+              >> results.csv)
+          [ -e results/code-thumb-readonly.csv ] && ( \
+            export PREV="$(jq -re '
+              select(.context == "results / code (readonly)").description
+              | capture("Code size is (?<result>[0-9]+)").result' \
+              prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-readonly.csv -s | awk '
+              NR==2 {printf "Code size (readonly),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+              >> results.csv)
+          [ -e results/code-thumb-threadsafe.csv ] && ( \
+            export PREV="$(jq -re '
+              select(.context == "results / code (threadsafe)").description
+              | capture("Code size is (?<result>[0-9]+)").result' \
+              prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-threadsafe.csv -s | awk '
+              NR==2 {printf "Code size (threadsafe),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+              >> results.csv)
+          [ -e results/code-thumb-migrate.csv ] && ( \
+            export PREV="$(jq -re '
+              select(.context == "results / code (migrate)").description
+              | capture("Code size is (?<result>[0-9]+)").result' \
+              prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-migrate.csv -s | awk '
+              NR==2 {printf "Code size (migrate),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+              >> results.csv)
+          [ -e results/coverage.csv ] && ( \
+            export PREV="$(jq -re '
+              select(.context == "results / coverage").description
+              | capture("Coverage is (?<result>[0-9\\.]+)").result' \
+              prev-results.json || echo 0)"
+            ./scripts/coverage.py -u results/coverage.csv -s | awk -F '[ /%]+' '
+              NR==2 {printf "Coverage,%.1f%% of %d lines",$4,$3}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+              >> results.csv)
 
           # transpose to GitHub table
           [ -e results.csv ] || exit 0
           awk -F ',' '
             {label[NR]=$1; value[NR]=$2}
             END {
               for (r=1; r<=NR; r++) {printf "| %s ",label[r]}; printf "|\n";
               for (r=1; r<=NR; r++) {printf "|--:"}; printf "|\n";
               for (r=1; r<=NR; r++) {printf "|:--"}; printf "|\n";
               for (r=1; r<=NR; r++) {printf "| %s ",value[r]}; printf "|\n"}' \
             results.csv > results.txt
           echo "RESULTS:"
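Aside: the jq capture pattern above can be sanity-checked in isolation.
A sketch with a made-up status object, matching the description format
that the collect-status steps in test.yml publish:

    $ echo '{"context": "results / code", "description": "Code size is 8744 B"}' \
        | jq -re 'select(.context == "results / code").description
            | capture("Code size is (?<result>[0-9]+)").result'
    8744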
@@ -106,20 +150,25 @@ jobs:
       # find changes from history
       - name: collect-changes
         run: |
-          [ ! -z "$LFS_PREV_VERSION" ] || exit 0
-          git log --oneline "$LFS_PREV_VERSION.." \
-            --grep='^Merge' --invert-grep > changes.txt
+          [ -n "$LFS_PREV_VERSION" ] || exit 0
+          # use explicit link to github commit so that release notes can
+          # be copied elsewhere
+          git log "$LFS_PREV_VERSION.." \
+            --grep='^Merge' --invert-grep \
+            --format="format:[\`%h\`](`
+              `https://github.com/$GITHUB_REPOSITORY/commit/%h) %s" \
+            > changes.txt
           echo "CHANGES:"
           cat changes.txt
 
       # create and update major branches (vN and vN-prefix)
-      - name: build-major-branches
+      - name: create-major-branches
         run: |
           # create major branch
           git branch "v$LFS_VERSION_MAJOR" HEAD
 
           # create major prefix branch
-          git config user.name ${{secrets.BOT_USERNAME}}
+          git config user.name ${{secrets.BOT_USER}}
           git config user.email ${{secrets.BOT_EMAIL}}
           git fetch "https://github.com/$GITHUB_REPOSITORY.git" \
             "v$LFS_VERSION_MAJOR-prefix" || true
@@ -137,27 +186,19 @@ jobs:
             "v$LFS_VERSION_MAJOR-prefix"
 
       # build release notes
-      - name: build-release
+      - name: create-release
         run: |
-          # find changes since last release
-          #if [ ! -z "$LFS_PREV_VERSION" ]
-          #then
-          #  export CHANGES="$(git log --oneline "$LFS_PREV_VERSION.." \
-          #    --grep='^Merge' --invert-grep)"
-          #  printf "CHANGES\n%s\n\n" "$CHANGES"
-          #fi
-
           # create release and patch version tag (vN.N.N)
           # only draft if not a patch release
           [ -e results.txt ] && export RESULTS="$(cat results.txt)"
           [ -e changes.txt ] && export CHANGES="$(cat changes.txt)"
-          curl -sS -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+          curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
             "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/releases" \
-            -d "$(jq -sR '{
+            -d "$(jq -n '{
               tag_name: env.LFS_VERSION,
               name: env.LFS_VERSION | rtrimstr(".0"),
               target_commitish: "${{github.event.workflow_run.head_sha}}",
               draft: env.LFS_VERSION | endswith(".0"),
               body: [env.RESULTS, env.CHANGES | select(.)] | join("\n\n")}' \
-            | tee /dev/stderr)" > /dev/null
+            | tee /dev/stderr)"
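Aside: the rtrimstr/endswith pair above means minor releases (vN.N.0)
get a shortened name and start out as drafts, while patch releases are
published immediately. A quick jq check, with a made-up version:

    $ LFS_VERSION=v2.4.0 jq -n \
        '{name: env.LFS_VERSION | rtrimstr(".0"),
          draft: env.LFS_VERSION | endswith(".0")}'
    {
      "name": "v2.4",
      "draft": true
    }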
61 .github/workflows/status.yml vendored
@@ -6,30 +6,21 @@ on:
 
 jobs:
   status:
-    runs-on: ubuntu-latest
-    continue-on-error: true
-
+    runs-on: ubuntu-18.04
     steps:
-      - run: echo "${{toJSON(github.event.workflow_run)}}"
-
       # custom statuses?
       - uses: dawidd6/action-download-artifact@v2
         continue-on-error: true
         with:
           workflow: ${{github.event.workflow_run.name}}
           run_id: ${{github.event.workflow_run.id}}
           name: status
           path: status
       - name: update-status
         continue-on-error: true
         run: |
-          # TODO remove this
-          ls status
-          for f in status/*.json
-          do
-            cat $f
-          done
-
-          shopt -s nullglob
-          for s in status/*.json
+          for s in $(shopt -s nullglob ; echo status/*.json)
           do
             # parse requested status
             export STATE="$(jq -er '.state' $s)"
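Aside: the new loop expands the glob inside a subshell with nullglob
set, so an empty status directory yields an empty list instead of the
literal pattern. A bash demonstration (empty status/ directory assumed):

    $ for s in status/*.json ; do echo "$s" ; done
    status/*.json
    $ for s in $(shopt -s nullglob ; echo status/*.json) ; do echo "$s" ; done
    $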
@@ -43,7 +34,7 @@ jobs:
             export TARGET_STEP="$(jq -er '.target_step // ""' $s)"
             curl -sS -H "authorization: token ${{secrets.BOT_TOKEN}}" \
               "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/actions/runs/`
-              `${{github.event.workflow_run.id}}/jobs" \
+                `${{github.event.workflow_run.id}}/jobs" \
               | jq -er '.jobs[]
               | select(.name == env.TARGET_JOB)
               | .html_url
@@ -51,46 +42,14 @@ jobs:
             + ((.steps[]
             | select(.name == env.TARGET_STEP)
             | "#step:\(.number):0") // "")'))"
-          # TODO remove this
+          # print for debugging
           echo "$(jq -nc '{
             state: env.STATE,
             context: env.CONTEXT,
             description: env.DESCRIPTION,
             target_url: env.TARGET_URL}')"
           # update status
-          curl -sS -H "authorization: token ${{secrets.BOT_TOKEN}}" \
-            -X POST "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
+          curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+            "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
             `${{github.event.workflow_run.head_sha}}" \
-            -d "$(jq -nc '{
+            -d "$(jq -n '{
              state: env.STATE,
              context: env.CONTEXT,
              description: env.DESCRIPTION,
-             target_url: env.TARGET_URL}')"
-
-          #if jq -er '.target_url' $s
-          #then
-          #  export TARGET_URL="$(jq -er '.target_url' $s)"
-          #elif jq -er '.target_job' $s
-          #then
-          #
-          #fi
+             target_url: env.TARGET_URL}' \
+             | tee /dev/stderr)"
         done
-
-
-
-
-      # - id: status
-      #   run: |
-      #     echo "::set-output name=description::$(cat statuses/x86_64.txt | tr '\n' ' ')"
-      # - uses: octokit/request-action@v2.x
-      #   with:
-      #     route: POST /repos/{repo}/status/{sha}
-      #     repo: ${{github.repository}}
-      #     sha: ${{github.event.status.sha}}
-      #     context: ${{github.event.status.context}}
-      #     state: ${{github.event.status.state}}
-      #     description: ${{steps.status.outputs.description}}
-      #     target_url: ${{github.event.status.target_url}}
-      #
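Aside: the status/*.json artifacts consumed by this loop are produced
by the collect-status steps in test.yml. A representative example of
one such file (values made up; .state and .target_step are read
explicitly above, and the remaining fields follow the same pattern):

    {
      "state": "success",
      "context": "results / code",
      "description": "Code size is 8744 B",
      "target_job": "test (thumb)",
      "target_step": "results-code"
    }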
176 .github/workflows/test.yml vendored
@@ -8,7 +8,7 @@ env:
 jobs:
   # run tests
   test:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     strategy:
       fail-fast: false
       matrix:
@@ -38,7 +38,7 @@ jobs:
 
       # cross-compile with ARM Thumb (32-bit, little-endian)
       - name: install-thumb
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         run: |
           sudo apt-get install -qq \
             gcc-arm-linux-gnueabi \
@@ -50,7 +50,7 @@ jobs:
           qemu-arm -version
       # cross-compile with MIPS (32-bit, big-endian)
       - name: install-mips
-        if: matrix.arch == 'mips'
+        if: ${{matrix.arch == 'mips'}}
         run: |
           sudo apt-get install -qq \
             gcc-mips-linux-gnu \
@@ -62,7 +62,7 @@ jobs:
           qemu-mips -version
       # cross-compile with PowerPC (32-bit, big-endian)
       - name: install-powerpc
-        if: matrix.arch == 'powerpc'
+        if: ${{matrix.arch == 'powerpc'}}
         run: |
           sudo apt-get install -qq \
             gcc-powerpc-linux-gnu \
@@ -76,71 +76,71 @@ jobs:
       # make sure example can at least compile
       - name: test-example
         run: |
-          sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c && \
+          sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c
           make all CFLAGS+=" \
             -Duser_provided_block_device_read=NULL \
             -Duser_provided_block_device_prog=NULL \
             -Duser_provided_block_device_erase=NULL \
             -Duser_provided_block_device_sync=NULL \
             -include stdio.h"
           rm test.c
 
-      # # test configurations
-      # # normal+reentrant tests
-      # - name: test-default
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk"
-      # # NOR flash: read/prog = 1 block = 4KiB
-      # - name: test-nor
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
-      # # SD/eMMC: read/prog = 512 block = 512
-      # - name: test-emmc
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
-      # # NAND flash: read/prog = 4KiB block = 32KiB
-      # - name: test-nand
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
-      # # other extreme geometries that are useful for various corner cases
-      # - name: test-no-intrinsics
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_NO_INTRINSICS"
-      # - name: test-byte-writes
-      #   # it just takes too long to test byte-level writes when in qemu,
-      #   # should be plenty covered by the other configurations
-      #   if: matrix.arch == 'x86_64'
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
-      # - name: test-block-cycles
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_BLOCK_CYCLES=1"
-      # - name: test-odd-block-count
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
-      # - name: test-odd-block-size
-      #   run: |
-      #     make clean
-      #     make test TESTFLAGS+="-nrk \
-      #       -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"
+      # test configurations
+      # normal+reentrant tests
+      - name: test-default
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk"
+      # NOR flash: read/prog = 1 block = 4KiB
+      - name: test-nor
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
+      # SD/eMMC: read/prog = 512 block = 512
+      - name: test-emmc
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
+      # NAND flash: read/prog = 4KiB block = 32KiB
+      - name: test-nand
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
+      # other extreme geometries that are useful for various corner cases
+      - name: test-no-intrinsics
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_NO_INTRINSICS"
+      - name: test-byte-writes
+        # it just takes too long to test byte-level writes when in qemu,
+        # should be plenty covered by the other configurations
+        if: ${{matrix.arch == 'x86_64'}}
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
+      - name: test-block-cycles
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_BLOCK_CYCLES=1"
+      - name: test-odd-block-count
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
+      - name: test-odd-block-size
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"
 
-      # upload coveragefor later coverage
+      # upload coverage for later coverage
       - name: upload-coverage
         continue-on-error: true
         uses: actions/upload-artifact@v2
         with:
           name: coverage
@@ -149,7 +149,6 @@ jobs:
 
       # update results
       - name: results-code
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -161,7 +160,6 @@ jobs:
             -DLFS_NO_ERROR" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}.csv"
       - name: results-code-readonly
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -174,7 +172,6 @@ jobs:
             -DLFS_READONLY" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-readonly.csv"
       - name: results-code-threadsafe
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -187,7 +184,6 @@ jobs:
             -DLFS_THREADSAFE" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-threadsafe.csv"
       - name: results-code-migrate
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -200,7 +196,6 @@ jobs:
             -DLFS_MIGRATE" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-migrate.csv"
       - name: upload-results
-        continue-on-error: true
         uses: actions/upload-artifact@v2
         with:
           name: results
@@ -208,28 +203,25 @@ jobs:
       # limit reporting to Thumb, otherwise there would be too many numbers
       # flying around for the results to be easily readable
       - name: collect-status
-        continue-on-error: true
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         run: |
           mkdir -p status
-          for f in results/code*.csv
+          for f in $(shopt -s nullglob ; echo results/code*.csv)
           do
-            [ -e "$f" ] || continue
             export STEP="results-code$(
               echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p')"
             export CONTEXT="results / code$(
               echo $f | sed -n 's/.*code-.*-\(.*\).csv/ (\1)/p')"
             export PREV="$(curl -sS \
               "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
-              | jq -re "select(.sha != env.GITHUB_SHA) | .statuses[]
+              | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
               | select(.context == env.CONTEXT).description
-              | capture(\"Code size is (?<result>[0-9]+)\").result" \
+              | capture("Code size is (?<result>[0-9]+)").result' \
               || echo 0)"
-            echo $PREV
             export DESCRIPTION="$(./scripts/code.py -u $f -s | awk '
               NR==2 {printf "Code size is %d B",$2}
-              NR==2 && ENVIRON["PREV"] != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/$2}')"
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')"
             jq -n '{
               state: "success",
               context: env.CONTEXT,
@@ -240,8 +232,7 @@ jobs:
             echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p').json
           done
       - name: upload-status
-        continue-on-error: true
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         uses: actions/upload-artifact@v2
         with:
           name: status
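Aside: a worked example of the corrected delta computation (numbers made
up). The old expression divided by the new size ($2) instead of the
previous one; with PREV=8744 and a new size of 8608 the fixed version
reports the change relative to the previous result:

    $ PREV=8744 awk 'BEGIN {size = 8608
        printf "Code size is %d B", size
        if (ENVIRON["PREV"]+0 != 0)
          printf " (%+.1f%%)", 100*(size-ENVIRON["PREV"])/ENVIRON["PREV"]
        print ""}'
    Code size is 8608 B (-1.6%)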
@@ -250,7 +241,7 @@ jobs:
 
   # run under Valgrind to check for memory errors
   valgrind:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -264,14 +255,14 @@ jobs:
           sudo apt-get update -qq
           sudo apt-get install -qq valgrind
           valgrind --version
-      # # normal tests, we don't need to test all geometries
-      # - name: test-valgrind
-      #   run: make test TESTFLAGS+="-k --valgrind"
+      # normal tests, we don't need to test all geometries
+      - name: test-valgrind
+        run: make test TESTFLAGS+="-k --valgrind"
 
   # self-host with littlefs-fuse for a fuzz-like test
   fuse:
-    runs-on: ubuntu-latest
-    if: "!endsWith(github.ref, '-prefix')"
+    runs-on: ubuntu-18.04
+    if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -316,8 +307,8 @@ jobs:
 
   # test migration using littlefs-fuse
   migrate:
-    runs-on: ubuntu-latest
-    if: "!endsWith(github.ref, '-prefix')"
+    runs-on: ubuntu-18.04
+    if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -383,9 +374,8 @@ jobs:
 
   # collect coverage info
   coverage:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     needs: [test]
-    continue-on-error: true
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -393,7 +383,7 @@ jobs:
           sudo apt-get update -qq
           sudo apt-get install -qq python3 python3-pip lcov
           sudo pip3 install toml
-      # yes we continue-on-error on every step, continue-on-error
+      # yes we continue-on-error nearly every step, continue-on-error
       # at job level apparently still marks a job as failed, which isn't
       # what we want
       - uses: actions/download-artifact@v2
@@ -410,12 +400,10 @@ jobs:
           ./scripts/coverage.py results/coverage.info -o results/coverage.csv
       - name: upload-results
         uses: actions/upload-artifact@v2
-        continue-on-error: true
         with:
           name: results
           path: results
       - name: collect-status
-        continue-on-error: true
         run: |
           mkdir -p status
           [ -e results/coverage.csv ] || exit 0
@@ -423,15 +411,14 @@ jobs:
           export CONTEXT="results / coverage"
           export PREV="$(curl -sS \
             "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
-            | jq -re "select(.sha != env.GITHUB_SHA) | .statuses[]
+            | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
             | select(.context == env.CONTEXT).description
-            | capture(\"Coverage is (?<result>[0-9\\\\.]+)\").result" \
+            | capture("Coverage is (?<result>[0-9\\.]+)").result' \
             || echo 0)"
           export DESCRIPTION="$(
-            ./scripts/coverage.py -u results/coverage.csv -s \
-            | awk -F '[ /%]+' '
+            ./scripts/coverage.py -u results/coverage.csv -s | awk -F '[ /%]+' '
             NR==2 {printf "Coverage is %.1f%% of %d lines",$4,$3}
-            NR==2 && ENVIRON["PREV"] != 0 {
+            NR==2 && ENVIRON["PREV"]+0 != 0 {
               printf " (%+.1f%%)",$4-ENVIRON["PREV"]}')"
           jq -n '{
             state: "success",
@@ -442,7 +429,6 @@ jobs:
             | tee status/coverage.json
       - name: upload-status
         uses: actions/upload-artifact@v2
-        continue-on-error: true
         with:
           name: status
           path: status