forked from Imagelibrary/littlefs
Added GitHub workflows to run tests
Mostly taken from .travis.yml; the biggest changes were around how to get the status updates to work. We can't use a token on PRs the same way we could in Travis, so instead we use a second workflow that checks every pull request for "status" artifacts and creates the actual statuses in the "workflow_run" event, where we have full access to repo secrets.
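For reference, the handoff between the two workflows happens through small JSON files uploaded as a "status" artifact. A minimal sketch of one payload, shaped the way the collect-status step below builds it (the concrete context/description values are made up for illustration):

    # built by the unprivileged test workflow and uploaded as the "status" artifact
    jq -nc '{
        state: "success",
        context: "results / code",
        description: "Code size is 25232 B",
        target_job: "test (thumb)",
        target_step: "results-code"}'
    # the privileged status workflow reads these fields back, resolves
    # target_job/target_step to a job URL, and POSTs them to the statuses API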
.github/workflows/status.yml (new file, 97 lines)
@@ -0,0 +1,97 @@
name: status
on:
  workflow_run:
    workflows: test
    types: completed

jobs:
  status:
    runs-on: ubuntu-latest
    continue-on-error: true

    steps:
      - run: echo "${{toJSON(github.event.workflow_run)}}"

      # custom statuses?
      - uses: dawidd6/action-download-artifact@v2
        with:
          workflow: ${{github.event.workflow_run.name}}
          run_id: ${{github.event.workflow_run.id}}
          name: status
          path: status
      - name: update-status
        run: |
          # TODO remove this
          ls status
          for f in status/*.json
          do
              cat $f
          done

          shopt -s nullglob
          for s in status/*.json
          do
              # parse requested status
              export STATE="$(jq -er '.state' $s)"
              export CONTEXT="$(jq -er '.context' $s)"
              export DESCRIPTION="$(jq -er '.description' $s)"
              # help lookup URL for job/steps because GitHub makes
              # it VERY HARD to link to specific jobs
              export TARGET_URL="$(
                  jq -er '.target_url // empty' $s || (
                      export TARGET_JOB="$(jq -er '.target_job' $s)"
                      export TARGET_STEP="$(jq -er '.target_step // ""' $s)"
                      curl -sS -H "authorization: token ${{secrets.GITHUB_TOKEN}}" \
                          "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/actions/runs/`
                          `${{github.event.workflow_run.id}}/jobs" \
                          | jq -er '.jobs[]
                              | select(.name == env.TARGET_JOB)
                              | .html_url + ((.steps[]
                                  | select(.name == env.TARGET_STEP)
                                  | "#step:\(.number):0") // "")'
                  )
              )"
              # TODO remove this
              # print for debugging
              echo "$(jq -nc '{
                  state: env.STATE,
                  context: env.CONTEXT,
                  description: env.DESCRIPTION,
                  target_url: env.TARGET_URL}')"
              # update status
              curl -sS -H "authorization: token ${{secrets.GITHUB_TOKEN}}" \
                  -X POST \
                  "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
                  `${{github.event.workflow_run.head_sha}}" \
                  -d "$(jq -nc '{
                      state: env.STATE,
                      context: env.CONTEXT,
                      description: env.DESCRIPTION,
                      target_url: env.TARGET_URL}')"

              #if jq -er '.target_url' $s
              #then
              #    export TARGET_URL="$(jq -er '.target_url' $s)"
              #elif jq -er '.target_job' $s
              #then
              #
              #fi

          done

      # - id: status
      #   run: |
      #     echo "::set-output name=description::$(cat statuses/x86_64.txt | tr '\n' ' ')"
      # - uses: octokit/request-action@v2.x
      #   with:
      #     route: POST /repos/{repo}/status/{sha}
      #     repo: ${{github.repository}}
      #     sha: ${{github.event.status.sha}}
      #     context: ${{github.event.status.context}}
      #     state: ${{github.event.status.state}}
      #     description: ${{steps.status.outputs.description}}
      #     target_url: ${{github.event.status.target_url}}
      #
.github/workflows/test.yml (new file, 238 lines)
@@ -0,0 +1,238 @@
name: test
on: [push, pull_request]

env:
  CFLAGS: -Werror
  MAKEFLAGS: -j

jobs:
  # run tests
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        arch: [x86_64, thumb, mips, powerpc]

    steps:
      - uses: actions/checkout@v2
      - name: install
        run: |
          # need toml, also pip3 isn't installed by default?
          sudo apt-get update
          sudo apt-get install python3 python3-pip
          sudo pip3 install toml
          mkdir status
      # cross-compile with ARM Thumb (32-bit, little-endian)
      - name: install-thumb
        if: matrix.arch == 'thumb'
        run: |
          sudo apt-get install \
              gcc-arm-linux-gnueabi \
              libc6-dev-armel-cross \
              qemu-user
          echo "CC=arm-linux-gnueabi-gcc -mthumb --static" >> $GITHUB_ENV
          echo "EXEC=qemu-arm" >> $GITHUB_ENV
          arm-linux-gnueabi-gcc --version
          qemu-arm -version
      # cross-compile with MIPS (32-bit, big-endian)
      - name: install-mips
        if: matrix.arch == 'mips'
        run: |
          sudo apt-get install \
              gcc-mips-linux-gnu \
              libc6-dev-mips-cross \
              qemu-user
          echo "CC=mips-linux-gnu-gcc --static" >> $GITHUB_ENV
          echo "EXEC=qemu-mips" >> $GITHUB_ENV
          mips-linux-gnu-gcc --version
          qemu-mips -version
      # cross-compile with PowerPC (32-bit, big-endian)
      - name: install-powerpc
        if: matrix.arch == 'powerpc'
        run: |
          sudo apt-get install \
              gcc-powerpc-linux-gnu \
              libc6-dev-powerpc-cross \
              qemu-user
          echo "CC=powerpc-linux-gnu-gcc --static" >> $GITHUB_ENV
          echo "EXEC=qemu-ppc" >> $GITHUB_ENV
          powerpc-linux-gnu-gcc --version
          qemu-ppc -version
      # test configurations
      # make sure example can at least compile
      - name: test-example
        run: |
          sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c && \
          make all CFLAGS+=" \
              -Duser_provided_block_device_read=NULL \
              -Duser_provided_block_device_prog=NULL \
              -Duser_provided_block_device_erase=NULL \
              -Duser_provided_block_device_sync=NULL \
              -include stdio.h"
      # # normal+reentrant tests
      # - name: test-default
      #   run: make test SCRIPTFLAGS+="-nrk"
      # # NOR flash: read/prog = 1 block = 4KiB
      # - name: test-nor
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
      # # SD/eMMC: read/prog = 512 block = 512
      # - name: test-emmc
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
      # # NAND flash: read/prog = 4KiB block = 32KiB
      # - name: test-nand
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
      # # other extreme geometries that are useful for various corner cases
      # - name: test-no-intrinsics
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_NO_INTRINSICS"
      # - name: test-byte-writes
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
      # - name: test-block-cycles
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_BLOCK_CYCLES=1"
      # - name: test-odd-block-count
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
      # - name: test-odd-block-size
      #   run: make test SCRIPTFLAGS+="-nrk
      #     -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"

      # update results
      - uses: actions/checkout@v2
        if: github.ref != 'refs/heads/master'
        continue-on-error: true
        with:
          ref: master
          path: master

      - name: results-code
        continue-on-error: true
        run: |
          export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
          export CFLAGS+=" \
              -DLFS_NO_ASSERT \
              -DLFS_NO_DEBUG \
              -DLFS_NO_WARN \
              -DLFS_NO_ERROR"
          if [ -d master ]
          then
              make -C master clean code OBJ="$OBJ" \
                  SCRIPTFLAGS+="-qo code.csv" \
                  && export SCRIPTFLAGS+="-d master/code.csv"
          fi
          make clean code OBJ="$OBJ" \
              SCRIPTFLAGS+="-o code.csv"
      - name: results-code-readonly
        continue-on-error: true
        run: |
          export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
          export CFLAGS+=" \
              -DLFS_NO_ASSERT \
              -DLFS_NO_DEBUG \
              -DLFS_NO_WARN \
              -DLFS_NO_ERROR \
              -DLFS_READONLY"
          if [ -d master ]
          then
              make -C master clean code OBJ="$OBJ" \
                  SCRIPTFLAGS+="-qo code-readonly.csv" \
                  && export SCRIPTFLAGS+="-d master/code-readonly.csv"
          fi
          # TODO remove this OBJ
          make clean code OBJ="$OBJ" \
              SCRIPTFLAGS+="-o code-readonly.csv"
      - name: results-code-threadsafe
        continue-on-error: true
        run: |
          export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
          export CFLAGS+=" \
              -DLFS_NO_ASSERT \
              -DLFS_NO_DEBUG \
              -DLFS_NO_WARN \
              -DLFS_NO_ERROR \
              -DLFS_THREADSAFE"
          if [ -d master ]
          then
              make -C master clean code OBJ="$OBJ" \
                  SCRIPTFLAGS+="-qo code-threadsafe.csv" \
                  && export SCRIPTFLAGS+="-d master/code-threadsafe.csv"
          fi
          make clean code OBJ="$OBJ" \
              SCRIPTFLAGS+="-o code-threadsafe.csv"
      - name: results-code-migrate
        continue-on-error: true
        run: |
          export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
          export CFLAGS+=" \
              -DLFS_NO_ASSERT \
              -DLFS_NO_DEBUG \
              -DLFS_NO_WARN \
              -DLFS_NO_ERROR \
              -DLFS_MIGRATE"
          if [ -d master ]
          then
              make -C master clean code OBJ="$OBJ" \
                  SCRIPTFLAGS+="-qo code-migrate.csv" \
                  && export SCRIPTFLAGS+="-d master/code-migrate.csv"
          fi
          make clean code OBJ="$OBJ" \
              SCRIPTFLAGS+="-o code-migrate.csv"
      # limit reporting to Thumb, otherwise there would be too many numbers
      # flying around for the results to be easily readable
      - name: collect-status
        continue-on-error: true
        if: matrix.arch == 'thumb'
        run: |
          mkdir -p status
          shopt -s nullglob
          for f in code*.csv
          do
              export STEP="results-code$(
                  echo $f | sed -n 's/code-\(.*\).csv/-\1/p')"
              export CONTEXT="results / code$(
                  echo $f | sed -n 's/code-\(.*\).csv/ (\1)/p')"
              export DESCRIPTION="Code size is $(
                  ./scripts/code.py -i $f -S $(
                      [ -e master/$f ] && echo "-d master/$f"))"
              jq -nc '{
                  state: "success",
                  context: env.CONTEXT,
                  description: env.DESCRIPTION,
                  target_job: "test (${{matrix.arch}})",
                  target_step: env.STEP}' \
                  > status/code$(echo $f | sed -n 's/code-\(.*\).csv/-\1/p').json
          done
      - name: upload-status
        continue-on-error: true
        if: matrix.arch == 'thumb'
        uses: actions/upload-artifact@v2
        with:
          name: status
          path: status
          retention-days: 1

  # run under Valgrind to check for memory errors
  valgrind:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: install
        run: |
          # need toml, also pip3 isn't installed by default?
          sudo apt-get update
          sudo apt-get install python3 python3-pip
          sudo pip3 install toml
      - name: install-valgrind
        run: |
          sudo apt-get update
          sudo apt-get install valgrind
          valgrind --version
      # # normal tests, we don't need to test all geometries
      # - name: test-valgrind
      #   run: make test SCRIPTFLAGS+="-k --valgrind"
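A note on the collect-status step above: the sed patterns only match the variant reports, so plain code.csv keeps the bare names. A quick sketch with the same expressions (the filenames are just the ones the results-code* steps write):

    f=code-readonly.csv
    echo $f | sed -n 's/code-\(.*\).csv/-\1/p'    # prints "-readonly", so STEP=results-code-readonly
    echo $f | sed -n 's/code-\(.*\).csv/ (\1)/p'  # prints " (readonly)", so CONTEXT="results / code (readonly)"
    f=code.csv
    echo $f | sed -n 's/code-\(.*\).csv/-\1/p'    # prints nothing, so STEP stays results-code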
Makefile (11 lines changed)
@@ -29,8 +29,7 @@ override CFLAGS += -std=c99 -Wall -pedantic
 override CFLAGS += -Wextra -Wshadow -Wjump-misses-init -Wundef
 
 ifdef VERBOSE
-override TFLAGS += -v
-override SFLAGS += -v
+override SCRIPTFLAGS += -v
 endif
 
@@ -41,14 +40,14 @@ asm: $(ASM)
 size: $(OBJ)
 	$(SIZE) -t $^
 
-code_size:
-	./scripts/code_size.py $(SFLAGS)
+code:
+	./scripts/code.py $(SCRIPTFLAGS)
 
 test:
-	./scripts/test.py $(TFLAGS)
+	./scripts/test.py $(EXEC:%=--exec=%) $(SCRIPTFLAGS)
 .SECONDEXPANSION:
 test%: tests/test$$(firstword $$(subst \#, ,%)).toml
-	./scripts/test.py $@ $(TFLAGS)
+	./scripts/test.py $@ $(EXEC:%=--exec=%) $(SCRIPTFLAGS)
 
 -include $(DEP)
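The EXEC hook in the new test rule is what lets the cross-compiled jobs above run under qemu: $(EXEC:%=--exec=%) is plain make substitution, so the flag only appears when EXEC is set. A sketch of both cases, using the values the install-thumb step exports (nothing here beyond what the workflow and Makefile already define):

    # cross-compiled, as on the thumb runner:
    make test CC="arm-linux-gnueabi-gcc -mthumb --static" EXEC=qemu-arm
    # -> ./scripts/test.py --exec=qemu-arm $(SCRIPTFLAGS)

    # native, EXEC unset:
    make test
    # -> ./scripts/test.py $(SCRIPTFLAGS)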
lfs.c (4 lines changed)
@@ -4723,7 +4723,7 @@ static int lfs_rawmigrate(lfs_t *lfs, const struct lfs_config *cfg) {
 
             lfs1_entry_tole32(&entry1.d);
             err = lfs_dir_commit(lfs, &dir2, LFS_MKATTRS(
-                    {LFS_MKTAG(LFS_TYPE_CREATE, id, 0)},
+                    {LFS_MKTAG(LFS_TYPE_CREATE, id, 0), NULL},
                     {LFS_MKTAG_IF_ELSE(isdir,
                         LFS_TYPE_DIR, id, entry1.d.nlen,
                         LFS_TYPE_REG, id, entry1.d.nlen),
@@ -4828,7 +4828,7 @@ static int lfs_rawmigrate(lfs_t *lfs, const struct lfs_config *cfg) {
 
         lfs_superblock_tole32(&superblock);
         err = lfs_dir_commit(lfs, &dir2, LFS_MKATTRS(
-                {LFS_MKTAG(LFS_TYPE_CREATE, 0, 0)},
+                {LFS_MKTAG(LFS_TYPE_CREATE, 0, 0), NULL},
                 {LFS_MKTAG(LFS_TYPE_SUPERBLOCK, 0, 8), "littlefs"},
                 {LFS_MKTAG(LFS_TYPE_INLINESTRUCT, 0, sizeof(superblock)),
                     &superblock}));
@@ -40,7 +40,7 @@ $(foreach target,$(SRC),$(eval $(FLATTEN)))
 -include %(sizedir)s/*.d
 .SECONDARY:
 
-%%.size: $(foreach t,$(subst /,.,$(SRC:.c=.size)),%%.$t)
+%%.size: $(foreach t,$(subst /,.,$(OBJ:.o=.size)),%%.$t)
 	cat $^ > $@
 """
 CATS = {
@@ -31,7 +31,7 @@ $(foreach target,$(SRC),$(eval $(FLATTEN)))
 -include tests/*.d
 
 .SECONDARY:
-%.test: %.test.o $(foreach f,$(subst /,.,$(SRC:.c=.o)),%.$f)
+%.test: %.test.o $(foreach f,$(subst /,.,$(OBJ)),%.$f)
 	$(CC) $(CFLAGS) $^ $(LFLAGS) -o $@
 """
 GLOBALS = """
@@ -771,7 +771,7 @@ if __name__ == "__main__":
         help="Run reentrant tests with simulated power-loss.")
     parser.add_argument('-V', '--valgrind', action='store_true',
         help="Run non-leaky tests under valgrind to check for memory leaks.")
-    parser.add_argument('-e', '--exec', default=[], type=lambda e: e.split(' '),
+    parser.add_argument('-e', '--exec', default=[], type=lambda e: e.split(),
         help="Run tests with another executable prefixed on the command line.")
     parser.add_argument('-d', '--disk',
         help="Specify a file to use for persistent/reentrant tests.")
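The .split(' ') to .split() change for --exec is presumably about whitespace handling: splitting on a literal space keeps empty strings, which would become empty arguments prefixed onto every test command, while the no-argument form drops them. A quick check of the Python behavior:

    python3 -c 'print(" qemu-arm ".split(" "))'   # ['', 'qemu-arm', '']
    python3 -c 'print(" qemu-arm ".split())'      # ['qemu-arm']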