feat: Periodic syndication checks to IPFS Kubo (#685)
Showing 14 changed files with 318 additions and 95 deletions.
New file: .github/workflows/report_test_flakes.yaml
@@ -0,0 +1,125 @@
on:
  workflow_call:

name: 'Workflow Analysis'

jobs:
  report-test-flakes:
    name: 'Report test flakes'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      - uses: actions/download-artifact@v3
      - name: Parse test results
        id: parse-test-results
        run: |
          sudo apt-get install colorized-logs
          echo -n 'results=[' >> $GITHUB_OUTPUT
          ENTRIES=""
          for RESULTS_DIR in test-results-*/ ; do
            mapfile -t target <$RESULTS_DIR/target
            PLATFORM="${target[0]}"
            FEATURES="${target[1]}"
            TOOLCHAIN="${target[2]}"
            LOG_PATH="$RESULTS_DIR/log"
            csplit -q "$LOG_PATH" %^------------%
            SUMMARY=""
            if [[ -f "./xx00" ]]; then
              SUMMARY=$(tail ./xx00 -n+2 | ansi2txt | jq -M --compact-output --raw-input --slurp . | sed -e 's/\\/\\\\/g')
            else
              continue
            fi
            ENTRY="{\"platform\":\"$PLATFORM\",\"features\":\"$FEATURES\",\"toolchain\":\"$TOOLCHAIN\",\"summary\":$SUMMARY}"
            if [ -z "$ENTRIES" ]; then
              ENTRIES="$ENTRY"
            else
              ENTRIES="$ENTRIES,$ENTRY"
            fi
          done
          echo -n "$ENTRIES ]" >> $GITHUB_OUTPUT
      - name: Report test flakes
        uses: actions/github-script@v6
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const results = JSON.parse(`${{ steps.parse-test-results.outputs.results }}`);
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });
            const testFlakeAnalysisHeader = 'Test flake analysis';
            const existingComment = comments.find(comment => {
              return comment.user.type === 'Bot' && comment.body.includes(testFlakeAnalysisHeader)
            });
            let body = '';
            if (results.length == 0) {
              body = "No test results to analyze. Maybe none of the test runs passed?";
            } else {
              let table = "\n\n| status | platform | features | toolchain |\n|:---:|---|---|---|\n";
              const flakeSummaries = [];
              for (result of results) {
                const isFlakey = result.summary.indexOf("FLAKY") > 0;
                table += `| ${ isFlakey ? "🟡" : "🟢" } | \`${ result.platform }\` | \`${ result.features }\` | \`${result.toolchain}\` |\n`;
                if (isFlakey) {
                  flakeSummaries.push(`#### Flake summary for \`${ result.platform }\`, \`${ result.features }\`, \`${ result.toolchain }\`
            \`\`\`shell
            ${ result.summary }
            \`\`\``);
                }
              }
              if (flakeSummaries.length == 0) {
                body += '\nNo flakes detected 🎉\n\n'
              }
              body += table;
              if (flakeSummaries.length > 0) {
                body += "\n\n";
                body += flakeSummaries.join('\n\n');
              }
            }
            body = `### ${testFlakeAnalysisHeader}
            ${body}`;
            if (existingComment) {
              github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existingComment.id,
                body
              });
            } else {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body
              });
            }
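For context, the parse step above relies on an implicit contract with the test jobs that upload the test-results-* artifacts (shown further below): each artifact contains a target file with three lines (platform, features, toolchain) and a log file whose final summary follows a line of dashes. A minimal local sketch of what the parsing does, using a hypothetical artifact directory and illustrative log text:

    # Hypothetical artifact layout; the directory name and log text are illustrative only.
    mkdir -p test-results-0abc123
    printf 'ubuntu-latest\ntest-kubo,headers\nstable\n' > test-results-0abc123/target
    printf 'earlier test output...\n------------\nSummary: 120 tests run, 1 flaky (FLAKY noosphere::example_test)\n' \
      > test-results-0abc123/log

    # What the parse step does per artifact (ansi2txt comes from the colorized-logs package it installs):
    csplit -q test-results-0abc123/log %^------------%   # xx00 = the dashes line and everything after it
    tail ./xx00 -n+2 | ansi2txt | jq -M --compact-output --raw-input --slurp .
    # -> a JSON-encoded string that becomes the "summary" field of one entry in the
    #    step's `results` output, e.g.:
    #    {"platform":"ubuntu-latest","features":"test-kubo,headers","toolchain":"stable","summary":"..."}
    # A log with no dashes separator produces no xx00, and the loop skips that artifact.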
Changes to the existing CI test-suite workflow:
@@ -37,39 +37,6 @@ jobs:
           swift build --sanitize=address
           swift test --sanitize=address
-  run-test-suite-windows:
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: Swatinem/rust-cache@v2
-      - name: 'Setup Rust'
-        run: |
-          curl -sSf https://sh.rustup.rs | sh -s -- -y
-      - name: 'Install environment packages'
-        run: |
-          choco install -y cmake protoc openssl
-        shell: sh
-      - name: 'Install IPFS Kubo'
-        uses: ibnesayeed/setup-ipfs@master
-        with:
-          ipfs_version: v0.17.0
-          run_daemon: true
-      - name: 'Run Rust native target tests'
-        # Increase stack size on Windows tests; (non-main) threads are spawned with 2MB
-        # default stack size, which `orb_can_render_peers_in_the_sphere_address_book`
-        # uses (at time of writing) slightly more than 2MB. While we could set the thread
-        # stack size at runtime (via tokio's `thread_stack_size`), it appears to not solve the
-        # problem, possibly due to the harness thread overflowing (e.g. a non-main thread that
-        # we can't configure within the test). In lieu of that, set RUST_MIN_STACK to increase
-        # the stack sizes of threads created by tokio within tests, as well as the test harness
-        # threads themselves.
-        #
-        # While our main thread isn't under fire here, notating this for future use:
-        # https://users.rust-lang.org/t/stack-overflow-when-compiling-on-windows-10/50818/8
-        run: $env:RUST_MIN_STACK = '4000000'; cargo test --features test-kubo,helpers
-        env:
-          NOOSPHERE_LOG: deafening
-
   run-linting-linux:
     runs-on: ubuntu-latest
     steps:
@@ -89,15 +56,31 @@ jobs:
       - name: 'Run Linter'
         run: cargo clippy --all -- -D warnings

-  run-test-suite-linux:
-    runs-on: ubuntu-latest
+  run-rust-test-suite:
+    name: 'Run Rust test suite'
+    strategy:
+      matrix:
+        features: ['test-kubo,headers', 'test-kubo,headers,rocksdb']
+        platform: ['ubuntu-latest', 'windows-latest', 'macos-13']
+        toolchain: ['stable']
+        exclude:
+          - platform: 'windows-latest'
+            features: 'test-kubo,headers,rocksdb'
+    runs-on: ${{ matrix.platform }}
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
       - name: 'Setup Rust'
         run: |
           curl -sSf https://sh.rustup.rs | sh -s -- -y
-      - name: 'Install environment packages'
+          rustup toolchain install ${{matrix.toolchain}}
+      - name: 'Install environment packages (Windows)'
+        if: ${{ matrix.platform == 'windows-latest' }}
+        run: |
+          choco install -y cmake protoc openssl
+        shell: sh
+      - name: 'Install environment packages (Linux)'
+        if: ${{ matrix.platform == 'ubuntu-latest' }}
         run: |
           sudo apt-get update -qqy
           sudo apt-get install jq protobuf-compiler cmake
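The matrix above expands to 3 platforms × 2 feature sets × 1 toolchain = 6 combinations, and the exclude entry drops the rocksdb feature set on windows-latest, so each run schedules 5 test jobs. A throwaway shell sketch (not part of the workflow) that enumerates the effective combinations:

    # Enumerate the effective matrix combinations (mirrors the YAML above; illustrative only).
    for platform in ubuntu-latest windows-latest macos-13; do
      for features in 'test-kubo,headers' 'test-kubo,headers,rocksdb'; do
        # The exclude rule: RocksDB builds are skipped on Windows.
        if [ "$platform" = 'windows-latest' ] && [ "$features" = 'test-kubo,headers,rocksdb' ]; then
          continue
        fi
        echo "$platform / $features / stable"
      done
    done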
@@ -106,28 +89,55 @@ jobs:
         with:
           ipfs_version: v0.17.0
           run_daemon: true
-      - name: 'Run Rust native target tests'
-        run: NOOSPHERE_LOG=deafening cargo test --features test-kubo,headers
-
-  run-test-suite-linux-rocksdb:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: Swatinem/rust-cache@v2
-      - name: 'Setup Rust'
-        run: |
-          curl -sSf https://sh.rustup.rs | sh -s -- -y
-      - name: 'Install environment packages'
-        run: |
-          sudo apt-get update -qqy
-          sudo apt-get install jq protobuf-compiler cmake libclang-dev
-      - name: 'Install IPFS Kubo'
-        uses: ibnesayeed/setup-ipfs@master
-        with:
-          ipfs_version: v0.17.0
-          run_daemon: true
-      - name: 'Run Rust native target tests (RocksDB)'
-        run: NOOSPHERE_LOG=defeaning cargo test -p noosphere -p noosphere-storage --features rocksdb,test-kubo
+      - name: Install cargo-binstall
+        uses: cargo-bins/[email protected]
+      - name: Install binaries from cargo
+        run: |
+          cargo +${{ matrix.toolchain }} binstall cargo-nextest --no-confirm --force
+      - name: 'Run Rust tests'
+        shell: bash
+        run: |
+          mkdir -p test-results
+          echo "${{ matrix.platform }}
+          ${{ matrix.features }}
+          ${{ matrix.toolchain}}" > test-results/target
+          FEATURES="";
+          if [ -n "${{matrix.features}}" ]; then
+            FEATURES="--features ${{matrix.features}}"
+          fi
+          # Increase stack size on Windows tests; (non-main) threads are spawned with 2MB
+          # default stack size, which `orb_can_render_peers_in_the_sphere_address_book`
+          # uses (at time of writing) slightly more than 2MB. While we could set the thread
+          # stack size at runtime (via tokio's `thread_stack_size`), it appears to not solve the
+          # problem, possibly due to the harness thread overflowing (e.g. a non-main thread that
+          # we can't configure within the test). In lieu of that, set RUST_MIN_STACK to increase
+          # the stack sizes of threads created by tokio within tests, as well as the test harness
+          # threads themselves.
+          #
+          # While our main thread isn't under fire here, notating this for future use:
+          # https://users.rust-lang.org/t/stack-overflow-when-compiling-on-windows-10/50818/8
+          if [[ "${{matrix.platform}}" == "windows-latest" ]]; then
+            export RUST_MIN_STACK='4000000'
+          fi
+          cargo +${{ matrix.toolchain }} nextest run $FEATURES --retries 5 --color always 2>&1 | tee test-results/log
+        env:
+          NOOSPHERE_LOG: academic
+      - uses: actions/upload-artifact@v3
+        with:
+          name: test-results-${{ hashFiles('./test-results') }}
+          path: ./test-results
+
+  report-test-flakes:
+    name: 'Report test flakes (Linux)'
+    needs: ['run-rust-test-suite']
+    if: always()
+    uses: ./.github/workflows/report_test_flakes.yaml
+    secrets: inherit

   run-test-suite-linux-c:
     runs-on: ubuntu-latest
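Taken together, the two workflows form a simple flake-detection loop: cargo nextest run with --retries 5 reruns failing tests and marks tests that only pass on retry as FLAKY in its output, the log and target description are uploaded as a test-results-* artifact, and the reusable report_test_flakes.yaml workflow scans each summary for that marker and posts (or updates) a single 'Test flake analysis' comment on the pull request. Roughly the same check can be run locally; the sketch below assumes cargo-binstall is already installed and that an IPFS Kubo daemon is running for the test-kubo feature:

    # Local approximation of the CI test step (illustrative; not part of the workflows).
    cargo binstall cargo-nextest --no-confirm
    mkdir -p test-results
    NOOSPHERE_LOG=academic cargo nextest run --features test-kubo,headers --retries 5 --color always 2>&1 \
      | tee test-results/log
    # The report workflow keys on this marker to decide whether a target was flaky:
    grep -q FLAKY test-results/log && echo 'flaky tests detected'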