# Workflow file for run of PR #179:
# "retry: add cross-runtime multiprocess semaphores"

name: test-ts
on:
push:
branches:
- main
- stable
- 'releases/**'
tags:
- '*'
pull_request:
workflow_dispatch:
jobs:
lint_ts:
runs-on: ubuntu-latest
defaults:
run:
working-directory: abxbus-ts
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
with:
version: 10
- uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: abxbus-ts/pnpm-lock.yaml
- run: pnpm install --frozen-lockfile
- run: pnpm exec prettier --check .
- run: pnpm exec eslint .
- run: pnpm run typecheck
find_ts_tests:
runs-on: ubuntu-latest
outputs:
TS_TASKS: ${{ steps.lsgrep.outputs.TS_TASKS }}
# [{ "kind": "test" | "example", "name": "eventbus_basics" }, ...]
TS_TEST_TASKS: ${{ steps.lsgrep.outputs.TS_TEST_TASKS }}
# [{ "kind": "test", "name": "eventbus_basics" }, ...]
steps:
- uses: actions/checkout@v4
- id: lsgrep
run: |
TS_TEST_TASKS="$(
find abxbus-ts/tests -maxdepth 1 -type f -name '*.test.ts' ! -name 'eventbus_performance.test.ts' \
| sort \
| sed 's|^abxbus-ts/tests/||' \
| sed 's|\.test\.ts$||' \
| jq -R -s -c 'split("\n")[:-1] | map({kind: "test", name: .})'
)"
TS_EXAMPLE_TASKS="$(
(
if [[ -d abxbus-ts/examples ]]; then
find abxbus-ts/examples -maxdepth 1 -type f -name '*.ts' | sort
fi
) \
| sed 's|^abxbus-ts/examples/||' \
| sed 's|\.ts$||' \
| jq -R -s -c 'split("\n")[:-1] | map({kind: "example", name: .})'
)"
TS_TASKS="$(jq -cn --argjson tests "$TS_TEST_TASKS" --argjson examples "$TS_EXAMPLE_TASKS" '$tests + $examples')"
echo "TS_TEST_TASKS=${TS_TEST_TASKS}" >> "$GITHUB_OUTPUT"
echo "TS_TASKS=${TS_TASKS}" >> "$GITHUB_OUTPUT"
echo "$TS_TASKS"
- name: Check that at least one test file is found
run: |
if [[ -z "${{ steps.lsgrep.outputs.TS_TEST_TASKS }}" || "${{ steps.lsgrep.outputs.TS_TEST_TASKS }}" == "[]" ]]; then
echo "Failed to find any *.test.ts files in abxbus-ts/tests/ folder!" > /dev/stderr
exit 1
fi
tests:
needs:
- lint_ts
- find_ts_tests
runs-on: ubuntu-latest
strategy:
matrix:
task: ${{ fromJson(needs.find_ts_tests.outputs.TS_TASKS || '[{"kind":"error","name":"FAILED_TO_DISCOVER_TASKS"}]') }}
# autodiscovers all files in abxbus-ts/tests/*.test.ts and abxbus-ts/examples/*.ts
# - { kind: "test", name: "eventbus_basics" }
# - { kind: "example", name: "simple" }
# ... and more
name: ts-${{ matrix.task.kind }}-${{ matrix.task.name }}
defaults:
run:
working-directory: abxbus-ts
steps:
- uses: actions/checkout@v4
- name: Check that the previous step managed to find some tasks for us to run
run: |
if [[ "${{ matrix.task.kind }}" == "error" ]]; then
echo "Failed get list of tasks from find_ts_tests job" > /dev/stderr
exit 1
fi
- uses: pnpm/action-setup@v4
with:
version: 10
- uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: abxbus-ts/pnpm-lock.yaml
- name: Install bridge service binaries
if: matrix.task.kind == 'test' && matrix.task.name == 'bridges'
run: |
sudo apt-get update
sudo apt-get install -y redis-server nats-server postgresql sqlite3
PG_INITDB_PATH="$(find /usr/lib/postgresql -type f -name initdb | head -n 1)"
PG_BINDIR="$(dirname "${PG_INITDB_PATH}")"
if [[ -z "${PG_BINDIR}" || ! -x "${PG_BINDIR}/initdb" || ! -x "${PG_BINDIR}/postgres" ]]; then
echo "Failed to locate PostgreSQL binaries (initdb/postgres)" > /dev/stderr
exit 1
fi
echo "${PG_BINDIR}" >> "${GITHUB_PATH}"
export PATH="${PG_BINDIR}:${PATH}"
redis-server --version
nats-server --version
initdb --version
postgres --version
sqlite3 --version
- run: pnpm install --frozen-lockfile
- uses: astral-sh/setup-uv@v6
if: matrix.task.kind == 'test' && matrix.task.name == 'cross_runtime_roundtrip'
with:
enable-cache: true
activate-environment: true
- name: Install Python deps for cross-runtime roundtrip tests
if: matrix.task.kind == 'test' && matrix.task.name == 'cross_runtime_roundtrip'
run: |
cd ..
uv sync --dev --all-extras
- name: Verify bridge optional Node deps
if: matrix.task.kind == 'test' && matrix.task.name == 'bridges'
run: |
node - <<'NODE'
const required = ['ioredis', 'nats', 'pg']
for (const pkg of required) {
require.resolve(pkg)
}
console.log('optional bridge deps resolve OK')
NODE
- name: Prepare coverage directory
run: |
rm -rf .v8-coverage
mkdir -p .v8-coverage
- name: Run test with coverage
if: matrix.task.kind == 'test'
run: NODE_V8_COVERAGE=.v8-coverage NODE_OPTIONS='--expose-gc' node --expose-gc --test --experimental-test-coverage --import tsx tests/${{ matrix.task.name }}.test.ts
- name: Run example
if: matrix.task.kind == 'example'
run: NODE_V8_COVERAGE=.v8-coverage NODE_OPTIONS='--expose-gc' node --expose-gc --import tsx examples/${{ matrix.task.name }}.ts
- name: Upload raw coverage data
uses: actions/upload-artifact@v4
with:
name: ts-coverage-${{ matrix.task.kind }}-${{ matrix.task.name }}
path: |
abxbus-ts/.v8-coverage
pyproject.toml
retention-days: 7
include-hidden-files: true
if: always()
coverage:
needs: tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
with:
version: 10
- uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: abxbus-ts/pnpm-lock.yaml
- run: cd abxbus-ts && pnpm install --frozen-lockfile
- name: Download all coverage data
uses: actions/download-artifact@v4
with:
pattern: ts-coverage-*
path: coverage-data/
- name: Combine coverage data
run: |
mkdir -p abxbus-ts/.v8-coverage-merged
counter=1
while IFS= read -r -d '' coverage_file; do
cp "$coverage_file" "abxbus-ts/.v8-coverage-merged/$counter-$(basename "$coverage_file")"
counter=$((counter + 1))
done < <(find coverage-data -type f -name "*.json" -print0)
if [[ "$counter" -eq 1 ]]; then
echo "No V8 coverage JSON files found in downloaded artifacts" > /dev/stderr
exit 1
fi
- name: Build merged coverage report
run: |
cd abxbus-ts
set -o pipefail
mkdir -p coverage
pnpm dlx c8 report \
--temp-directory .v8-coverage-merged \
--report-dir coverage \
--reporter=html \
--reporter=text \
--reporter=json-summary \
--exclude-after-remap \
-n 'src/**/*.ts' \
-x 'src/bridge*.ts' \
-x 'src/optional_deps.ts' | tee coverage/text-report.txt
node <<'NODE'
const fs = require('fs');
const summaryPath = 'coverage/coverage-summary.json';
const summary = JSON.parse(fs.readFileSync(summaryPath, 'utf8'));
const entries = Object.entries(summary);
const total = summary.total;
const files = entries
.filter(([name]) => name !== 'total')
.sort((a, b) => String(a[0]).localeCompare(String(b[0])));
const esc = (s) => String(s).replace(/\|/g, '\\|');
const row = (name, m) => {
const stmtsTotal = Number(m.statements.total || 0);
const stmtsCovered = Number(m.statements.covered || 0);
const stmtsMiss = Math.max(stmtsTotal - stmtsCovered, 0);
return `| ${esc(name)} | ${stmtsTotal} | ${stmtsMiss} | ${Number(m.statements.pct || 0).toFixed(2)}% | ${Number(m.branches.pct || 0).toFixed(2)}% | ${Number(m.functions.pct || 0).toFixed(2)}% | ${Number(m.lines.pct || 0).toFixed(2)}% |`;
};
const lines = [];
lines.push('### TypeScript combined coverage');
lines.push('');
lines.push('| Name | Stmts | Miss | Cover | Branch | Funcs | Lines |');
lines.push('| --- | ---: | ---: | ---: | ---: | ---: | ---: |');
lines.push(row('TOTAL', total));
for (const [name, metrics] of files) {
lines.push(row(name, metrics));
}
lines.push('');
const summaryFile = process.env.GITHUB_STEP_SUMMARY;
fs.appendFileSync(summaryFile, lines.join('\n'));
NODE
- name: Fail if TypeScript coverage is <50%
run: |
cd abxbus-ts
pnpm dlx c8 report \
--temp-directory .v8-coverage-merged \
--reporter=text-summary \
--exclude-after-remap \
-n 'src/**/*.ts' \
-x 'src/bridge*.ts' \
-x 'src/optional_deps.ts' \
--check-coverage \
--lines 50 > /dev/null
- name: Upload merged coverage report
id: upload_ts_coverage_report
uses: actions/upload-artifact@v4
with:
name: ts-coverage-report
path: |
abxbus-ts/coverage/
pyproject.toml
retention-days: 7
- name: Append TypeScript coverage artifact link
run: |
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "[Download TypeScript HTML coverage artifact (ts-coverage-report)](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts/${{ steps.upload_ts_coverage_report.outputs.artifact-id }})" >> "$GITHUB_STEP_SUMMARY"
perf:
runs-on: ubuntu-latest
outputs:
perf_stats: ${{ steps.export_perf.outputs.perf_stats }}
defaults:
run:
working-directory: abxbus-ts
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
with:
version: 10
- uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: abxbus-ts/pnpm-lock.yaml
- uses: oven-sh/setup-bun@v2
- uses: denoland/setup-deno@v2
with:
deno-version: v2.x
- run: pnpm install --frozen-lockfile
- name: Run TypeScript runtime perf
run: pnpm run perf | tee ts_perf.log
- name: Export TypeScript perf stats
id: export_perf
run: |
python - <<'PY'
import os
import re
from pathlib import Path
text = Path("ts_perf.log").read_text(encoding="utf-8", errors="replace")
lines = [line.strip() for line in text.splitlines()]
stat_lines = [
line
for line in lines
if re.match(r"^\[(node|bun|deno|browser)\]\s.+:\s.*latency=", line)
]
if not stat_lines:
stat_lines = ["unable to parse ts perf stats; see job log"]
stats = "\n".join(stat_lines)
with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
fh.write("perf_stats<<EOF\n")
fh.write(stats + "\n")
fh.write("EOF\n")
with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as fh:
fh.write("### TypeScript Perf Stats\n\n```\n")
fh.write(stats + "\n")
fh.write("```\n")
PY