From 2752e01fe7d667932ec5a88e7feeb9733f3f9d41 Mon Sep 17 00:00:00 2001
From: Jelle Raaijmakers
Date: Fri, 21 Mar 2025 09:46:56 +0100
Subject: [PATCH] CI: Add js-benchmarks workflow

This workflow starts after a successful js-artifacts workflow, picks up
the JS repl binary and runs our js-benchmarks tool. It does not yet
publish or otherwise store the benchmark results, but it's a start!
---
 .github/workflows/js-benchmarks.yml | 61 +++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 .github/workflows/js-benchmarks.yml

diff --git a/.github/workflows/js-benchmarks.yml b/.github/workflows/js-benchmarks.yml
new file mode 100644
index 00000000000..0128067f506
--- /dev/null
+++ b/.github/workflows/js-benchmarks.yml
@@ -0,0 +1,61 @@
+name: 'Run the JS benchmarks'
+
+on:
+  workflow_run:
+    workflows: ['Package the js repl as a binary artifact']
+    branches: [master]
+    types:
+      - completed
+
+jobs:
+  js-benchmarks:
+    runs-on: js-benchmarks-runner
+    if: ${{ github.repository == 'LadybirdBrowser/ladybird' && github.event.workflow_run.conclusion == 'success' }}
+
+    concurrency:
+      group: js-benchmarks
+
+    steps:
+      - name: 'Checkout LadybirdBrowser/js-benchmarks'
+        uses: actions/checkout@v4
+        with:
+          repository: LadybirdBrowser/js-benchmarks
+          path: js-benchmarks
+
+      - name: 'Install dependencies'
+        shell: bash
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y clang-19 python3-venv
+          sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-19 100
+          sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-19 100
+
+      - name: 'Download JS repl artifact'
+        id: download-artifact
+        uses: dawidd6/action-download-artifact@v9
+        with:
+          workflow: js-artifacts.yml
+          name: ladybird-js-Linux-x86_64
+          path: js-repl
+
+      - name: 'Extract JS repl'
+        shell: bash
+        run: |
+          cd js-repl
+          tar -xvzf ladybird-js-Linux-x86_64.tar.gz
+
+      - name: 'Run the JS benchmarks'
+        shell: bash
+        run: |
+          cd js-benchmarks
+          python3 -m venv .venv
+          source .venv/bin/activate
+          python3 -m pip install -r requirements.txt
+          ./run.py --executable=${{ github.workspace }}/js-repl/bin/js --iterations=5
+
+      - name: 'Save results as an artifact'
+        uses: actions/upload-artifact@v4
+        with:
+          name: js-benchmarks-results
+          path: js-benchmarks/results.json
+          retention-days: 90