Skip to content

build(chrome): update the pinned browser version to 145.0.7568.0 #137

build(chrome): update the pinned browser version to 145.0.7568.0

build(chrome): update the pinned browser version to 145.0.7568.0 #137

Workflow file for this run

---
# This workflow will run measure performance in different environments.
name: Measure performance

# Declare default permissions as read only.
permissions: read-all

env:
  # Verbose BiDi server/mapper logging for easier debugging of CI runs.
  DEBUG: 'bidi:server:*,bidi:mapper:*'
  # Env values are always strings; quote number-looking ones explicitly.
  DEBUG_DEPTH: '10'
  FORCE_COLOR: '3'
  PIP_DISABLE_PIP_VERSION_CHECK: '1'

on:
  merge_group:
  pull_request:
  push:
    branches:
      - main
  workflow_dispatch:
    inputs:
      verbose:
        description: Verbose logging
        default: false
        required: false
        type: boolean

# Cancel superseded runs for the same branch/PR to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
jobs:
  # Runs the E2E performance test suite across OS / headless-mode / runner-kind
  # combinations and uploads the extracted metrics as per-leg artifacts.
  performance_metric:
    name: ${{ matrix.kind }}-${{ matrix.os }}-${{ matrix.head }}
    strategy:
      # Keep other matrix legs running when one fails, so we still collect
      # partial metrics.
      fail-fast: false
      matrix:
        # TODO(#876): Add Windows CI.
        os: [ubuntu-latest, macos-latest]
        head: [headful, 'new-headless', 'old-headless']
        # `cd` runs e2e via `chromedriver`. `node` runs tests using `NodeJS` runner.
        kind: [cd, node]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set up Node.js
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: '.nvmrc'
          cache: npm
      - name: Disable AppArmor
        if: ${{ matrix.os == 'ubuntu-latest' }}
        # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md
        run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns
      - uses: google/wireit@f21db1f3a6a4db31f42787a958cf2a18308effed # setup-github-actions-caching/v2.0.3
      - name: Install and build npm dependencies
        run: npm ci
      # Install chrome, chromedriver and headless shell is required to keep them cached.
      - name: Install all chrome binaries if needed
        uses: ./.github/actions/setup-chrome-binaries
      - name: Set up Python
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: '3.11'
          cache: pipenv
      - name: Install pipenv
        run: pip install pipenv
      - name: Install python dependencies
        run: pipenv install
      # Headful runs on Linux need a virtual display, hence the xvfb-run wrapper.
      - name: Run E2E performance tests with xvfb-run
        if: matrix.os == 'ubuntu-latest' && matrix.head == 'headful'
        timeout-minutes: 20
        run: >
          xvfb-run --auto-servernum
          npm run e2e:${{ matrix.head }}
          --
          -k test_performance_
          -s
          | tee tests_output.txt
        env:
          VERBOSE: ${{ github.event.inputs.verbose }}
          CHROMEDRIVER: ${{ matrix.kind == 'cd' }}
      - name: Run E2E performance tests
        if: matrix.os != 'ubuntu-latest' || matrix.head != 'headful'
        timeout-minutes: 20
        run: >
          npm run e2e:${{ matrix.head }}
          --
          -k test_performance_
          -s
          | tee tests_output.txt
        env:
          VERBOSE: ${{ github.event.inputs.verbose }}
          CHROMEDRIVER: ${{ matrix.kind == 'cd' }}
      # Pull `PERF_METRIC:`-prefixed lines out of the test log and namespace
      # each one with the matrix leg it came from.
      - name: Extract and store performance metrics
        id: extract_metrics
        run: |
          grep 'PERF_METRIC:' tests_output.txt | sed 's/^PERF_METRIC://' | sed "s/^/${{ matrix.os }}-${{ matrix.head }}-${{ matrix.kind }}:/" > performance_metrics.txt
          echo "Extracted performance metrics:"
          cat performance_metrics.txt
      - name: Upload performance result as artifact
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: performance-metrics-${{ matrix.os }}-${{ matrix.head }}-${{ matrix.kind }}
          path: performance_metrics.txt
          # Do not store these artifacts for long, as they will be present in the `all-performance-metrics`.
          retention-days: 1
combine_metrics:
if: ${{ !cancelled() }}
name: Combine performance metrics
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
permissions:
deployments: write
# Add perf metrics to PR's comment
pull-requests: write
contents: write
runs-on: ubuntu-latest
needs: performance_metric
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup Pages
uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5.0.0
- name: Download all performance metrics artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
path: artifacts
- name: Combine metrics into a single file
run: |
find artifacts -name "performance_metrics.txt" -exec cat {} + > all_performance_metrics.txt
echo "Combined performance metrics:"
cat all_performance_metrics.txt
# Convert to JSON format suitable for github-action-benchmark
echo "[" > all_performance_metrics.json
first_metric=true
while IFS= read -r line; do
if [ "$first_metric" = false ]; then
echo "," >> all_performance_metrics.json
fi
key=${line%:*}
value=${line##*:}
echo " {\"name\": \"$key\", \"value\": $value, \"unit\": \"ms\"}" >> all_performance_metrics.json
first_metric=false
done < all_performance_metrics.txt
echo "]" >> all_performance_metrics.json
echo "Combined performance metrics in JSON format:"
cat all_performance_metrics.json
- name: Upload combined performance metrics
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: all-performance-metrics
path: |
all_performance_metrics.txt
all_performance_metrics.json
- name: Store benchmark result
uses: benchmark-action/github-action-benchmark@v1
with:
tool: 'customSmallerIsBetter'
output-file-path: all_performance_metrics.json
gh-pages-branch: gh-pages
benchmark-data-dir-path: bench/
github-token: ${{ secrets.GITHUB_TOKEN }}
# Comment only when run in PR.
comment-always: ${{ github.ref != 'refs/heads/main' }}
# Push and deploy automatically on merge to main.
auto-push: ${{ github.ref == 'refs/heads/main' }}