devops: move to a new infra (#16845)
This commit is contained in:
Родитель
a07a4a25a2
Коммит
4f6a94b563
|
@ -1,31 +0,0 @@
|
|||
name: "Trigger: Chromium Builds"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: 'Playwright SHA / ref to build Chromium'
|
||||
required: true
|
||||
default: 'main'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/chromium/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_chromium.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data "{\"event_type\": \"build_chromium\", \"client_payload\": {\"ref\": \"${GHREF}\"}}" \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
||||
GHREF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.sha }}
|
|
@ -1,31 +0,0 @@
|
|||
name: "Trigger: Chromium Tip Of Tree Builds"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: 'Playwright SHA / ref to build Chromium Tip Of Tree'
|
||||
required: true
|
||||
default: 'main'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/chromium-tip-of-tree/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_chromium_tip_of_tree.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data "{\"event_type\": \"build_chromium_tip_of_tree\", \"client_payload\": {\"ref\": \"${GHREF}\"}}" \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
||||
GHREF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.sha }}
|
|
@ -1,33 +0,0 @@
|
|||
name: "Trigger: Chromium with Symbols Builds"
|
||||
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: 'Playwright SHA / ref to build Chromium With Symbols'
|
||||
required: true
|
||||
default: 'main'
|
||||
release:
|
||||
types: [published]
|
||||
push:
|
||||
branches:
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/chromium/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_chromium_with_symbols.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data "{\"event_type\": \"build_chromium_with_symbols\", \"client_payload\": {\"ref\": \"${GHREF}\"}}" \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
||||
GHREF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.sha }}
|
|
@ -1,24 +0,0 @@
|
|||
name: "FFMPEG Builder"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/ffmpeg/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_ffmpeg.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "build_ffmpeg"}' \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
|
@ -1,24 +0,0 @@
|
|||
name: "Firefox Builder"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/firefox/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_firefox.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "build_firefox"}' \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
|
@ -1,24 +0,0 @@
|
|||
name: "Firefox Beta Builder"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/firefox-beta/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_firefox_beta.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "build_firefox_beta"}' \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
|
@ -1,24 +0,0 @@
|
|||
name: "WebKit Builder"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/webkit/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_webkit.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "build_webkit"}' \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
|
@ -1,24 +0,0 @@
|
|||
name: "WinLDD Builder"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
paths:
|
||||
- browser_patches/winldd/BUILD_NUMBER
|
||||
- .github/workflows/trigger_build_winldd.yml
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: "trigger"
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- run: |
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "build_winldd"}' \
|
||||
https://api.github.com/repos/microsoft/playwright-internal/dispatches
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.REPOSITORY_DISPATCH_PERSONAL_ACCESS_TOKEN }}
|
|
@ -1,194 +0,0 @@
|
|||
- [Contributing Browser Patches](#Contributing-browser-patches)
|
||||
* [1. Setting up local browser checkout](#1-setting-up-local-browser-checkout)
|
||||
* [2. Developing a new change](#2-developing-a-new-change)
|
||||
* [3. Exporting your change to playwright repo](#3-exporting-your-change-to-playwright-repo)
|
||||
* [4. Rolling Playwright to the new browser build](#4-rolling-playwright-to-the-new-browser-build)
|
||||
- [Cheatsheet](#cheatsheet)
|
||||
* [Firefox](#firefox)
|
||||
- [stack trace](#stack-trace)
|
||||
- [logging](#logging)
|
||||
* [WebKit](#webkit)
|
||||
- [Debugging Windows](#degugging-windows)
|
||||
- [Enable core dumps on Linux](#enable-core-dumps-on-linux)
|
||||
|
||||
# Contributing Browser Patches
|
||||
|
||||
Firefox and WebKit have additional patches atop to expose necessary capabilities.
|
||||
|
||||
Ideally, all these changes should be upstreamed.
|
||||
For the time being, it is possible to setup a browser checkout
|
||||
and develop from there.
|
||||
|
||||
[WebKit upstream status](webkit/upstream_status.md)
|
||||
|
||||
## 1. Setting up local browser checkout
|
||||
|
||||
From the `playwright` repo, run the following command:
|
||||
|
||||
```bash
|
||||
$ ./browser_patches/prepare_checkout.sh firefox
|
||||
```
|
||||
(you can optionally pass "webkit" for a webkit checkout)
|
||||
|
||||
This will create a firefox checkout at `$HOME/firefox`
|
||||
|
||||
> **NOTE:** this command downloads GBs of data.
|
||||
|
||||
|
||||
This command will:
|
||||
- create a `browser_upstream` remote in the checkout
|
||||
- create a `playwright-build` branch and apply all playwright-required patches to it.
|
||||
|
||||
## 2. Developing a new change
|
||||
|
||||
### Creating new branch
|
||||
|
||||
You want to create a new branch off the `playwright-build` branch.
|
||||
|
||||
Assuming that you're under `$HOME/firefox` checkout:
|
||||
|
||||
```bash
|
||||
$ git checkout -b my-new-feature playwright-build
|
||||
$ # develop my feature on the my-new-feature branch ....
|
||||
```
|
||||
|
||||
### Building
|
||||
|
||||
Each browser has corresponding build script. `--full` options normally takes care of also installing required build dependencies on Linux.
|
||||
|
||||
```bash
|
||||
./browser_patches/firefox/build.sh --full
|
||||
```
|
||||
|
||||
### Running tests with local browser build
|
||||
|
||||
Playwright test suite may run against local browser build without bundling it.
|
||||
```bash
|
||||
# Run webkit tests with local webkit build
|
||||
WKPATH=./browser_patches/webkit/pw_run.sh npm run wtest
|
||||
|
||||
# Run firefox tests with local firefox build on macos
|
||||
FFPATH=/tmp/repackaged-firefox/firefox/Nightly.app/Contents/MacOS/firefox npm run ftest
|
||||
|
||||
# Run chromium tests with local chromium build on linux
|
||||
CRPATH=~/chromium/src/out/Release/chrome npm run ctest
|
||||
```
|
||||
|
||||
### Flakiness dashboard
|
||||
|
||||
You can look at the [flakiness dashboard](http://flaky.aslushnikov.com/) to see recent history of any playwright test.
|
||||
|
||||
## 3. Exporting your change to playwright repo
|
||||
|
||||
Once you're happy with the work you did in the browser-land, you want to export it to the `playwright` repo.
|
||||
|
||||
Assuming that you're in the root of the `playwright` repo and that your browser checkout has your feature branch checked out:
|
||||
|
||||
```bash
|
||||
$ ./browser_patches/export.sh firefox
|
||||
```
|
||||
|
||||
This script will:
|
||||
- create a new patch and put it to the `./browser_patches/firefox/patches/`
|
||||
- update the `./browser_patches/firefox/UPSTREAM_CONFIG.sh` if necessary
|
||||
- bump the `./browser_patches/firefox/BUILD_NUMBER` number.
|
||||
|
||||
The script will assume Firefox checkout is located at `$HOME/firefox`
|
||||
|
||||
Send a PR to the Playwright repo to be reviewed.
|
||||
|
||||
## 4. Rolling Playwright to the new browser build
|
||||
|
||||
Once the patch has been committed, the build bots will kick in, compile and upload a new browser version to all the platforms. Then you can roll the browser:
|
||||
|
||||
```bash
|
||||
$ node utils/roll_browser.js chromium 123456
|
||||
```
|
||||
|
||||
# Cheatsheet
|
||||
|
||||
## See browser stdout/stderr
|
||||
|
||||
Set the `DEBUG=pw:browser` environment variable to see it.
|
||||
|
||||
## Firefox
|
||||
|
||||
### Debug build
|
||||
|
||||
When compiling set the `FF_DEBUG_BUILD=1` environment variable.
|
||||
|
||||
#### Stack trace
|
||||
|
||||
In `//mozglue/misc/StackWalk.cpp` add
|
||||
|
||||
```c++
|
||||
#define MOZ_DEMANGLE_SYMBOLS 1
|
||||
```
|
||||
|
||||
In native code use
|
||||
|
||||
```c++
|
||||
#include "mozilla/StackWalk.h"
|
||||
// ...
|
||||
MozWalkTheStack(stderr);
|
||||
```
|
||||
|
||||
If the stack trace is still mangled `cat` it to `tools/rb/fix_linux_stack.py`
|
||||
|
||||
#### Logging
|
||||
|
||||
Upstream documentation: https://firefox-source-docs.mozilla.org/xpcom/logging.html
|
||||
|
||||
```bash
|
||||
MOZ_LOG=nsHttp:5
|
||||
```
|
||||
|
||||
Module name is a string passed to the `mozilla::LazyLogModule` of the corresponding component, e.g.:
|
||||
|
||||
```c++
|
||||
LazyLogModule gHttpLog("nsHttp");
|
||||
```
|
||||
|
||||
Inside Juggler, you can use `dump('foo\n')`.
|
||||
|
||||
## WebKit
|
||||
|
||||
#### Logging
|
||||
|
||||
Inside Objective-C you can use [NSLog](https://developer.apple.com/documentation/foundation/1395275-nslog).
|
||||
|
||||
```
|
||||
NSLog(@"Foobar value: %@", value);
|
||||
```
|
||||
|
||||
#### Debugging windows
|
||||
|
||||
In `Source\WTF\wtf\win\DbgHelperWin.cpp` replace
|
||||
|
||||
```#if !defined(NDEBUG)``` with ```#if 1```
|
||||
|
||||
Then regular `WTFReportBacktrace()` works.
|
||||
|
||||
#### Debugging linux
|
||||
|
||||
`WTFReportBacktrace()` has been broken since [r283707](https://github.com/WebKit/WebKit/commit/de4ba48c8f229bc45042b543a514f6d88b551a64), see [this comment](https://bugs.webkit.org/show_bug.cgi?id=181916#c96). Revert that change locally to make backtraces work again. Otherwise addr2line -f can still be used to map addresses to function names.
|
||||
|
||||
#### Enable core dumps on Linux
|
||||
|
||||
```bash
|
||||
mkdir -p /tmp/coredumps
|
||||
sudo bash -c 'echo "/tmp/coredumps/core-pid_%p.dump" > /proc/sys/kernel/core_pattern'
|
||||
ulimit -c unlimited
|
||||
```
|
||||
|
||||
Then to read stack traces run the following command:
|
||||
```bash
|
||||
# To find out crashing process name
|
||||
file core-pid_29652.dump
|
||||
# Point gdb to the local binary of the crashed process and the core file
|
||||
gdb $HOME/.cache/ms-playwright/webkit-1292/minibrowser-gtk/WebKitWebProcess core-pid_29652
|
||||
# Inside gdb update .so library search path to the local one
|
||||
set solib-search-path /home/yurys/.cache/ms-playwright/webkit-1292/minibrowser-gtk
|
||||
# Finally print backtrace
|
||||
bt
|
||||
```
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
|
||||
if [[ ($1 == '--help') || ($1 == '-h') ]]; then
|
||||
echo "usage: build.sh [firefox|webkit|firefox-beta]"
|
||||
echo
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ $# == 0 ]]; then
|
||||
echo "missing browser: 'firefox' or 'webkit'"
|
||||
echo "try './build.sh --help' for more information"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CMD="$1"
|
||||
shift
|
||||
if [[ ("$CMD" == "firefox") || ("$CMD" == "firefox/") || ("$CMD" == "ff") ]]; then
|
||||
bash ./firefox/build.sh "$@"
|
||||
elif [[ ("$CMD" == "firefox-beta") || ("$CMD" == "ff-beta") ]]; then
|
||||
bash ./firefox-beta/build.sh "$@"
|
||||
elif [[ ("$CMD" == "webkit") || ("$CMD" == "webkit/") || ("$CMD" == "wk") ]]; then
|
||||
bash ./webkit/build.sh "$@"
|
||||
elif [[ ("$CMD" == "chromium") || ("$CMD" == "chromium/") || ("$CMD" == "cr") ]]; then
|
||||
bash ./chromium/build.sh "$@"
|
||||
elif [[ ("$CMD" == "winldd") ]]; then
|
||||
bash ./winldd/build.sh "$@"
|
||||
elif [[ ("$CMD" == "ffmpeg") ]]; then
|
||||
bash ./ffmpeg/build.sh "$@"
|
||||
else
|
||||
echo ERROR: unknown browser to build - "$CMD"
|
||||
exit 1
|
||||
fi
|
||||
|
|
@ -1,548 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
set -o pipefail
|
||||
|
||||
if [[ ($1 == '--help') || ($1 == '-h') ]]; then
|
||||
echo "usage: $(basename "$0") [firefox-linux|firefox-win64|webkit-gtk|webkit-wpe|webkit-gtk-wpe|webkit-win64|webkit-mac-10.15] [-f|--force]"
|
||||
echo
|
||||
echo "Prepares checkout under browser folder, applies patches, builds, archives, and uploads if build is missing."
|
||||
echo "Script will bail out early if the build for the browser version is already present."
|
||||
echo
|
||||
echo "Pass -f to upload anyway."
|
||||
echo
|
||||
echo "NOTE: This script is safe to run in a cronjob - it aquires a lock so that it does not run twice."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ $# == 0 ]]; then
|
||||
echo "missing build flavor!"
|
||||
echo "try './$(basename "$0") --help' for more information"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CURRENT_ARCH="$(uname -m)"
|
||||
CURRENT_HOST_OS="$(uname)"
|
||||
CURRENT_HOST_OS_VERSION=""
|
||||
if [[ "$CURRENT_HOST_OS" == "Darwin" ]]; then
|
||||
CURRENT_HOST_OS_VERSION=$(sw_vers -productVersion | grep -o '^\d\+.\d\+')
|
||||
elif [[ "$CURRENT_HOST_OS" == "Linux" ]]; then
|
||||
CURRENT_HOST_OS="$(bash -c 'source /etc/os-release && echo $NAME')"
|
||||
CURRENT_HOST_OS_VERSION="$(bash -c 'source /etc/os-release && echo $VERSION_ID')"
|
||||
fi
|
||||
|
||||
BROWSER_NAME=""
|
||||
BROWSER_DISPLAY_NAME=""
|
||||
EXTRA_BUILD_ARGS=""
|
||||
EXTRA_ARCHIVE_ARGS=""
|
||||
BUILD_FLAVOR="$1"
|
||||
BUILD_BLOB_NAME=""
|
||||
EXPECTED_HOST_OS=""
|
||||
EXPECTED_HOST_OS_VERSION=""
|
||||
EXPECTED_ARCH="x86_64"
|
||||
BUILDS_LIST="EXPECTED_BUILDS"
|
||||
|
||||
# ===========================
|
||||
# WINLDD COMPILATION
|
||||
# ===========================
|
||||
if [[ "$BUILD_FLAVOR" == "winldd-win64" ]]; then
|
||||
BROWSER_NAME="winldd"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="winldd-win64.zip"
|
||||
|
||||
|
||||
# ===========================
|
||||
# FFMPEG COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "ffmpeg-mac" ]]; then
|
||||
BROWSER_NAME="ffmpeg"
|
||||
EXTRA_BUILD_ARGS="--mac --full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
BUILD_BLOB_NAME="ffmpeg-mac.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "ffmpeg-mac-arm64" ]]; then
|
||||
BROWSER_NAME="ffmpeg"
|
||||
EXTRA_BUILD_ARGS="--mac --full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="arm64"
|
||||
BUILD_BLOB_NAME="ffmpeg-mac-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "ffmpeg-linux" ]]; then
|
||||
BROWSER_NAME="ffmpeg"
|
||||
EXTRA_BUILD_ARGS="--linux"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="ffmpeg-linux.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "ffmpeg-linux-arm64" ]]; then
|
||||
BROWSER_NAME="ffmpeg"
|
||||
EXTRA_BUILD_ARGS="--cross-compile-linux-arm64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="ffmpeg-linux-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "ffmpeg-cross-compile-win64" ]]; then
|
||||
BROWSER_NAME="ffmpeg"
|
||||
EXTRA_BUILD_ARGS="--cross-compile-win64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="ffmpeg-win64.zip"
|
||||
|
||||
# ===========================
|
||||
# CHROMIUM COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-win64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="chromium-win64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-mac" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-mac.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-mac-arm64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
EXTRA_BUILD_ARGS="--arm64 --full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-mac-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-linux" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="chromium-linux.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-linux-arm64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
EXTRA_BUILD_ARGS="--arm64 --full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="chromium-linux-arm64.zip"
|
||||
|
||||
# ===========================
|
||||
# CHROMIUM-TIP-OF-TREE COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-tip-of-tree-win64" ]]; then
|
||||
BROWSER_NAME="chromium-tip-of-tree"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="chromium-tip-of-tree-win64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-tip-of-tree-mac" ]]; then
|
||||
BROWSER_NAME="chromium-tip-of-tree"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-tip-of-tree-mac.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-tip-of-tree-mac-arm64" ]]; then
|
||||
BROWSER_NAME="chromium-tip-of-tree"
|
||||
EXTRA_BUILD_ARGS="--arm64 --full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-tip-of-tree-mac-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-tip-of-tree-linux" ]]; then
|
||||
BROWSER_NAME="chromium-tip-of-tree"
|
||||
EXTRA_BUILD_ARGS="--full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="chromium-tip-of-tree-linux.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-tip-of-tree-linux-arm64" ]]; then
|
||||
BROWSER_NAME="chromium-tip-of-tree"
|
||||
EXTRA_BUILD_ARGS="--arm64 --full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="chromium-tip-of-tree-linux-arm64.zip"
|
||||
|
||||
# ===========================
|
||||
# CHROMIUM-WITH-SYMBOLS COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-with-symbols-win64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
BROWSER_DISPLAY_NAME="chromium-with-symbols"
|
||||
EXTRA_BUILD_ARGS="--symbols --full --goma"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="chromium-with-symbols-win64.zip"
|
||||
BUILDS_LIST="EXPECTED_BUILDS_WITH_SYMBOLS"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-with-symbols-mac" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
BROWSER_DISPLAY_NAME="chromium-with-symbols"
|
||||
EXTRA_BUILD_ARGS="--symbols --full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-with-symbols-mac.zip"
|
||||
BUILDS_LIST="EXPECTED_BUILDS_WITH_SYMBOLS"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-with-symbols-mac-arm64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
BROWSER_DISPLAY_NAME="chromium-with-symbols"
|
||||
EXTRA_BUILD_ARGS="--arm64 --symbols --full --goma"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="chromium-with-symbols-mac-arm64.zip"
|
||||
BUILDS_LIST="EXPECTED_BUILDS_WITH_SYMBOLS"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-with-symbols-linux" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
BROWSER_DISPLAY_NAME="chromium-with-symbols"
|
||||
EXTRA_BUILD_ARGS="--symbols --full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="chromium-with-symbols-linux.zip"
|
||||
BUILDS_LIST="EXPECTED_BUILDS_WITH_SYMBOLS"
|
||||
elif [[ "$BUILD_FLAVOR" == "chromium-with-symbols-linux-arm64" ]]; then
|
||||
BROWSER_NAME="chromium"
|
||||
BROWSER_DISPLAY_NAME="chromium-with-symbols-arm64"
|
||||
EXTRA_BUILD_ARGS="--arm64 --symbols --full --goma"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="chromium-with-symbols-linux-arm64.zip"
|
||||
BUILDS_LIST="EXPECTED_BUILDS_WITH_SYMBOLS"
|
||||
|
||||
# ===========================
|
||||
# FIREFOX COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-ubuntu-18.04" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="firefox-ubuntu-18.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-ubuntu-20.04" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="firefox-ubuntu-20.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-ubuntu-20.04-arm64" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="firefox-ubuntu-20.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-ubuntu-22.04" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
BUILD_BLOB_NAME="firefox-ubuntu-22.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-ubuntu-22.04-arm64" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
BUILD_BLOB_NAME="firefox-ubuntu-22.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-debian-11" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Debian"
|
||||
EXPECTED_HOST_OS_VERSION="11"
|
||||
BUILD_BLOB_NAME="firefox-debian-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-mac-11" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="x86_64"
|
||||
BUILD_BLOB_NAME="firefox-mac-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-mac-11-arm64" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="arm64"
|
||||
BUILD_BLOB_NAME="firefox-mac-11-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-win64" ]]; then
|
||||
BROWSER_NAME="firefox"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="firefox-win64.zip"
|
||||
# This is the architecture that is set by mozilla-build bash.
|
||||
EXPECTED_ARCH="i686"
|
||||
|
||||
|
||||
# ===============================
|
||||
# FIREFOX-BETA COMPILATION
|
||||
# ===============================
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-ubuntu-18.04" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="firefox-beta-ubuntu-18.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-ubuntu-20.04" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="firefox-beta-ubuntu-20.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-ubuntu-20.04-arm64" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="firefox-beta-ubuntu-20.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-ubuntu-22.04" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
BUILD_BLOB_NAME="firefox-beta-ubuntu-22.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-ubuntu-22.04-arm64" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
BUILD_BLOB_NAME="firefox-beta-ubuntu-22.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-debian-11" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Debian"
|
||||
EXPECTED_HOST_OS_VERSION="11"
|
||||
BUILD_BLOB_NAME="firefox-beta-debian-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-mac-11" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="x86_64"
|
||||
BUILD_BLOB_NAME="firefox-beta-mac-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-mac-11-arm64" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="arm64"
|
||||
BUILD_BLOB_NAME="firefox-beta-mac-11-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "firefox-beta-win64" ]]; then
|
||||
BROWSER_NAME="firefox-beta"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="firefox-beta-win64.zip"
|
||||
# This is the architecture that is set by mozilla-build bash.
|
||||
EXPECTED_ARCH="i686"
|
||||
|
||||
# ===========================
|
||||
# WEBKIT COMPILATION
|
||||
# ===========================
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-debian-11" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Debian"
|
||||
EXPECTED_HOST_OS_VERSION="11"
|
||||
BUILD_BLOB_NAME="webkit-debian-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-ubuntu-18.04" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="18.04"
|
||||
BUILD_BLOB_NAME="webkit-ubuntu-18.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-ubuntu-20.04" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
BUILD_BLOB_NAME="webkit-ubuntu-20.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-ubuntu-20.04-arm64" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="20.04"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
BUILD_BLOB_NAME="webkit-ubuntu-20.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-ubuntu-22.04" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
BUILD_BLOB_NAME="webkit-ubuntu-22.04.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-ubuntu-22.04-arm64" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXTRA_BUILD_ARGS="--full"
|
||||
EXPECTED_HOST_OS="Ubuntu"
|
||||
EXPECTED_HOST_OS_VERSION="22.04"
|
||||
EXPECTED_ARCH="aarch64"
|
||||
BUILD_BLOB_NAME="webkit-ubuntu-22.04-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-win64" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="MINGW"
|
||||
BUILD_BLOB_NAME="webkit-win64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-mac-10.15" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="10.15"
|
||||
BUILD_BLOB_NAME="webkit-mac-10.15.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-mac-12" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
BUILD_BLOB_NAME="webkit-mac-12.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-mac-12-arm64" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="12.2"
|
||||
EXPECTED_ARCH="arm64"
|
||||
BUILD_BLOB_NAME="webkit-mac-12-arm64.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-mac-11" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
BUILD_BLOB_NAME="webkit-mac-11.zip"
|
||||
elif [[ "$BUILD_FLAVOR" == "webkit-mac-11-arm64" ]]; then
|
||||
BROWSER_NAME="webkit"
|
||||
EXPECTED_HOST_OS="Darwin"
|
||||
EXPECTED_HOST_OS_VERSION="11.6"
|
||||
EXPECTED_ARCH="arm64"
|
||||
BUILD_BLOB_NAME="webkit-mac-11-arm64.zip"
|
||||
|
||||
|
||||
# ===========================
|
||||
# Unknown input
|
||||
# ===========================
|
||||
else
|
||||
echo ERROR: unknown build flavor - "$BUILD_FLAVOR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "$BROWSER_DISPLAY_NAME" ]]; then
|
||||
BROWSER_DISPLAY_NAME="${BROWSER_NAME}"
|
||||
fi
|
||||
|
||||
if [[ "$CURRENT_ARCH" != "$EXPECTED_ARCH" ]]; then
|
||||
echo "ERROR: cannot build $BUILD_FLAVOR"
|
||||
echo " -- expected arch: $EXPECTED_ARCH"
|
||||
echo " -- current arch: $CURRENT_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$CURRENT_HOST_OS" != $EXPECTED_HOST_OS* ]]; then
|
||||
echo "ERROR: cannot build $BUILD_FLAVOR"
|
||||
echo " -- expected OS: $EXPECTED_HOST_OS"
|
||||
echo " -- current OS: $CURRENT_HOST_OS"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$CURRENT_HOST_OS_VERSION" != "$EXPECTED_HOST_OS_VERSION" ]]; then
|
||||
echo "ERROR: cannot build $BUILD_FLAVOR"
|
||||
echo " -- expected OS Version: $EXPECTED_HOST_OS_VERSION"
|
||||
echo " -- current OS Version: $CURRENT_HOST_OS_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ $(uname) == MINGW* || "$(uname)" == MSYS* ]]; then
|
||||
ZIP_PATH="$PWD/archive-$BROWSER_NAME.zip"
|
||||
LOG_PATH="$PWD/log-$BROWSER_NAME.zip"
|
||||
else
|
||||
ZIP_PATH="/tmp/archive-$BROWSER_NAME.zip"
|
||||
LOG_PATH="/tmp/log-$BROWSER_NAME.zip"
|
||||
fi
|
||||
|
||||
if [[ -f "$ZIP_PATH" ]]; then
|
||||
echo "Archive $ZIP_PATH already exists - remove and re-run the script."
|
||||
exit 1
|
||||
fi
|
||||
trap "rm -rf ${ZIP_PATH}; rm -rf ${LOG_PATH}; cd $(pwd -P);" INT TERM EXIT
|
||||
cd "$(dirname "$0")"
|
||||
BUILD_NUMBER=$(head -1 ./$BROWSER_NAME/BUILD_NUMBER)
|
||||
BUILD_BLOB_PATH="${BROWSER_NAME}/${BUILD_NUMBER}/${BUILD_BLOB_NAME}"
|
||||
LOG_BLOB_NAME="${BUILD_BLOB_NAME%.zip}.log.gz"
|
||||
LOG_BLOB_PATH="${BROWSER_NAME}/${BUILD_NUMBER}/${LOG_BLOB_NAME}"
|
||||
|
||||
# pull from upstream and check if a new build has to be uploaded.
|
||||
if ! [[ ($2 == '-f') || ($2 == '--force') ]]; then
|
||||
if ./upload.sh "${BUILD_BLOB_PATH}" --check; then
|
||||
echo "Build is already uploaded - no changes."
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
echo "Force-rebuilding the build."
|
||||
fi
|
||||
|
||||
function generate_and_upload_browser_build {
|
||||
echo "-- preparing checkout"
|
||||
if ! ./prepare_checkout.sh $BROWSER_NAME; then
|
||||
return 20
|
||||
fi
|
||||
|
||||
echo "-- cleaning"
|
||||
if ! ./$BROWSER_NAME/clean.sh; then
|
||||
return 21
|
||||
fi
|
||||
|
||||
echo "-- building"
|
||||
if ! ./$BROWSER_NAME/build.sh $EXTRA_BUILD_ARGS; then
|
||||
return 22
|
||||
fi
|
||||
|
||||
echo "-- archiving to $ZIP_PATH"
|
||||
if ! ./$BROWSER_NAME/archive.sh "$ZIP_PATH" $EXTRA_ARCHIVE_ARGS; then
|
||||
return 23
|
||||
fi
|
||||
|
||||
echo "-- uploading"
|
||||
if ! ./upload.sh "$BUILD_BLOB_PATH" "$ZIP_PATH"; then
|
||||
return 24
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
function create_roll_into_playwright_pr {
|
||||
curl -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GH_TOKEN}" \
|
||||
--data '{"event_type": "roll_into_pw", "client_payload": {"browser": "'"$1"'", "revision": "'"$2"'"}}' \
|
||||
https://api.github.com/repos/microsoft/playwright/dispatches
|
||||
}
|
||||
|
||||
BUILD_ALIAS="$BUILD_FLAVOR r$BUILD_NUMBER"
|
||||
node send_telegram_message.js "$BUILD_ALIAS -- started"
|
||||
|
||||
if generate_and_upload_browser_build 2>&1 | ./sanitize_and_compress_log.js $LOG_PATH; then
|
||||
# Report successful build. Note: MINGW might not have `du` command.
|
||||
UPLOAD_SIZE=""
|
||||
if command -v du >/dev/null && command -v awk >/dev/null; then
|
||||
UPLOAD_SIZE="$(du -h "$ZIP_PATH" | awk '{print $1}') "
|
||||
fi
|
||||
node send_telegram_message.js "$BUILD_ALIAS -- ${UPLOAD_SIZE}uploaded"
|
||||
|
||||
# Check if we uploaded the last build.
|
||||
(
|
||||
for i in $(cat "${BROWSER_NAME}/${BUILDS_LIST}"); do
|
||||
URL="https://playwright2.blob.core.windows.net/builds/${BROWSER_NAME}/${BUILD_NUMBER}/$i"
|
||||
if ! [[ $(curl -s -L -I "$URL" | head -1 | cut -f2 -d' ') == 200 ]]; then
|
||||
# Exit subshell
|
||||
echo "Missing build at ${URL}"
|
||||
exit
|
||||
fi
|
||||
done;
|
||||
LAST_COMMIT_MESSAGE=$(git log --format=%s -n 1 HEAD -- "./${BROWSER_NAME}/BUILD_NUMBER")
|
||||
node send_telegram_message.js "<b>${BROWSER_DISPLAY_NAME} r${BUILD_NUMBER} COMPLETE! ✅</b> ${LAST_COMMIT_MESSAGE}"
|
||||
if [[ "${BROWSER_DISPLAY_NAME}" != "chromium-with-symbols" ]]; then
|
||||
create_roll_into_playwright_pr $BROWSER_NAME $BUILD_NUMBER
|
||||
fi
|
||||
)
|
||||
else
|
||||
RESULT_CODE="$?"
|
||||
if (( RESULT_CODE == 10 )); then
|
||||
FAILED_STEP="./download_gtk_and_wpe_and_zip_together.sh"
|
||||
elif (( RESULT_CODE == 11 )); then
|
||||
FAILED_STEP="./upload.sh"
|
||||
elif (( RESULT_CODE == 20 )); then
|
||||
FAILED_STEP="./prepare_checkout.sh"
|
||||
elif (( RESULT_CODE == 21 )); then
|
||||
FAILED_STEP="./clean.sh"
|
||||
elif (( RESULT_CODE == 22 )); then
|
||||
FAILED_STEP="./build.sh"
|
||||
elif (( RESULT_CODE == 23 )); then
|
||||
FAILED_STEP="./archive.sh"
|
||||
elif (( RESULT_CODE == 24 )); then
|
||||
FAILED_STEP="./upload.sh"
|
||||
else
|
||||
FAILED_STEP="<unknown step>"
|
||||
fi
|
||||
# Upload logs only in case of failure and report failure.
|
||||
./upload.sh "${LOG_BLOB_PATH}" ${LOG_PATH} || true
|
||||
node send_telegram_message.js "$BUILD_ALIAS -- ${FAILED_STEP} failed! ❌ <a href='https://playwright.azureedge.net/builds/${LOG_BLOB_PATH}'>${LOG_BLOB_NAME}</a> -- <a href='$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID'>GitHub Action Logs</a>"
|
||||
exit 1
|
||||
fi
|
||||
|
|
@ -1 +0,0 @@
|
|||
1037
|
|
@ -1,5 +0,0 @@
|
|||
chromium-tip-of-tree-mac.zip
|
||||
chromium-tip-of-tree-mac-arm64.zip
|
||||
chromium-tip-of-tree-linux.zip
|
||||
chromium-tip-of-tree-linux-arm64.zip
|
||||
chromium-tip-of-tree-win64.zip
|
|
@ -1,3 +0,0 @@
|
|||
# CURRENT_VERSION: 107.0.5260.0
|
||||
# BRANCH_BASE_POSITION: 1039004
|
||||
BRANCH_COMMIT="36307074a546485e40ad2295e546e3a681596689"
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
|
||||
bash "../chromium/archive.sh" "$@"
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
|
||||
bash "../chromium/build.sh" "$@"
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
|
||||
bash "../chromium/clean.sh" "$@"
|
|
@ -1,32 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
|
||||
# 1. get current version
|
||||
CURRENT_BETA_VERSION=$(curl https://omahaproxy.appspot.com/all | grep "win64,canary," | cut -d ',' -f 3)
|
||||
VERSION_INFO_JSON=$(curl "https://omahaproxy.appspot.com/deps.json?version=$CURRENT_BETA_VERSION")
|
||||
|
||||
NODE_SCRIPT=$(cat <<EOF
|
||||
const json = JSON.parse(fs.readFileSync(0));
|
||||
console.log([
|
||||
'# CURRENT_VERSION: ' + json.chromium_version,
|
||||
'# BRANCH_BASE_POSITION: ' + json.chromium_base_position,
|
||||
'BRANCH_COMMIT="' + json.chromium_base_commit + '"',
|
||||
].join('\n'));
|
||||
EOF
|
||||
)
|
||||
NEW_CONFIG=$(echo "${VERSION_INFO_JSON}" | node -e "${NODE_SCRIPT}")
|
||||
CURRENT_CONFIG=$(cat "${SCRIPT_FOLDER}/UPSTREAM_CONFIG.sh")
|
||||
|
||||
if [[ "${CURRENT_CONFIG}" == "${NEW_CONFIG}" ]]; then
|
||||
echo "No changes!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "${NEW_CONFIG}" > "${SCRIPT_FOLDER}/UPSTREAM_CONFIG.sh"
|
||||
BUILD_NUMBER=$(cat "${SCRIPT_FOLDER}/BUILD_NUMBER")
|
||||
echo $(( $BUILD_NUMBER + 1 )) > "${SCRIPT_FOLDER}/BUILD_NUMBER"
|
|
@ -1,3 +0,0 @@
|
|||
/output
|
||||
/depot_tools
|
||||
/electron-build-tools
|
|
@ -1 +0,0 @@
|
|||
1022
|
|
@ -1,5 +0,0 @@
|
|||
chromium-mac.zip
|
||||
chromium-mac-arm64.zip
|
||||
chromium-linux.zip
|
||||
chromium-linux-arm64.zip
|
||||
chromium-win64.zip
|
|
@ -1,5 +0,0 @@
|
|||
chromium-with-symbols-mac.zip
|
||||
chromium-with-symbols-mac-arm64.zip
|
||||
chromium-with-symbols-linux.zip
|
||||
chromium-with-symbols-linux-arm64.zip
|
||||
chromium-with-symbols-win64.zip
|
|
@ -1,3 +0,0 @@
|
|||
# CURRENT_VERSION: 105.0.5195.52
|
||||
# BRANCH_BASE_POSITION: 1027018
|
||||
BRANCH_COMMIT="f6b3c2f7e67f9f47e936acbcb765fa557b670851"
|
|
@ -1,92 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_PATH=$(pwd -P)
|
||||
source "${SCRIPT_PATH}/../utils.sh"
|
||||
|
||||
if [[ ("$1" == "-h") || ("$1" == "--help") ]]; then
|
||||
echo "usage: $(basename "$0") [output-absolute-path]"
|
||||
echo
|
||||
echo "Generate distributable .zip archive from ./output folder that was previously downloaded."
|
||||
echo
|
||||
exit 0
|
||||
fi
|
||||
|
||||
ZIP_PATH=$1
|
||||
|
||||
if [[ $ZIP_PATH != /* ]]; then
|
||||
echo "ERROR: path $ZIP_PATH is not absolute"
|
||||
exit 1
|
||||
fi
|
||||
if [[ $ZIP_PATH != *.zip ]]; then
|
||||
echo "ERROR: path $ZIP_PATH must have .zip extension"
|
||||
exit 1
|
||||
fi
|
||||
if [[ -f $ZIP_PATH ]]; then
|
||||
echo "ERROR: path $ZIP_PATH exists; can't do anything."
|
||||
exit 1
|
||||
fi
|
||||
if ! [[ -d $(dirname "$ZIP_PATH") ]]; then
|
||||
echo "ERROR: folder for path $($ZIP_PATH) does not exist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "${CR_CHECKOUT_PATH}" ]]; then
|
||||
CR_CHECKOUT_PATH="$HOME/chromium"
|
||||
fi
|
||||
if [[ ! -d "${CR_CHECKOUT_PATH}/src" ]]; then
|
||||
echo "ERROR: CR_CHECKOUT_PATH does not have src/ subfolder; is this a chromium checkout?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CHROMIUM_FOLDER_NAME=""
|
||||
CHROMIUM_FILES_TO_ARCHIVE=()
|
||||
|
||||
if is_mac; then
|
||||
CHROMIUM_FOLDER_NAME="chrome-mac"
|
||||
IFS=$'\n' CHROMIUM_FILES_TO_ARCHIVE=($(node "${SCRIPT_PATH}/compute_files_to_archive.js" "${CR_CHECKOUT_PATH}/src/infra/archive_config/mac-archive-rel.json"))
|
||||
unset IFS
|
||||
elif is_linux; then
|
||||
CHROMIUM_FOLDER_NAME="chrome-linux"
|
||||
IFS=$'\n' CHROMIUM_FILES_TO_ARCHIVE=($(node "${SCRIPT_PATH}/compute_files_to_archive.js" "${CR_CHECKOUT_PATH}/src/infra/archive_config/linux-archive-rel.json"))
|
||||
unset IFS
|
||||
elif is_win; then
|
||||
CHROMIUM_FOLDER_NAME="chrome-win"
|
||||
IFS=$'\n\r' CHROMIUM_FILES_TO_ARCHIVE=($(node "${SCRIPT_PATH}/compute_files_to_archive.js" "${CR_CHECKOUT_PATH}/src/infra/archive_config/win-archive-rel.json"))
|
||||
unset IFS
|
||||
else
|
||||
echo "ERROR: unsupported platform - $(uname)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Prepare resulting archive.
|
||||
cd "$SCRIPT_PATH"
|
||||
rm -rf output
|
||||
mkdir -p "output/${CHROMIUM_FOLDER_NAME}"
|
||||
|
||||
# On Mac, use 'ditto' to copy directories instead of 'cp'.
|
||||
COPY_COMMAND="cp -R"
|
||||
if is_mac; then
|
||||
COPY_COMMAND="ditto"
|
||||
fi
|
||||
|
||||
for ((i = 0; i < ${#CHROMIUM_FILES_TO_ARCHIVE[@]}; i++)) do
|
||||
file="${CHROMIUM_FILES_TO_ARCHIVE[$i]}"
|
||||
mkdir -p "output/${CHROMIUM_FOLDER_NAME}/$(dirname "${file}")"
|
||||
$COPY_COMMAND "${CR_CHECKOUT_PATH}/src/out/Default/${file}" "output/${CHROMIUM_FOLDER_NAME}/${file}"
|
||||
done
|
||||
|
||||
if is_win; then
|
||||
$COPY_COMMAND "${CR_CHECKOUT_PATH}/src/out/Default/"*.manifest "output/${CHROMIUM_FOLDER_NAME}/"
|
||||
mkdir -p "output/${CHROMIUM_FOLDER_NAME}/locales"
|
||||
$COPY_COMMAND "${CR_CHECKOUT_PATH}/src/out/Default/locales/"*.pak "output/${CHROMIUM_FOLDER_NAME}/locales/"
|
||||
fi
|
||||
|
||||
cd output
|
||||
zip --symlinks -r build.zip "${CHROMIUM_FOLDER_NAME}"
|
||||
|
||||
cd "${SCRIPT_PATH}"
|
||||
cp output/build.zip "$ZIP_PATH"
|
|
@ -1,130 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
source "${SCRIPT_FOLDER}/../utils.sh"
|
||||
|
||||
USAGE=$(cat<<EOF
|
||||
usage: $(basename "$0") [--arm64] [--symbols] [--full] [--goma] <custom targets to compile>
|
||||
|
||||
--arm64 cross-compile for arm64
|
||||
--symbols compile with symbols
|
||||
--full install build dependencies
|
||||
--goma use goma when compiling. Make sure to pre-start goma client beforehand with './goma.sh start'.
|
||||
|
||||
On Linux & MacOS, it is possible to specify custom compilation targets:
|
||||
|
||||
./build.sh --goma blink_tests
|
||||
|
||||
EOF
|
||||
)
|
||||
|
||||
source "${SCRIPT_FOLDER}/../utils.sh"
|
||||
|
||||
if [[ $1 == "--help" || $1 == "-h" ]]; then
|
||||
echo "$USAGE"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
args=("$@")
|
||||
IS_ARM64=""
|
||||
IS_SYMBOLS_BUILD=""
|
||||
IS_FULL=""
|
||||
USE_GOMA=""
|
||||
for ((i="${#args[@]}"-1; i >= 0; --i)); do
|
||||
case ${args[i]} in
|
||||
--arm64) IS_ARM64="1"; unset args[i]; ;;
|
||||
--symbols) IS_SYMBOLS_BUILD="1"; unset args[i]; ;;
|
||||
--full) IS_FULL="1"; unset args[i]; ;;
|
||||
--goma) USE_GOMA="1"; unset args[i]; ;;
|
||||
esac
|
||||
done
|
||||
|
||||
compile_chromium() {
|
||||
if [[ -z "${CR_CHECKOUT_PATH}" ]]; then
|
||||
CR_CHECKOUT_PATH="$HOME/chromium"
|
||||
fi
|
||||
|
||||
if [[ ! -d "${CR_CHECKOUT_PATH}/src" ]]; then
|
||||
echo "ERROR: CR_CHECKOUT_PATH does not have src/ subfolder; is this a chromium checkout?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
source "${SCRIPT_FOLDER}/ensure_depot_tools.sh"
|
||||
|
||||
if is_mac; then
|
||||
selectXcodeVersionOrDie $(node "${SCRIPT_FOLDER}/../get_xcode_version.js" chromium)
|
||||
fi
|
||||
|
||||
cd "${CR_CHECKOUT_PATH}/src"
|
||||
|
||||
# Prepare build folder.
|
||||
mkdir -p "./out/Default"
|
||||
echo "is_debug = false" > ./out/Default/args.gn
|
||||
echo "dcheck_always_on = false" >> ./out/Default/args.gn
|
||||
if [[ -n "${IS_SYMBOLS_BUILD}" ]]; then
|
||||
echo "symbol_level = 1" >> ./out/Default/args.gn
|
||||
else
|
||||
echo "symbol_level = 0" >> ./out/Default/args.gn
|
||||
fi
|
||||
|
||||
if [[ -n "${IS_ARM64}" ]]; then
|
||||
echo 'target_cpu = "arm64"' >> ./out/Default/args.gn
|
||||
fi
|
||||
|
||||
if [[ ! -z "$USE_GOMA" ]]; then
|
||||
"${SCRIPT_FOLDER}/goma.sh" args >> ./out/Default/args.gn
|
||||
fi
|
||||
echo 'enable_nacl = false' >> ./out/Default/args.gn
|
||||
|
||||
echo "===== args.gn ====="
|
||||
cat ./out/Default/args.gn
|
||||
echo "===== ======= ====="
|
||||
|
||||
if [[ -n "$IS_FULL" ]]; then
|
||||
if is_linux; then
|
||||
./build/install-build-deps.sh
|
||||
if [[ -n "$IS_ARM64" ]]; then
|
||||
# Install sysroot image, see https://chromium.googlesource.com/chromium/src/+/refs/heads/main/docs/linux/chromium_arm.md
|
||||
./build/linux/sysroot_scripts/install-sysroot.py --arch=arm64
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
TARGETS="${args[@]}"
|
||||
if is_win; then
|
||||
if [[ -n "$TARGETS" ]]; then
|
||||
echo "ERROR: cannot compile custom targets on windows yet."
|
||||
echo "Requested to compile chromium targets - ${TARGETS}"
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z "$USE_GOMA" ]]; then
|
||||
/c/Windows/System32/cmd.exe "/c $(cygpath -w "${SCRIPT_FOLDER}"/buildwin.bat)"
|
||||
else
|
||||
/c/Windows/System32/cmd.exe "/c $(cygpath -w "${SCRIPT_FOLDER}"/buildwingoma.bat)"
|
||||
fi
|
||||
else
|
||||
if [[ -z "$TARGETS" ]]; then
|
||||
if is_linux; then
|
||||
TARGETS="chrome chrome_sandbox clear_key_cdm"
|
||||
else
|
||||
TARGETS="chrome"
|
||||
fi
|
||||
fi
|
||||
echo
|
||||
echo ">> Compiling Targets: $TARGETS"
|
||||
echo
|
||||
|
||||
gn gen out/Default
|
||||
if [[ -z "$USE_GOMA" ]]; then
|
||||
autoninja -C out/Default $TARGETS
|
||||
else
|
||||
ninja -j 200 -C out/Default $TARGETS
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
compile_chromium "${args[@]}"
|
|
@ -1,2 +0,0 @@
|
|||
CALL gn gen out/Default
|
||||
CALL autoninja -C out/Default chrome eventlog_provider
|
|
@ -1,2 +0,0 @@
|
|||
CALL gn gen out/Default
|
||||
CALL ninja -j 200 -C out/Default chrome eventlog_provider
|
|
@ -1,15 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
rm -rf output
|
||||
if [[ -z "${CR_CHECKOUT_PATH}" ]]; then
|
||||
CR_CHECKOUT_PATH="$HOME/chromium"
|
||||
fi
|
||||
|
||||
if [[ -d "${CR_CHECKOUT_PATH}/src" ]]; then
|
||||
rm -rf "${CR_CHECKOUT_PATH}/src/out"
|
||||
fi
|
|
@ -1,26 +0,0 @@
|
|||
// This script is supposed to be run with a path to either of the following configs from chromium checkout:
|
||||
// - infra/archive_config/mac-archive-rel.json
|
||||
// - infra/archive_config/linux-archive-rel.json
|
||||
// - infra/archive_config/win-archive-rel.json
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
const configs = JSON.parse(fs.readFileSync(process.argv[2], 'utf8')).archive_datas;
|
||||
const config = configs.find(config => config.gcs_path.includes('chrome-linux.zip') || config.gcs_path.includes('chrome-win.zip') || config.gcs_path.includes('chrome-mac.zip'));
|
||||
|
||||
const excludeList = new Set([
|
||||
// We do not need interactive tests in our archive.
|
||||
'interactive_ui_tests.exe',
|
||||
// We no longer compile nacl with Chromium.
|
||||
'nacl_helper_bootstrap',
|
||||
'nacl_helper',
|
||||
'nacl_irt_x86_64.nexe',
|
||||
]);
|
||||
|
||||
const entries = [
|
||||
...(config.files || []),
|
||||
...(config.dirs || []),
|
||||
].filter(entry => !excludeList.has(entry));
|
||||
|
||||
for (const entry of entries)
|
||||
console.log(entry);
|
|
@ -1,32 +0,0 @@
|
|||
# Since this script modifies PATH, it cannot be run in a subshell
|
||||
# and must be sourced.
|
||||
# Make sure it is sourced.
|
||||
sourced=0
|
||||
(return 0 2>/dev/null) && sourced=1 || sourced=0
|
||||
|
||||
if [[ $sourced == 0 ]]; then
|
||||
echo 'ERROR: cannot run this script in a subshell'
|
||||
echo 'This file modifies $PATH of the current shell, so it must be sourced instead'
|
||||
echo 'Use `source ensure_depot_tool.sh` instead'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
function ensure_depot_tools() {
|
||||
# Install depot_tools if they are not in system, and modify $PATH
|
||||
# to include depot_tools
|
||||
if ! command -v autoninja >/dev/null; then
|
||||
if [[ $(uname) == "MINGW"* || "$(uname)" == MSYS* ]]; then
|
||||
# NOTE: as of Feb 8, 2021, windows requires manual and separate
|
||||
# installation of depot_tools.
|
||||
echo "ERROR: cannot automatically install depot_tools on windows. Please, install manually"
|
||||
exit 1
|
||||
fi
|
||||
local SCRIPT_PATH=$(cd "$(dirname "$BASH_SOURCE")"; pwd -P)
|
||||
if [[ ! -d "${SCRIPT_PATH}/depot_tools" ]]; then
|
||||
git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git "${SCRIPT_PATH}/depot_tools"
|
||||
fi
|
||||
export PATH="${SCRIPT_PATH}/depot_tools:$PATH"
|
||||
fi
|
||||
}
|
||||
|
||||
ensure_depot_tools
|
|
@ -1,105 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
source "${SCRIPT_FOLDER}/../utils.sh"
|
||||
|
||||
ELECTRON_BUILD_TOOLS_REQUIRED_VERSION=2c24fb5c7c938a4e387f355ab64be449604ae5db
|
||||
if [[ -d ./electron-build-tools ]]; then
|
||||
cd ./electron-build-tools
|
||||
# Make sure required commit is part of electron-build-tools.
|
||||
if ! git merge-base --is-ancestor "${ELECTRON_BUILD_TOOLS_REQUIRED_VERSION}" HEAD; then
|
||||
cd ..
|
||||
rm -rf ./electron-build-tools
|
||||
echo "Updating electron-build-tools"
|
||||
else
|
||||
cd ..
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ ! -d ./electron-build-tools ]]; then
|
||||
git clone --single-branch --branch main https://github.com/electron/build-tools/ electron-build-tools
|
||||
cd electron-build-tools
|
||||
npm install
|
||||
mkdir -p third_party
|
||||
./src/e update-goma msftGoma
|
||||
cd ..
|
||||
fi
|
||||
|
||||
if ! is_win; then
|
||||
if command -v python >/dev/null; then
|
||||
PYTHON=python
|
||||
elif command -v python3 >/dev/null; then
|
||||
PYTHON=python3
|
||||
else
|
||||
echo "ERROR: no python or python3 found in PATH"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
cd electron-build-tools/third_party/goma
|
||||
|
||||
export GOMA_START_COMPILER_PROXY=true
|
||||
|
||||
function print_gn_args() {
|
||||
PLAYWRIGHT_GOMA_PATH="${SCRIPT_FOLDER}/electron-build-tools/third_party/goma"
|
||||
if is_win; then
|
||||
PLAYWRIGHT_GOMA_PATH=$(cygpath -w "${PLAYWRIGHT_GOMA_PATH}")
|
||||
fi
|
||||
echo 'use_goma = true'
|
||||
echo "goma_dir = \"${PLAYWRIGHT_GOMA_PATH}\""
|
||||
}
|
||||
|
||||
if [[ $1 == "--help" ]]; then
|
||||
echo "$(basename "$0") [login|start|stop|--help]"
|
||||
exit 0
|
||||
elif [[ $1 == "args" ]]; then
|
||||
print_gn_args
|
||||
elif [[ $1 == "login" ]]; then
|
||||
if is_win; then
|
||||
/c/Windows/System32/cmd.exe "/c $(cygpath -w $(pwd)/goma_auth.bat) login"
|
||||
else
|
||||
$PYTHON ./goma_auth.py login
|
||||
fi
|
||||
echo
|
||||
echo "Congratulation! Goma is logged in!"
|
||||
echo "run '$(basename "$0") start' to launch goma client"
|
||||
elif [[ $1 == "start" ]]; then
|
||||
# We have to prefix ENV with `PLAYWRIGHT` since `GOMA_` env variables
|
||||
# have special treatment by goma.
|
||||
if [[ ! -z "$PLAYWRIGHT_GOMA_LOGIN_COOKIE" ]]; then
|
||||
echo "$PLAYWRIGHT_GOMA_LOGIN_COOKIE" > "$HOME/.goma_oauth2_config"
|
||||
fi
|
||||
if [[ ! -f "$HOME/.goma_oauth2_config" ]]; then
|
||||
echo "ERROR: goma is not logged in!"
|
||||
echo "run '$(basename "$0") login'"
|
||||
exit 1
|
||||
fi
|
||||
if is_win; then
|
||||
/c/Windows/System32/cmd.exe "/c $(cygpath -w $(pwd)/goma_ctl.bat) ensure_start"
|
||||
else
|
||||
$PYTHON ./goma_ctl.py ensure_start
|
||||
fi
|
||||
set +x
|
||||
echo
|
||||
echo "Congratulatons! Goma is running!"
|
||||
echo
|
||||
echo "Add the following gn args to use goma:"
|
||||
echo
|
||||
echo "===== args.gn ====="
|
||||
print_gn_args
|
||||
echo "===== ======= ====="
|
||||
elif [[ $1 == "stop" ]]; then
|
||||
if is_win; then
|
||||
/c/Windows/System32/cmd.exe "/c $(cygpath -w $(pwd)/goma_ctl.bat) stop"
|
||||
else
|
||||
$PYTHON ./goma_ctl.py stop
|
||||
fi
|
||||
else
|
||||
echo "ERROR: unknown command - $1"
|
||||
echo "Use --help to list all available commands"
|
||||
exit 1
|
||||
fi
|
|
@ -1,32 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER=$(pwd -P)
|
||||
|
||||
# 1. get current version
|
||||
CURRENT_BETA_VERSION=$(curl https://omahaproxy.appspot.com/all | grep "win64,beta" | cut -d ',' -f 3)
|
||||
VERSION_INFO_JSON=$(curl "https://omahaproxy.appspot.com/deps.json?version=$CURRENT_BETA_VERSION")
|
||||
|
||||
NODE_SCRIPT=$(cat <<EOF
|
||||
const json = JSON.parse(fs.readFileSync(0));
|
||||
console.log([
|
||||
'# CURRENT_VERSION: ' + json.chromium_version,
|
||||
'# BRANCH_BASE_POSITION: ' + json.chromium_base_position,
|
||||
'BRANCH_COMMIT="' + json.chromium_commit + '"',
|
||||
].join('\n'));
|
||||
EOF
|
||||
)
|
||||
NEW_CONFIG=$(echo "${VERSION_INFO_JSON}" | node -e "${NODE_SCRIPT}")
|
||||
CURRENT_CONFIG=$(cat "${SCRIPT_FOLDER}/UPSTREAM_CONFIG.sh")
|
||||
|
||||
if [[ "${CURRENT_CONFIG}" == "${NEW_CONFIG}" ]]; then
|
||||
echo "No changes!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "${NEW_CONFIG}" > "${SCRIPT_FOLDER}/UPSTREAM_CONFIG.sh"
|
||||
BUILD_NUMBER=$(cat "${SCRIPT_FOLDER}/BUILD_NUMBER")
|
||||
echo $(( $BUILD_NUMBER + 1 )) > "${SCRIPT_FOLDER}/BUILD_NUMBER"
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
|
||||
if [[ ($1 == '--help') || ($1 == '-h') ]]; then
|
||||
echo "usage: clean.sh [firefox|webkit|firefox-beta]"
|
||||
echo
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ $# == 0 ]]; then
|
||||
echo "missing browser: 'firefox' or 'webkit'"
|
||||
echo "try './clean.sh --help' for more information"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CMD="$1"
|
||||
shift
|
||||
if [[ ("$CMD" == "firefox") || ("$CMD" == "firefox/") || ("$CMD" == "ff") ]]; then
|
||||
bash ./firefox/clean.sh "$@"
|
||||
elif [[ ("$CMD" == "firefox-beta") || ("$CMD" == "ff-beta") ]]; then
|
||||
bash ./firefox-beta/clean.sh "$@"
|
||||
elif [[ ("$CMD" == "webkit") || ("$CMD" == "webkit/") || ("$CMD" == "wk") ]]; then
|
||||
bash ./webkit/clean.sh "$@"
|
||||
elif [[ ("$CMD" == "chromium") || ("$CMD" == "chromium/") || ("$CMD" == "cr") ]]; then
|
||||
bash ./chromium/clean.sh "$@"
|
||||
elif [[ ("$CMD" == "winldd") ]]; then
|
||||
bash ./winldd/clean.sh "$@"
|
||||
elif [[ ("$CMD" == "ffmpeg") ]]; then
|
||||
bash ./ffmpeg/clean.sh "$@"
|
||||
else
|
||||
echo ERROR: unknown browser to build - "$CMD"
|
||||
exit 1
|
||||
fi
|
||||
|
|
@ -1,98 +0,0 @@
#!/bin/bash
# This script is designed to build Firefox & WebKit on various Linux
# distributions inside docker containers.
set -e
set +x
set -o pipefail

if [[ ($1 == '--help') || ($1 == '-h') ]]; then
  echo "usage: $(basename "$0") [webkit-ubuntu-20.04|firefox-debian-11|...] [build|test|compile|enter|cleanup]"
  echo
  echo "Builds WebKit or Firefox browser inside a given Linux distribution"
  exit 0
fi

trap "cd $(pwd -P)" EXIT
cd "$(dirname "$0")"
SCRIPT_FOLDER="$(pwd -P)"

export BUILD_FLAVOR="${1}"
export BROWSER_NAME=""

DOCKERFILE=""

if [[ "${BUILD_FLAVOR}" == "firefox-beta-"* ]]; then
  DOCKERFILE="${SCRIPT_FOLDER}/firefox-beta/${BUILD_FLAVOR#firefox-beta-}.dockerfile"
  BROWSER_NAME="firefox-beta"
elif [[ "${BUILD_FLAVOR}" == "firefox-"* ]]; then
  DOCKERFILE="${SCRIPT_FOLDER}/firefox/${BUILD_FLAVOR#firefox-}.dockerfile"
  BROWSER_NAME="firefox"
elif [[ "${BUILD_FLAVOR}" == "webkit-"* ]]; then
  DOCKERFILE="${SCRIPT_FOLDER}/webkit/${BUILD_FLAVOR#webkit-}.dockerfile"
  BROWSER_NAME="webkit"
else
  echo "ERROR: unknown build flavor - ${BUILD_FLAVOR}"
  exit 1
fi

if [[ "${BUILD_FLAVOR}" == *"-arm64" ]]; then
  EXPECTED_ARCH="arm64"
  DOCKER_PLATFORM="linux/arm64"
else
  EXPECTED_ARCH="x86_64"
  DOCKER_PLATFORM="linux/amd64"
fi

if [[ $(arch) != "${EXPECTED_ARCH}" ]]; then
  echo "ERROR: host architecture $(arch) does not match expected architecture - ${EXPECTED_ARCH}"
  exit 1
fi

DOCKER_IMAGE_NAME="${BUILD_FLAVOR}"
DOCKER_CONTAINER_NAME="${BUILD_FLAVOR}"
DOCKER_ARGS=$(echo \
  --env CI \
  --env BUILD_FLAVOR \
  --env BROWSER_NAME \
  --env TELEGRAM_BOT_KEY \
  --env AZ_ACCOUNT_NAME \
  --env AZ_ACCOUNT_KEY \
  --env GITHUB_SERVER_URL \
  --env GITHUB_REPOSITORY \
  --env GITHUB_RUN_ID \
  --env GH_TOKEN \
  --env DEBIAN_FRONTEND=noninteractive \
  --env TZ="America/Los_Angeles"
)

if [[ "$2" == "build" ]]; then
  docker build \
    --build-arg ARG_BUILD_FLAVOR="${BUILD_FLAVOR}" \
    --build-arg ARG_BROWSER_NAME="${BROWSER_NAME}" \
    --no-cache \
    --platform "${DOCKER_PLATFORM}" \
    -t "${DOCKER_IMAGE_NAME}" \
    -f "${DOCKERFILE}" .
elif [[ "$2" == "test" ]]; then
  docker run --rm ${DOCKER_ARGS} --init --name "${DOCKER_CONTAINER_NAME}" --platform "${DOCKER_PLATFORM}" -it "${DOCKER_IMAGE_NAME}" /bin/bash -c '
    CI=1 ./browser_patches/prepare_checkout.sh "${BROWSER_NAME}"
    ./browser_patches/build.sh "${BROWSER_NAME}" --full
    ./browser_patches/${BROWSER_NAME}/archive.sh $PWD/archive.zip
  '
elif [[ "$2" == "compile" ]]; then
  docker run --rm ${DOCKER_ARGS} --init --name "${DOCKER_CONTAINER_NAME}" --platform "${DOCKER_PLATFORM}" -t "${DOCKER_IMAGE_NAME}" /bin/bash -c '
    ./browser_patches/checkout_build_archive_upload.sh "${BUILD_FLAVOR}"
  '
elif [[ "$2" == "enter" ]]; then
  docker run --rm ${DOCKER_ARGS} --init --name "${DOCKER_CONTAINER_NAME}" --platform "${DOCKER_PLATFORM}" -it "${DOCKER_IMAGE_NAME}" /bin/bash
elif [[ "$2" == "cleanup" ]]; then
  docker kill "${DOCKER_CONTAINER_NAME}" || true
  # Wait for container to stop
  docker wait "${DOCKER_CONTAINER_NAME}" || true
  docker rmi "${DOCKER_IMAGE_NAME}"
  docker system prune -f
else
  echo "ERROR: unknown command - $2"
  exit 1
fi
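For orientation, a typical sequence of invocations might look like the sketch below; the flavor name is just an example taken from the dockerfiles in this folder:

```
./docker_build.sh firefox-ubuntu-22.04 build
./docker_build.sh firefox-ubuntu-22.04 compile
./docker_build.sh firefox-ubuntu-22.04 cleanup
```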
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 debian:11

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,60 +0,0 @@
FROM --platform=linux/amd64 ubuntu:18.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
# Firefox build on Ubuntu 18.04 requires Python3.8 to run its build scripts.
RUN apt-get install -y python3.8 python3.8-dev python3.8-distutils && \
    # Point python3 to python3.8
    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2 && \
    curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python3 get-pip.py && \
    rm get-pip.py

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,62 +0,0 @@
FROM --platform=linux/arm64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 20.04 aarch64 specific: default to clang-12.
RUN apt-get install -y clang-12
ENV CC=/usr/bin/clang-12
ENV CXX=/usr/bin/clang++-12

# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,62 +0,0 @@
FROM --platform=linux/arm64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 22.04 aarch64 specific: default to clang-14.
RUN apt-get install -y clang-14
ENV CC=/usr/bin/clang-14
ENV CXX=/usr/bin/clang++-14

# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 debian:11

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,60 +0,0 @@
FROM --platform=linux/amd64 ubuntu:18.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
# Firefox build on Ubuntu 18.04 requires Python3.8 to run its build scripts.
RUN apt-get install -y python3.8 python3.8-dev python3.8-distutils && \
    # Point python3 to python3.8
    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2 && \
    curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python3 get-pip.py && \
    rm get-pip.py

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,62 +0,0 @@
FROM --platform=linux/arm64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 20.04 aarch64 specific: default to clang-12.
RUN apt-get install -y clang-12
ENV CC=/usr/bin/clang-12
ENV CXX=/usr/bin/clang++-12

# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,62 +0,0 @@
FROM --platform=linux/arm64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 22.04 aarch64 specific: default to clang-14.
RUN apt-get install -y clang-14
ENV CC=/usr/bin/clang-14
ENV CXX=/usr/bin/clang++-14

# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,54 +0,0 @@
FROM --platform=linux/amd64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo
# Install Python3 with distutils
RUN apt-get install -y python3 python3-dev python3-pip python3-distutils

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message to pwuser
COPY --chown=pwuser ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="${PATH}:/home/pwuser/.cargo/bin"

RUN mkdir -p /home/pwuser/.mozbuild
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,29 +0,0 @@
source /etc/os-release
if [[ -z "${BUILD_FLAVOR}" ]]; then
  BUILD_FLAVOR='<unknown>'
fi
if [[ -z "${BROWSER_NAME}" ]]; then
  BROWSER_NAME='<browser_name>'
fi

echo "======================================================================"
echo "Welcome to the ${BUILD_FLAVOR} environment!"
echo "- distro: ${PRETTY_NAME}"
echo "- arch: $(arch)"

if [[ -n "${CXX}" ]]; then
  echo "- CXX: ${CXX}"
fi
if [[ -n "${CC}" ]]; then
  echo "- CC: ${CC}"
fi

echo
echo "NOTE: Playwright clone is shallow (has no git history); to unshallow, run:"
echo "    git fetch --unshallow"
echo
echo "To get started, prepare your browser checkout:"
echo "    CI=1 ./browser_patches/prepare_checkout.sh ${BROWSER_NAME}"
echo
echo "======================================================================"
@ -1,48 +0,0 @@
FROM --platform=linux/amd64 debian:11

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

# Debian 11 specific: add contrib & non-free repositories.
RUN echo "deb http://ftp.us.debian.org/debian bullseye main contrib non-free" >> /etc/apt/sources.list.d/pwbuild.list

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,67 +0,0 @@
FROM --platform=linux/amd64 ubuntu:18.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 18.04 specific: update CMake. Default CMake on Ubuntu 18.04 is 3.10, whereas WebKit requires 3.12+.
RUN apt purge --auto-remove cmake && \
    apt-get install -y wget software-properties-common && \
    wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null && \
    apt-add-repository "deb https://apt.kitware.com/ubuntu/ bionic main" && \
    apt-get update && apt-get install -y cmake

# Ubuntu 18.04 specific: default to gcc-9.
RUN add-apt-repository ppa:ubuntu-toolchain-r/test && \
    apt-get update && \
    apt-get install -y gcc-9 g++-9
ENV CC=/usr/bin/gcc-9
ENV CXX=/usr/bin/g++-9

# Install Python3 with distutils
RUN apt-get install -y python3.8 python3.8-dev python3.8-distutils && \
    # Point python3 to python3.8
    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2 && \
    curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python3 get-pip.py && \
    rm get-pip.py

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,56 +0,0 @@
FROM --platform=linux/arm64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 20.04 aarch64 specific: default to clang-12.
RUN apt-get install -y clang-12
ENV CC=/usr/bin/clang-12
ENV CXX=/usr/bin/clang++-12

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN apt-get install -y python3 \
        python3-dev \
        python3-pip \
        python3-distutils && \
    pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,45 +0,0 @@
FROM --platform=linux/amd64 ubuntu:20.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,56 +0,0 @@
FROM --platform=linux/arm64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Ubuntu 22.04 aarch64 specific: default to clang-12.
RUN apt-get install -y clang-12
ENV CC=/usr/bin/clang-12
ENV CXX=/usr/bin/clang++-12

# Install AZ CLI with Python since they do not ship
# aarch64 to APT: https://github.com/Azure/azure-cli/issues/7368
# Pin so future releases do not break us.
RUN apt-get install -y python3 \
        python3-dev \
        python3-pip \
        python3-distutils && \
    pip3 install azure-cli==2.38.0

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,45 +0,0 @@
FROM --platform=linux/amd64 ubuntu:22.04

# Reexport --build-arg as environment variables
ARG ARG_BUILD_FLAVOR
ARG ARG_BROWSER_NAME
ENV BUILD_FLAVOR="${ARG_BUILD_FLAVOR}"
ENV BROWSER_NAME="${ARG_BROWSER_NAME}"

# These are needed to auto-install tzdata. See https://serverfault.com/questions/949991/how-to-install-tzdata-on-a-ubuntu-docker-image
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=America/Los_Angeles

RUN apt-get update && apt-get install -y curl \
        build-essential \
        git-core \
        zip unzip \
        tzdata \
        sudo

# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

# Install node16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs

# Create the pwuser and make it passwordless sudoer.
RUN adduser --disabled-password --gecos "" pwuser && \
    echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers

# mitigate git clone issues on CI
# See https://stdworkflow.com/877/error-rpc-failed-curl-56-gnutls-recv-error-54-error-in-the-pull-function
RUN git config --system user.email "devops@playwright.dev" && \
    git config --system user.name "Playwright DevOps" && \
    git config --system http.postBuffer 524288000 && \
    git config --system http.lowSpeedLimit 0 && \
    git config --system http.lowSpeedTime 999999

# Show welcome message
COPY ./pwuser_bashrc /home/pwuser/.bashrc

USER pwuser
RUN cd /home/pwuser && git clone --depth=1 https://github.com/microsoft/playwright

WORKDIR /home/pwuser/playwright
@ -1,177 +0,0 @@
#!/bin/bash
set -e
set +x

trap "cd $(pwd -P)" EXIT
cd "$(dirname "$0")"

REMOTE_BROWSER_UPSTREAM="browser_upstream"
BUILD_BRANCH="playwright-build"

# COLORS
RED=$'\e[1;31m'
GRN=$'\e[1;32m'
YEL=$'\e[1;33m'
END=$'\e[0m'

if [[ ($1 == '--help') || ($1 == '-h') ]]; then
  echo "usage: export.sh [firefox|firefox-beta|webkit] [custom_checkout_path]"
  echo
  echo "Exports a patch from the current branch of the checkout to the browser folder."
  echo "The checkout has to be 'prepared', meaning that 'prepare_checkout.sh' should be"
  echo "run against it first."
  echo
  echo "You can optionally specify custom_checkout_path if you have a browser checkout somewhere else"
  echo "and wish to export patches from it."
  echo
  exit 0
fi

if [[ $# == 0 ]]; then
  echo "missing browser: 'firefox' or 'webkit'"
  echo "try './export.sh --help' for more information"
  exit 1
fi

# FRIENDLY_CHECKOUT_PATH is used only for logging.
FRIENDLY_CHECKOUT_PATH="";
BUILD_NUMBER_UPSTREAM_URL=""
CHECKOUT_PATH=""
EXPORT_PATH=""
EXTRA_FOLDER_PW_PATH=""
EXTRA_FOLDER_CHECKOUT_RELPATH=""
if [[ ("$1" == "firefox") || ("$1" == "firefox/") || ("$1" == "ff") ]]; then
  if [[ -z "${FF_CHECKOUT_PATH}" ]]; then
    FRIENDLY_CHECKOUT_PATH='$HOME/firefox';
    CHECKOUT_PATH="$HOME/firefox"
  else
    echo "WARNING: using checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
    CHECKOUT_PATH="${FF_CHECKOUT_PATH}"
    FRIENDLY_CHECKOUT_PATH="<FF_CHECKOUT_PATH>"
  fi

  EXTRA_FOLDER_PW_PATH="$PWD/firefox/juggler"
  EXTRA_FOLDER_CHECKOUT_RELPATH="juggler"
  EXPORT_PATH="$PWD/firefox"
  BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/firefox/BUILD_NUMBER"
  source "./firefox/UPSTREAM_CONFIG.sh"
elif [[ ("$1" == "firefox-beta") || ("$1" == "ff-beta") ]]; then
  if [[ -z "${FF_CHECKOUT_PATH}" ]]; then
    FRIENDLY_CHECKOUT_PATH='$HOME/firefox';
    CHECKOUT_PATH="$HOME/firefox"
  else
    echo "WARNING: using checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
    CHECKOUT_PATH="${FF_CHECKOUT_PATH}"
    FRIENDLY_CHECKOUT_PATH="<FF_CHECKOUT_PATH>"
  fi

  EXTRA_FOLDER_PW_PATH="$PWD/firefox-beta/juggler"
  EXTRA_FOLDER_CHECKOUT_RELPATH="juggler"
  EXPORT_PATH="$PWD/firefox-beta"
  BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/firefox-beta/BUILD_NUMBER"
  source "./firefox-beta/UPSTREAM_CONFIG.sh"
elif [[ ("$1" == "webkit") || ("$1" == "webkit/") || ("$1" == "wk") ]]; then
  if [[ -z "${WK_CHECKOUT_PATH}" ]]; then
    FRIENDLY_CHECKOUT_PATH='$HOME/webkit';
    CHECKOUT_PATH="$HOME/webkit"
  else
    echo "WARNING: using checkout path from WK_CHECKOUT_PATH env: ${WK_CHECKOUT_PATH}"
    CHECKOUT_PATH="${WK_CHECKOUT_PATH}"
    FRIENDLY_CHECKOUT_PATH="<WK_CHECKOUT_PATH>"
  fi

  EXTRA_FOLDER_PW_PATH="$PWD/webkit/embedder/Playwright"
  EXTRA_FOLDER_CHECKOUT_RELPATH="Tools/Playwright"
  EXPORT_PATH="$PWD/webkit"
  BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/webkit/BUILD_NUMBER"
  source "./webkit/UPSTREAM_CONFIG.sh"
else
  echo ERROR: unknown browser to export - "$1"
  exit 1
fi

# we will use this just for beauty.
if [[ $# == 2 ]]; then
  echo "WARNING: using custom checkout path $2"
  CHECKOUT_PATH=$2
  FRIENDLY_CHECKOUT_PATH="<custom_checkout ( $2 )>"
fi

# if there's no checkout folder - bail out.
if ! [[ -d $CHECKOUT_PATH ]]; then
  echo "ERROR: $FRIENDLY_CHECKOUT_PATH is missing - nothing to export."
  exit 1;
else
  echo "-- checking $FRIENDLY_CHECKOUT_PATH exists - OK"
fi

# if folder exists but not a git repository - bail out.
if ! [[ -d $CHECKOUT_PATH/.git ]]; then
  echo "ERROR: $FRIENDLY_CHECKOUT_PATH is not a git repository! Nothing to export."
  exit 1
else
  echo "-- checking $FRIENDLY_CHECKOUT_PATH is a git repo - OK"
fi

# Switch to git repository.
cd "$CHECKOUT_PATH"

# Setting up |$REMOTE_BROWSER_UPSTREAM| remote and fetch the $BASE_BRANCH
if git remote get-url $REMOTE_BROWSER_UPSTREAM >/dev/null; then
  if ! [[ $(git config --get remote.$REMOTE_BROWSER_UPSTREAM.url || echo "") == "$REMOTE_URL" ]]; then
    echo "ERROR: remote $REMOTE_BROWSER_UPSTREAM is not pointing to '$REMOTE_URL'! run 'prepare_checkout.sh' first"
    exit 1
  fi
else
  echo "ERROR: checkout does not have $REMOTE_BROWSER_UPSTREAM; run 'prepare_checkout.sh' first"
  exit 1
fi

# Check if git repo is dirty.
if [[ -n $(git status -s --untracked-files=no) ]]; then
  echo "ERROR: $FRIENDLY_CHECKOUT_PATH has dirty GIT state - aborting export."
  exit 1
else
  echo "-- checking $FRIENDLY_CHECKOUT_PATH is clean - OK"
fi

PATCH_NAME=$(ls -1 "$EXPORT_PATH"/patches)
if [[ -z "$PATCH_NAME" ]]; then
  PATCH_NAME="bootstrap.diff"
  OLD_DIFF=""
else
  OLD_DIFF=$(cat "$EXPORT_PATH"/patches/$PATCH_NAME)
fi

CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
NEW_BASE_REVISION=$(git merge-base $REMOTE_BROWSER_UPSTREAM/"$BASE_BRANCH" "$CURRENT_BRANCH")
NEW_DIFF=$(git diff --diff-algorithm=myers --full-index "$NEW_BASE_REVISION" "$CURRENT_BRANCH" -- . ":!${EXTRA_FOLDER_CHECKOUT_RELPATH}")

# Increment BUILD_NUMBER
BUILD_NUMBER=$(curl ${BUILD_NUMBER_UPSTREAM_URL} | head -1)
BUILD_NUMBER=$((BUILD_NUMBER+1))

echo "REMOTE_URL=\"$REMOTE_URL\"
BASE_BRANCH=\"$BASE_BRANCH\"
BASE_REVISION=\"$NEW_BASE_REVISION\"" > "$EXPORT_PATH"/UPSTREAM_CONFIG.sh
echo "$NEW_DIFF" > "$EXPORT_PATH"/patches/$PATCH_NAME
echo $BUILD_NUMBER > "$EXPORT_PATH"/BUILD_NUMBER
echo "Changed: $(git config user.email) $(date)" >> "$EXPORT_PATH"/BUILD_NUMBER

echo "-- exporting standalone folder"
rm -rf "${EXTRA_FOLDER_PW_PATH}"
mkdir -p $(dirname "${EXTRA_FOLDER_PW_PATH}")
cp -r "${EXTRA_FOLDER_CHECKOUT_RELPATH}" "${EXTRA_FOLDER_PW_PATH}"

NEW_BASE_REVISION_TEXT="$NEW_BASE_REVISION (not changed)"
if [[ "$NEW_BASE_REVISION" != "$BASE_REVISION" ]]; then
  NEW_BASE_REVISION_TEXT="$YEL$NEW_BASE_REVISION (changed)$END"
fi

echo "=============================================================="
echo "    Repository:               $FRIENDLY_CHECKOUT_PATH"
echo "    Changes between branches: $REMOTE_BROWSER_UPSTREAM/$BASE_BRANCH..$CURRENT_BRANCH"
echo "    BASE_REVISION:            $NEW_BASE_REVISION_TEXT"
echo "    BUILD_NUMBER:             $YEL$BUILD_NUMBER (changed)$END"
echo "=============================================================="
echo
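A minimal example run, assuming a prepared checkout at the default location (paths are illustrative):

```
# Export the Firefox patch from $HOME/firefox.
./export.sh firefox

# Export the WebKit patch from a checkout living elsewhere.
./export.sh webkit ~/checkouts/webkit
```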
@ -1,2 +0,0 @@
build/
output/
@ -1 +0,0 @@
1007
@ -1,53 +0,0 @@
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

ZLIB_VERSION="v1.2.11"
ZLIB_CONFIG="--static"

LIBVPX_VERSION="v1.9.0"
LIBVPX_CONFIG="--enable-static \
               --disable-shared \
               --disable-docs \
               --disable-tools \
               --disable-unit-tests \
               --disable-examples"

FFMPEG_VERSION="n4.3.1"
FFMPEG_CONFIG="--extra-version=playwright-build-$(cat ./BUILD_NUMBER | head -1) \
               --disable-debug \
               --disable-autodetect \
               --disable-everything \
               --enable-ffmpeg \
               --enable-protocol=pipe \
               --enable-protocol=file \
               --enable-parser=mjpeg \
               --enable-decoder=mjpeg \
               --enable-demuxer=image2pipe \
               --enable-filter=pad \
               --enable-filter=crop \
               --enable-filter=scale \
               --enable-muxer=webm \
               --enable-libvpx \
               --enable-static \
               --enable-encoder=libvpx_vp8 \
               --enable-decoder=libvpx_vp8 \
               --enable-demuxer=matroska \
               --enable-encoder=png \
               --enable-zlib \
               --enable-muxer=image2 \
               --disable-pthreads \
               --disable-iconv \
               --disable-w32threads \
               --disable-bzlib"
@ -1,6 +0,0 @@
ffmpeg-mac.zip
ffmpeg-mac-arm64.zip
ffmpeg-linux.zip
ffmpeg-linux-arm64.zip
ffmpeg-win64.zip
@ -1,57 +0,0 @@
# Playwright and FFMPEG

Playwright requires FFMPEG to produce screencasts and bundles FFMPEG binaries for Mac, Linux, and Windows.

## Configuration

We compile `libvpx` and `ffmpeg` only. Their source versions and build
configurations are defined in [`//browser_patches/ffmpeg/CONFIG.sh`](./CONFIG.sh).

## Building `ffmpeg-linux`

Compilation scripts are based on:
- https://trac.ffmpeg.org/wiki/CompilationGuide/Generic

Prerequisites:
- Mac or Linux
- Docker

Building:

```
~/playwright$ ./browser_patches/ffmpeg/build.sh --linux
```

## Building `ffmpeg-mac`

Compilation scripts are based on:
- https://trac.ffmpeg.org/wiki/CompilationGuide/Generic
- https://trac.ffmpeg.org/wiki/CompilationGuide/macOS

Prerequisites:
- Mac
- Xcode command line tools: `xcode-select --install`
- [homebrew](https://brew.sh/)

Building:

```
~/playwright$ ./browser_patches/ffmpeg/build.sh --mac
```

## Building `ffmpeg-win*`

Cross-compilation scripts are based on:
- https://trac.ffmpeg.org/wiki/CompilationGuide/Generic
- https://trac.ffmpeg.org/wiki/CompilationGuide/CrossCompilingForWindows

Prerequisites:
- Mac or Linux
- [Docker](https://www.docker.com/)

Building:

```
~/playwright$ ./browser_patches/ffmpeg/build.sh --cross-compile-win64
```
@ -1,34 +0,0 @@
#!/bin/bash
set -e
set +x

if [[ ("$1" == "-h") || ("$1" == "--help") ]]; then
  echo "usage: $(basename $0) [output-absolute-path]"
  echo
  echo "Generates a distributable .zip archive from the ./output folder that was previously built."
  echo
  exit 0
fi

ZIP_PATH=$1
if [[ $ZIP_PATH != /* ]]; then
  echo "ERROR: path $ZIP_PATH is not absolute"
  exit 1
fi
if [[ $ZIP_PATH != *.zip ]]; then
  echo "ERROR: path $ZIP_PATH must have .zip extension"
  exit 1
fi
if [[ -f $ZIP_PATH ]]; then
  echo "ERROR: path $ZIP_PATH exists; can't do anything."
  exit 1
fi
if ! [[ -d $(dirname $ZIP_PATH) ]]; then
  echo "ERROR: folder for path $ZIP_PATH does not exist."
  exit 1
fi

trap "cd $(pwd -P)" EXIT
cd "$(dirname $0)"

cp output/ffmpeg.zip $ZIP_PATH
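For illustration, the script expects an absolute .zip path, e.g.:

```
./browser_patches/ffmpeg/archive.sh "$PWD/ffmpeg-linux.zip"
```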
@ -1,93 +0,0 @@
#!/bin/bash

# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -ex

function die() { echo "$@"; exit 1; }

PREFIX="${HOME}/prefix"

if [[ "$(uname)" != "Linux" ]]; then
  echo "ERROR: this script is designed to be run on Linux. Can't run on $(uname)"
  exit 1
fi

output_path="$1"
if [[ -z "${output_path}" ]]; then
  die "ERROR: output path is not specified"
elif [[ "${output_path}" != /* ]]; then
  die "ERROR: output path ${output_path} is not absolute"
elif ! [[ -d $(dirname "${output_path}") ]]; then
  die "ERROR: folder for output path ${output_path} does not exist."
fi

function build_zlib {
  cd "${HOME}"
  git clone https://github.com/madler/zlib
  cd zlib
  git checkout "${ZLIB_VERSION}"
  ./configure --prefix="${PREFIX}" ${ZLIB_CONFIG}
  make && make install
}

function build_libvpx {
  cd "${HOME}"
  git clone https://chromium.googlesource.com/webm/libvpx
  cd libvpx
  git checkout "${LIBVPX_VERSION}"
  # Compile libvpx according to the docs:
  # - https://chromium.googlesource.com/webm/libvpx/+/main/README
  ./configure --prefix="${PREFIX}" ${LIBVPX_CONFIG}
  make && make install
}

function build_ffmpeg {
  cd "${HOME}"
  git clone git://source.ffmpeg.org/ffmpeg.git
  cd ffmpeg
  git checkout "${FFMPEG_VERSION}"
  export PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig"
  # Prohibit pkg-config from using Linux system-installed libs.
  export PKG_CONFIG_LIBDIR=

  ./configure --pkg-config=pkg-config \
              --pkg-config-flags="--static" \
              --extra-cflags="-I/${PREFIX}/include" \
              --extra-ldflags="-L/${PREFIX}/lib -static" \
              --prefix="${PREFIX}" \
              --bindir="${PWD}/bin" \
              ${FFMPEG_CONFIG}
  make && make install
}

trap "cd $(pwd -P)" EXIT
cd "$(dirname $0)"

source ./CONFIG.sh

apt-get update
apt-get install -y git make yasm pkg-config

build_zlib
build_libvpx
build_ffmpeg

# Put the resulting executable where we were asked to.
cp "${HOME}/ffmpeg/bin/ffmpeg" "${output_path}"
strip "${output_path}"

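For reference, the containerized invocation of this script; it mirrors the `docker run` line in `build.sh` further down, shown standalone so the argument contract is visible (the single positional argument is the absolute output path inside the container):

```bash
# Run the Linux build in an ubuntu:18.04 container; the ffmpeg folder is
# mounted at /host and the stripped binary lands in ./output/ffmpeg-linux.
docker run --init --rm -v "${PWD}":/host ubuntu:18.04 \
  bash /host/build-linux.sh /host/output/ffmpeg-linux
```
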
@ -1,106 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
set -e
|
||||
|
||||
function die() { echo "$@"; exit 1; }
|
||||
|
||||
if [[ "$(uname)" != "Darwin" ]]; then
|
||||
die "ERROR: this script is designed to be run on OSX. Can't run on $(uname)"
|
||||
fi
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname $0)"
|
||||
SCRIPT_FOLDER="$(pwd -P)"
|
||||
source "${SCRIPT_FOLDER}/../utils.sh"
|
||||
|
||||
selectXcodeVersionOrDie $(node "${SCRIPT_FOLDER}/../get_xcode_version.js" ffmpeg)
|
||||
|
||||
source ./CONFIG.sh
|
||||
|
||||
BUILDDIR="${PWD}/build"
|
||||
PREFIX="${BUILDDIR}/osx_prefix"
|
||||
OUTPUT_PATH="${PWD}/output/ffmpeg-mac"
|
||||
|
||||
function build_zlib {
|
||||
cd "${BUILDDIR}"
|
||||
git clone https://github.com/madler/zlib
|
||||
cd zlib
|
||||
git checkout "${ZLIB_VERSION}"
|
||||
./configure --prefix="${PREFIX}" ${ZLIB_CONFIG}
|
||||
make && make install
|
||||
}
|
||||
|
||||
function build_libvpx {
|
||||
cd "${BUILDDIR}"
|
||||
git clone https://chromium.googlesource.com/webm/libvpx
|
||||
cd libvpx
|
||||
git checkout "${LIBVPX_VERSION}"
|
||||
# Compile libvpx according to the docs:
|
||||
# - https://chromium.googlesource.com/webm/libvpx/+/main/README
|
||||
./configure --prefix="${PREFIX}" ${LIBVPX_CONFIG}
|
||||
make && make install
|
||||
}
|
||||
|
||||
function build_ffmpeg {
|
||||
cd "${BUILDDIR}"
|
||||
git clone git://source.ffmpeg.org/ffmpeg.git
|
||||
cd ffmpeg
|
||||
git checkout "${FFMPEG_VERSION}"
|
||||
export PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig"
|
||||
# Prohibit pkg-config from using system installed libs.
|
||||
export PKG_CONFIG_LIBDIR=
|
||||
|
||||
./configure --pkg-config=pkg-config \
|
||||
--pkg-config-flags="--static" \
|
||||
--extra-cflags="-I/${PREFIX}/include" \
|
||||
--extra-ldflags="-L/${PREFIX}/lib" \
|
||||
--prefix="${PREFIX}" \
|
||||
--bindir="${PWD}/bin" \
|
||||
${FFMPEG_CONFIG}
|
||||
make && make install
|
||||
}
|
||||
|
||||
REQUIERED_BUILD_TOOLS=("git" "make" "yasm" "pkg-config")
|
||||
missing_build_tools=()
|
||||
|
||||
for dependency in ${REQUIERED_BUILD_TOOLS[@]}; do
|
||||
if ! command -v "${dependency}" >/dev/null; then
|
||||
missing_build_tools+=("${dependency}")
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#missing_build_tools[@]} != 0 ]]; then
|
||||
if [[ "$1" == "--full" ]]; then
|
||||
brew install ${missing_build_tools[@]}
|
||||
else
|
||||
die "ERROR: missing dependencies! Please run: brew install ${missing_build_tools[@]}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Cleanup
|
||||
set -x
|
||||
rm -rf "${BUILDDIR}"
|
||||
mkdir -p "${BUILDDIR}"
|
||||
|
||||
build_zlib
|
||||
build_libvpx
|
||||
build_ffmpeg
|
||||
|
||||
# put resulting executable where we were asked to
|
||||
mkdir -p $(dirname "${OUTPUT_PATH}")
|
||||
cp "${BUILDDIR}/ffmpeg/bin/ffmpeg" "${OUTPUT_PATH}"
|
||||
strip "${OUTPUT_PATH}"
|
|
@ -1,78 +0,0 @@
#!/bin/bash

# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e
set +x

trap "cd $(pwd -P)" EXIT
cd "$(dirname $0)"

if [[ ("$1" == "-h") || ("$1" == "--help") ]]; then
  echo "usage: $(basename $0) [--mac|--linux|--cross-compile-win64] [--full]"
  echo
  echo "Build ffmpeg for the given platform"
  echo
  exit 0
fi

if [[ -z "$1" ]]; then
  echo "ERROR: expected build target. Run with --help for more info"
  exit 1
fi

LICENSE_FILE="COPYING.LGPLv2.1"

rm -rf ./output
mkdir -p output
cp ffmpeg-license/"${LICENSE_FILE}" output

dockerflags=""
# Use |-it| when running docker to support Ctrl-C if the script runs in an interactive terminal.
# Otherwise (e.g. cronjob) - do nothing.
if [[ -t 0 ]]; then
  dockerflags="-it"
fi

function ensure_docker_or_die() {
  if ! command -v docker >/dev/null; then
    echo "ERROR: docker is required for the script"
    exit 1
  fi
}

if [[ "$1" == "--mac" ]]; then
  bash ./build-mac.sh $2
  cd output && zip ffmpeg.zip ffmpeg-mac "${LICENSE_FILE}"
elif [[ "$1" == "--linux" ]]; then
  ensure_docker_or_die

  time docker run --init --rm -v"${PWD}":/host ${dockerflags} ubuntu:18.04 bash /host/build-linux.sh /host/output/ffmpeg-linux
  cd output && zip ffmpeg.zip ffmpeg-linux "${LICENSE_FILE}"
elif [[ "$1" == "--cross-compile-win64" ]]; then
  ensure_docker_or_die

  time docker run --init --rm -v"${PWD}":/host ${dockerflags} ubuntu:18.04 bash /host/crosscompile-from-linux.sh --win64 /host/output/ffmpeg-win64.exe
  cd output && zip ffmpeg.zip ffmpeg-win64.exe "${LICENSE_FILE}"
elif [[ "$1" == "--cross-compile-linux-arm64" ]]; then
  ensure_docker_or_die

  time docker run --init --rm -v"${PWD}":/host ${dockerflags} ubuntu:18.04 bash /host/crosscompile-from-linux.sh --linux-arm64 /host/output/ffmpeg-linux
  cd output && zip ffmpeg.zip ffmpeg-linux "${LICENSE_FILE}"
else
  echo "ERROR: unsupported platform - $1"
  exit 1
fi

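A hedged sketch of what `./output` is expected to contain after a successful `--linux` run, based on the copy and zip steps above:

```bash
./browser_patches/ffmpeg/build.sh --linux
ls browser_patches/ffmpeg/output/
# COPYING.LGPLv2.1  ffmpeg-linux  ffmpeg.zip
```
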
@ -1,9 +0,0 @@
#!/bin/bash
set -e
set +x

trap "cd $(pwd -P)" EXIT
cd "$(dirname $0)"

rm -rf output

@ -1,147 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
set -ex
|
||||
|
||||
function die() { echo "$@"; exit 1; }
|
||||
|
||||
|
||||
PREFIX="${HOME}/prefix"
|
||||
TOOLCHAIN_PREFIX_64="/usr/bin/x86_64-w64-mingw32-"
|
||||
TOOLCHAIN_PREFIX_ARM64="/usr/bin/aarch64-linux-gnu-"
|
||||
|
||||
arch=""
|
||||
toolchain_prefix=""
|
||||
binary=""
|
||||
|
||||
if [[ "$(uname)" != "Linux" ]]; then
|
||||
echo "ERROR: this script is designed to be run on Linux. Can't run on $(uname)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$1" == "--win64" ]]; then
|
||||
arch="win64";
|
||||
toolchain_prefix="${TOOLCHAIN_PREFIX_64}"
|
||||
binary="ffmpeg.exe"
|
||||
elif [[ "$1" == "--linux-arm64" ]]; then
|
||||
arch="linux-arm64";
|
||||
toolchain_prefix="${TOOLCHAIN_PREFIX_ARM64}"
|
||||
binary="ffmpeg"
|
||||
elif [[ -z "$1" ]]; then
|
||||
die "ERROR: expect --win64 or --linux-arm64 as the first argument"
|
||||
else
|
||||
die "ERROR: unknown arch '$1' - expected --win64 or --linux-arm64"
|
||||
fi
|
||||
|
||||
output_path="$2"
|
||||
if [[ -z "${output_path}" ]]; then
|
||||
die "ERROR: output path is not specified"
|
||||
elif [[ "${output_path}" != /* ]]; then
|
||||
die "ERROR: output path ${output_path} is not absolute"
|
||||
elif ! [[ -d $(dirname "${output_path}") ]]; then
|
||||
die "ERROR: folder for output path ${output_path} does not exist."
|
||||
fi
|
||||
|
||||
function build_zlib {
|
||||
cd "${HOME}"
|
||||
git clone https://github.com/madler/zlib
|
||||
cd zlib
|
||||
git checkout "${ZLIB_VERSION}"
|
||||
./configure --prefix="${PREFIX}" ${ZLIB_CONFIG}
|
||||
make \
|
||||
CC="${toolchain_prefix}gcc" \
|
||||
CXX="${toolchain_prefix}g++" \
|
||||
AR="${toolchain_prefix}ar" \
|
||||
PREFIX="$PREFIX" \
|
||||
RANLIB="${toolchain_prefix}ranlib" \
|
||||
LD="${toolchain_prefix}ld" \
|
||||
STRIP="${toolchain_prefix}strip"
|
||||
make install
|
||||
}
|
||||
|
||||
function build_libvpx {
|
||||
cd "${HOME}"
|
||||
git clone https://chromium.googlesource.com/webm/libvpx
|
||||
cd libvpx
|
||||
git checkout "${LIBVPX_VERSION}"
|
||||
# Cross-compiling libvpx according to the docs:
|
||||
# - https://chromium.googlesource.com/webm/libvpx/+/main/README
|
||||
local target=""
|
||||
if [[ $arch == "win64" ]]; then
|
||||
target="x86_64-win64-gcc";
|
||||
elif [[ $arch == "linux-arm64" ]]; then
|
||||
target="arm64-linux-gcc";
|
||||
else
|
||||
die "ERROR: unsupported arch to compile libvpx - $arch"
|
||||
fi
|
||||
CROSS="${toolchain_prefix}" ./configure --prefix="${PREFIX}" --target="${target}" ${LIBVPX_CONFIG}
|
||||
CROSS="${toolchain_prefix}" make && make install
|
||||
}
|
||||
|
||||
function build_ffmpeg {
|
||||
cd "${HOME}"
|
||||
git clone git://source.ffmpeg.org/ffmpeg.git
|
||||
cd ffmpeg
|
||||
git checkout "${FFMPEG_VERSION}"
|
||||
export PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig"
|
||||
# Prohibit pkg-config from using linux system installed libs.
|
||||
export PKG_CONFIG_LIBDIR=
|
||||
|
||||
local ffmpeg_arch=""
|
||||
local ffmpeg_target_os=""
|
||||
if [[ $arch == "win64" ]]; then
|
||||
ffmpeg_arch="x86_64";
|
||||
ffmpeg_target_os="mingw32"
|
||||
elif [[ $arch == "linux-arm64" ]]; then
|
||||
ffmpeg_arch="arm64";
|
||||
ffmpeg_target_os="linux"
|
||||
else
|
||||
die "ERROR: unsupported arch to compile ffmpeg - $arch"
|
||||
fi
|
||||
./configure --arch="${ffmpeg_arch}" \
|
||||
--target-os="${ffmpeg_target_os}" \
|
||||
--cross-prefix="${toolchain_prefix}" \
|
||||
--disable-doc \
|
||||
--pkg-config=pkg-config \
|
||||
--pkg-config-flags="--static" \
|
||||
--extra-cflags="-I/${PREFIX}/include" \
|
||||
--extra-ldflags="-L/${PREFIX}/lib -static" \
|
||||
--prefix="${PREFIX}" \
|
||||
--bindir="${PWD}/bin" \
|
||||
${FFMPEG_CONFIG}
|
||||
make && make install
|
||||
}
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
cd "$(dirname $0)"
|
||||
|
||||
source ./CONFIG.sh
|
||||
|
||||
apt-get update
|
||||
apt-get install -y git make yasm pkg-config
|
||||
if [[ "${arch}" == "linux-arm64" ]]; then
|
||||
apt-get install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
|
||||
else
|
||||
apt-get install -y mingw-w64
|
||||
fi
|
||||
|
||||
build_zlib
|
||||
build_libvpx
|
||||
build_ffmpeg
|
||||
|
||||
# put resulting executable where we were asked to
|
||||
cp "${HOME}/ffmpeg/bin/${binary}" "${output_path}"
|
||||
${toolchain_prefix}strip "${output_path}"
|
|
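A short recap of the two cross targets handled by the script above, plus a hedged direct invocation (normally this runs through `build.sh` inside an `ubuntu:18.04` container, since the script installs packages with `apt-get`):

```bash
# --win64       -> toolchain /usr/bin/x86_64-w64-mingw32-, libvpx target x86_64-win64-gcc,
#                  ffmpeg arch/os x86_64/mingw32, produces ffmpeg.exe
# --linux-arm64 -> toolchain /usr/bin/aarch64-linux-gnu-,  libvpx target arm64-linux-gcc,
#                  ffmpeg arch/os arm64/linux,    produces ffmpeg
bash ./crosscompile-from-linux.sh --win64 "${PWD}/output/ffmpeg-win64.exe"
```
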
@ -1,502 +0,0 @@
|
|||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 2.1, February 1999
|
||||
|
||||
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
[This is the first released version of the Lesser GPL. It also counts
|
||||
as the successor of the GNU Library Public License, version 2, hence
|
||||
the version number 2.1.]
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
Licenses are intended to guarantee your freedom to share and change
|
||||
free software--to make sure the software is free for all its users.
|
||||
|
||||
This license, the Lesser General Public License, applies to some
|
||||
specially designated software packages--typically libraries--of the
|
||||
Free Software Foundation and other authors who decide to use it. You
|
||||
can use it too, but we suggest you first think carefully about whether
|
||||
this license or the ordinary General Public License is the better
|
||||
strategy to use in any particular case, based on the explanations below.
|
||||
|
||||
When we speak of free software, we are referring to freedom of use,
|
||||
not price. Our General Public Licenses are designed to make sure that
|
||||
you have the freedom to distribute copies of free software (and charge
|
||||
for this service if you wish); that you receive source code or can get
|
||||
it if you want it; that you can change the software and use pieces of
|
||||
it in new free programs; and that you are informed that you can do
|
||||
these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
distributors to deny you these rights or to ask you to surrender these
|
||||
rights. These restrictions translate to certain responsibilities for
|
||||
you if you distribute copies of the library or if you modify it.
|
||||
|
||||
For example, if you distribute copies of the library, whether gratis
|
||||
or for a fee, you must give the recipients all the rights that we gave
|
||||
you. You must make sure that they, too, receive or can get the source
|
||||
code. If you link other code with the library, you must provide
|
||||
complete object files to the recipients, so that they can relink them
|
||||
with the library after making changes to the library and recompiling
|
||||
it. And you must show them these terms so they know their rights.
|
||||
|
||||
We protect your rights with a two-step method: (1) we copyright the
|
||||
library, and (2) we offer you this license, which gives you legal
|
||||
permission to copy, distribute and/or modify the library.
|
||||
|
||||
To protect each distributor, we want to make it very clear that
|
||||
there is no warranty for the free library. Also, if the library is
|
||||
modified by someone else and passed on, the recipients should know
|
||||
that what they have is not the original version, so that the original
|
||||
author's reputation will not be affected by problems that might be
|
||||
introduced by others.
|
||||
|
||||
Finally, software patents pose a constant threat to the existence of
|
||||
any free program. We wish to make sure that a company cannot
|
||||
effectively restrict the users of a free program by obtaining a
|
||||
restrictive license from a patent holder. Therefore, we insist that
|
||||
any patent license obtained for a version of the library must be
|
||||
consistent with the full freedom of use specified in this license.
|
||||
|
||||
Most GNU software, including some libraries, is covered by the
|
||||
ordinary GNU General Public License. This license, the GNU Lesser
|
||||
General Public License, applies to certain designated libraries, and
|
||||
is quite different from the ordinary General Public License. We use
|
||||
this license for certain libraries in order to permit linking those
|
||||
libraries into non-free programs.
|
||||
|
||||
When a program is linked with a library, whether statically or using
|
||||
a shared library, the combination of the two is legally speaking a
|
||||
combined work, a derivative of the original library. The ordinary
|
||||
General Public License therefore permits such linking only if the
|
||||
entire combination fits its criteria of freedom. The Lesser General
|
||||
Public License permits more lax criteria for linking other code with
|
||||
the library.
|
||||
|
||||
We call this license the "Lesser" General Public License because it
|
||||
does Less to protect the user's freedom than the ordinary General
|
||||
Public License. It also provides other free software developers Less
|
||||
of an advantage over competing non-free programs. These disadvantages
|
||||
are the reason we use the ordinary General Public License for many
|
||||
libraries. However, the Lesser license provides advantages in certain
|
||||
special circumstances.
|
||||
|
||||
For example, on rare occasions, there may be a special need to
|
||||
encourage the widest possible use of a certain library, so that it becomes
|
||||
a de-facto standard. To achieve this, non-free programs must be
|
||||
allowed to use the library. A more frequent case is that a free
|
||||
library does the same job as widely used non-free libraries. In this
|
||||
case, there is little to gain by limiting the free library to free
|
||||
software only, so we use the Lesser General Public License.
|
||||
|
||||
In other cases, permission to use a particular library in non-free
|
||||
programs enables a greater number of people to use a large body of
|
||||
free software. For example, permission to use the GNU C Library in
|
||||
non-free programs enables many more people to use the whole GNU
|
||||
operating system, as well as its variant, the GNU/Linux operating
|
||||
system.
|
||||
|
||||
Although the Lesser General Public License is Less protective of the
|
||||
users' freedom, it does ensure that the user of a program that is
|
||||
linked with the Library has the freedom and the wherewithal to run
|
||||
that program using a modified version of the Library.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow. Pay close attention to the difference between a
|
||||
"work based on the library" and a "work that uses the library". The
|
||||
former contains code derived from the library, whereas the latter must
|
||||
be combined with the library in order to run.
|
||||
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License Agreement applies to any software library or other
|
||||
program which contains a notice placed by the copyright holder or
|
||||
other authorized party saying it may be distributed under the terms of
|
||||
this Lesser General Public License (also called "this License").
|
||||
Each licensee is addressed as "you".
|
||||
|
||||
A "library" means a collection of software functions and/or data
|
||||
prepared so as to be conveniently linked with application programs
|
||||
(which use some of those functions and data) to form executables.
|
||||
|
||||
The "Library", below, refers to any such software library or work
|
||||
which has been distributed under these terms. A "work based on the
|
||||
Library" means either the Library or any derivative work under
|
||||
copyright law: that is to say, a work containing the Library or a
|
||||
portion of it, either verbatim or with modifications and/or translated
|
||||
straightforwardly into another language. (Hereinafter, translation is
|
||||
included without limitation in the term "modification".)
|
||||
|
||||
"Source code" for a work means the preferred form of the work for
|
||||
making modifications to it. For a library, complete source code means
|
||||
all the source code for all modules it contains, plus any associated
|
||||
interface definition files, plus the scripts used to control compilation
|
||||
and installation of the library.
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running a program using the Library is not restricted, and output from
|
||||
such a program is covered only if its contents constitute a work based
|
||||
on the Library (independent of the use of the Library in a tool for
|
||||
writing it). Whether that is true depends on what the Library does
|
||||
and what the program that uses the Library does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Library's
|
||||
complete source code as you receive it, in any medium, provided that
|
||||
you conspicuously and appropriately publish on each copy an
|
||||
appropriate copyright notice and disclaimer of warranty; keep intact
|
||||
all the notices that refer to this License and to the absence of any
|
||||
warranty; and distribute a copy of this License along with the
|
||||
Library.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy,
|
||||
and you may at your option offer warranty protection in exchange for a
|
||||
fee.
|
||||
|
||||
2. You may modify your copy or copies of the Library or any portion
|
||||
of it, thus forming a work based on the Library, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) The modified work must itself be a software library.
|
||||
|
||||
b) You must cause the files modified to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
c) You must cause the whole of the work to be licensed at no
|
||||
charge to all third parties under the terms of this License.
|
||||
|
||||
d) If a facility in the modified Library refers to a function or a
|
||||
table of data to be supplied by an application program that uses
|
||||
the facility, other than as an argument passed when the facility
|
||||
is invoked, then you must make a good faith effort to ensure that,
|
||||
in the event an application does not supply such function or
|
||||
table, the facility still operates, and performs whatever part of
|
||||
its purpose remains meaningful.
|
||||
|
||||
(For example, a function in a library to compute square roots has
|
||||
a purpose that is entirely well-defined independent of the
|
||||
application. Therefore, Subsection 2d requires that any
|
||||
application-supplied function or table used by this function must
|
||||
be optional: if the application does not supply it, the square
|
||||
root function must still compute square roots.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Library,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Library, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote
|
||||
it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Library.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Library
|
||||
with the Library (or with a work based on the Library) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may opt to apply the terms of the ordinary GNU General Public
|
||||
License instead of this License to a given copy of the Library. To do
|
||||
this, you must alter all the notices that refer to this License, so
|
||||
that they refer to the ordinary GNU General Public License, version 2,
|
||||
instead of to this License. (If a newer version than version 2 of the
|
||||
ordinary GNU General Public License has appeared, then you can specify
|
||||
that version instead if you wish.) Do not make any other change in
|
||||
these notices.
|
||||
|
||||
Once this change is made in a given copy, it is irreversible for
|
||||
that copy, so the ordinary GNU General Public License applies to all
|
||||
subsequent copies and derivative works made from that copy.
|
||||
|
||||
This option is useful when you wish to copy part of the code of
|
||||
the Library into a program that is not a library.
|
||||
|
||||
4. You may copy and distribute the Library (or a portion or
|
||||
derivative of it, under Section 2) in object code or executable form
|
||||
under the terms of Sections 1 and 2 above provided that you accompany
|
||||
it with the complete corresponding machine-readable source code, which
|
||||
must be distributed under the terms of Sections 1 and 2 above on a
|
||||
medium customarily used for software interchange.
|
||||
|
||||
If distribution of object code is made by offering access to copy
|
||||
from a designated place, then offering equivalent access to copy the
|
||||
source code from the same place satisfies the requirement to
|
||||
distribute the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
5. A program that contains no derivative of any portion of the
|
||||
Library, but is designed to work with the Library by being compiled or
|
||||
linked with it, is called a "work that uses the Library". Such a
|
||||
work, in isolation, is not a derivative work of the Library, and
|
||||
therefore falls outside the scope of this License.
|
||||
|
||||
However, linking a "work that uses the Library" with the Library
|
||||
creates an executable that is a derivative of the Library (because it
|
||||
contains portions of the Library), rather than a "work that uses the
|
||||
library". The executable is therefore covered by this License.
|
||||
Section 6 states terms for distribution of such executables.
|
||||
|
||||
When a "work that uses the Library" uses material from a header file
|
||||
that is part of the Library, the object code for the work may be a
|
||||
derivative work of the Library even though the source code is not.
|
||||
Whether this is true is especially significant if the work can be
|
||||
linked without the Library, or if the work is itself a library. The
|
||||
threshold for this to be true is not precisely defined by law.
|
||||
|
||||
If such an object file uses only numerical parameters, data
|
||||
structure layouts and accessors, and small macros and small inline
|
||||
functions (ten lines or less in length), then the use of the object
|
||||
file is unrestricted, regardless of whether it is legally a derivative
|
||||
work. (Executables containing this object code plus portions of the
|
||||
Library will still fall under Section 6.)
|
||||
|
||||
Otherwise, if the work is a derivative of the Library, you may
|
||||
distribute the object code for the work under the terms of Section 6.
|
||||
Any executables containing that work also fall under Section 6,
|
||||
whether or not they are linked directly with the Library itself.
|
||||
|
||||
6. As an exception to the Sections above, you may also combine or
|
||||
link a "work that uses the Library" with the Library to produce a
|
||||
work containing portions of the Library, and distribute that work
|
||||
under terms of your choice, provided that the terms permit
|
||||
modification of the work for the customer's own use and reverse
|
||||
engineering for debugging such modifications.
|
||||
|
||||
You must give prominent notice with each copy of the work that the
|
||||
Library is used in it and that the Library and its use are covered by
|
||||
this License. You must supply a copy of this License. If the work
|
||||
during execution displays copyright notices, you must include the
|
||||
copyright notice for the Library among them, as well as a reference
|
||||
directing the user to the copy of this License. Also, you must do one
|
||||
of these things:
|
||||
|
||||
a) Accompany the work with the complete corresponding
|
||||
machine-readable source code for the Library including whatever
|
||||
changes were used in the work (which must be distributed under
|
||||
Sections 1 and 2 above); and, if the work is an executable linked
|
||||
with the Library, with the complete machine-readable "work that
|
||||
uses the Library", as object code and/or source code, so that the
|
||||
user can modify the Library and then relink to produce a modified
|
||||
executable containing the modified Library. (It is understood
|
||||
that the user who changes the contents of definitions files in the
|
||||
Library will not necessarily be able to recompile the application
|
||||
to use the modified definitions.)
|
||||
|
||||
b) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (1) uses at run time a
|
||||
copy of the library already present on the user's computer system,
|
||||
rather than copying library functions into the executable, and (2)
|
||||
will operate properly with a modified version of the library, if
|
||||
the user installs one, as long as the modified version is
|
||||
interface-compatible with the version that the work was made with.
|
||||
|
||||
c) Accompany the work with a written offer, valid for at
|
||||
least three years, to give the same user the materials
|
||||
specified in Subsection 6a, above, for a charge no more
|
||||
than the cost of performing this distribution.
|
||||
|
||||
d) If distribution of the work is made by offering access to copy
|
||||
from a designated place, offer equivalent access to copy the above
|
||||
specified materials from the same place.
|
||||
|
||||
e) Verify that the user has already received a copy of these
|
||||
materials or that you have already sent this user a copy.
|
||||
|
||||
For an executable, the required form of the "work that uses the
|
||||
Library" must include any data and utility programs needed for
|
||||
reproducing the executable from it. However, as a special exception,
|
||||
the materials to be distributed need not include anything that is
|
||||
normally distributed (in either source or binary form) with the major
|
||||
components (compiler, kernel, and so on) of the operating system on
|
||||
which the executable runs, unless that component itself accompanies
|
||||
the executable.
|
||||
|
||||
It may happen that this requirement contradicts the license
|
||||
restrictions of other proprietary libraries that do not normally
|
||||
accompany the operating system. Such a contradiction means you cannot
|
||||
use both them and the Library together in an executable that you
|
||||
distribute.
|
||||
|
||||
7. You may place library facilities that are a work based on the
|
||||
Library side-by-side in a single library together with other library
|
||||
facilities not covered by this License, and distribute such a combined
|
||||
library, provided that the separate distribution of the work based on
|
||||
the Library and of the other library facilities is otherwise
|
||||
permitted, and provided that you do these two things:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work
|
||||
based on the Library, uncombined with any other library
|
||||
facilities. This must be distributed under the terms of the
|
||||
Sections above.
|
||||
|
||||
b) Give prominent notice with the combined library of the fact
|
||||
that part of it is a work based on the Library, and explaining
|
||||
where to find the accompanying uncombined form of the same work.
|
||||
|
||||
8. You may not copy, modify, sublicense, link with, or distribute
|
||||
the Library except as expressly provided under this License. Any
|
||||
attempt otherwise to copy, modify, sublicense, link with, or
|
||||
distribute the Library is void, and will automatically terminate your
|
||||
rights under this License. However, parties who have received copies,
|
||||
or rights, from you under this License will not have their licenses
|
||||
terminated so long as such parties remain in full compliance.
|
||||
|
||||
9. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Library or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Library (or any work based on the
|
||||
Library), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Library or works based on it.
|
||||
|
||||
10. Each time you redistribute the Library (or any work based on the
|
||||
Library), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute, link with or modify the Library
|
||||
subject to these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties with
|
||||
this License.
|
||||
|
||||
11. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Library at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Library by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Library.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under any
|
||||
particular circumstance, the balance of the section is intended to apply,
|
||||
and the section as a whole is intended to apply in other circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
12. If the distribution and/or use of the Library is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Library under this License may add
|
||||
an explicit geographical distribution limitation excluding those countries,
|
||||
so that distribution is permitted only in or among countries not thus
|
||||
excluded. In such case, this License incorporates the limitation as if
|
||||
written in the body of this License.
|
||||
|
||||
13. The Free Software Foundation may publish revised and/or new
|
||||
versions of the Lesser General Public License from time to time.
|
||||
Such new versions will be similar in spirit to the present version,
|
||||
but may differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library
|
||||
specifies a version number of this License which applies to it and
|
||||
"any later version", you have the option of following the terms and
|
||||
conditions either of that version or of any later version published by
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
license version number, you may choose any version ever published by
|
||||
the Free Software Foundation.
|
||||
|
||||
14. If you wish to incorporate parts of the Library into other free
|
||||
programs whose distribution conditions are incompatible with these,
|
||||
write to the author to ask for permission. For software which is
|
||||
copyrighted by the Free Software Foundation, write to the Free
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
decision will be guided by the two goals of preserving the free status
|
||||
of all derivatives of our free software and of promoting the sharing
|
||||
and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
|
||||
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
|
||||
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
|
||||
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
|
||||
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
|
||||
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
|
||||
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Libraries
|
||||
|
||||
If you develop a new library, and you want it to be of the greatest
|
||||
possible use to the public, we recommend making it free software that
|
||||
everyone can redistribute and change. You can do so by permitting
|
||||
redistribution under these terms (or, alternatively, under the terms of the
|
||||
ordinary General Public License).
|
||||
|
||||
To apply these terms, attach the following notices to the library. It is
|
||||
safest to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the library's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the library, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the
|
||||
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1990
|
||||
Ty Coon, President of Vice
|
||||
|
||||
That's all there is to it!
|
|
@ -1 +0,0 @@
/checkout

@ -1,2 +0,0 @@
1349
Changed: lushnikov@chromium.org Thu 25 Aug 2022 08:30:01 AM PDT

@ -1,9 +0,0 @@
firefox-beta-mac-11.zip
firefox-beta-mac-11-arm64.zip
firefox-beta-ubuntu-18.04.zip
firefox-beta-ubuntu-20.04.zip
firefox-beta-ubuntu-20.04-arm64.zip
firefox-beta-ubuntu-22.04.zip
firefox-beta-ubuntu-22.04-arm64.zip
firefox-beta-debian-11.zip
firefox-beta-win64.zip

@ -1,3 +0,0 @@
REMOTE_URL="https://github.com/mozilla/gecko-dev"
BASE_BRANCH="beta"
BASE_REVISION="ef2a450dd015d4e6e80469b21cab1dd1a52cfbac"

@ -1,61 +0,0 @@
#!/bin/bash
set -e
set +x

if [[ ("$1" == "-h") || ("$1" == "--help") ]]; then
  echo "usage: $(basename "$0") [output-absolute-path]"
  echo
  echo "Generate distributable .zip archive from Firefox checkout folder that was previously built."
  echo
  exit 0
fi

ZIP_PATH=$1
if [[ $ZIP_PATH != /* ]]; then
  echo "ERROR: path $ZIP_PATH is not absolute"
  exit 1
fi
if [[ $ZIP_PATH != *.zip ]]; then
  echo "ERROR: path $ZIP_PATH must have .zip extension"
  exit 1
fi
if [[ -f $ZIP_PATH ]]; then
  echo "ERROR: path $ZIP_PATH exists; can't do anything."
  exit 1
fi
if ! [[ -d $(dirname "$ZIP_PATH") ]]; then
  echo "ERROR: folder for path $ZIP_PATH does not exist."
  exit 1
fi

trap "cd $(pwd -P)" EXIT
cd "$(dirname "$0")"
SCRIPT_FOLDER="$(pwd -P)"
source "${SCRIPT_FOLDER}/../utils.sh"

if [[ -z "${FF_CHECKOUT_PATH}" ]]; then
  FF_CHECKOUT_PATH="$HOME/firefox"
fi
OBJ_FOLDER="${FF_CHECKOUT_PATH}/obj-build-playwright"

cd "${FF_CHECKOUT_PATH}"

export MH_BRANCH=mozilla-beta
export MOZ_BUILD_DATE=$(date +%Y%m%d%H%M%S)
./mach package
node "${SCRIPT_FOLDER}/install-preferences.js" "${OBJ_FOLDER}/dist/firefox"

if ! [[ -d "$OBJ_FOLDER/dist/firefox" ]]; then
  echo "ERROR: cannot find $OBJ_FOLDER/dist/firefox folder in the firefox checkout. Did you build?"
  exit 1;
fi

if is_win; then
  # Bundle vcruntime140_1.dll - see https://github.com/microsoft/playwright/issues/9974
  cd "$(printMSVCRedistDir)"
  cp -t "${OBJ_FOLDER}/dist/firefox" vcruntime140_1.dll
fi

# Zip the resulting directory.
cd "${OBJ_FOLDER}/dist"
zip -r "$ZIP_PATH" firefox

@ -1,153 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
set +x
|
||||
|
||||
RUST_VERSION="1.61.0"
|
||||
CBINDGEN_VERSION="0.24.3"
|
||||
|
||||
trap "cd $(pwd -P)" EXIT
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
SCRIPT_FOLDER="$(pwd -P)"
|
||||
source "${SCRIPT_FOLDER}/../utils.sh"
|
||||
|
||||
if [[ ! -z "${FF_CHECKOUT_PATH}" ]]; then
|
||||
cd "${FF_CHECKOUT_PATH}"
|
||||
echo "WARNING: checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
|
||||
else
|
||||
cd "$HOME/firefox"
|
||||
fi
|
||||
|
||||
args=("$@")
|
||||
IS_FULL=""
|
||||
IS_JUGGLER=""
|
||||
IS_DEBUG=""
|
||||
for ((i="${#args[@]}"-1; i >= 0; --i)); do
|
||||
case ${args[i]} in
|
||||
--full) IS_FULL="1"; unset args[i]; ;;
|
||||
--juggler) IS_JUGGLER="1"; unset args[i]; ;;
|
||||
--debug) IS_DEBUG="1"; unset args[i]; ;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ -n "${IS_JUGGLER}" && -n "${IS_FULL}" ]]; then
|
||||
echo "ERROR: either --full or --juggler is allowed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "== BUILD CONFIGURATION =="
|
||||
if [[ -n "${IS_FULL}" ]]; then
|
||||
echo "- build type: FULL"
|
||||
elif [[ -n "${IS_JUGGLER}" ]]; then
|
||||
echo "- build type: JUGGLER"
|
||||
else
|
||||
echo "- build type: INCREMENTAL"
|
||||
fi
|
||||
|
||||
if [[ -n "${IS_DEBUG}" ]]; then
|
||||
echo "- debug: YES"
|
||||
else
|
||||
echo "- debug: NO"
|
||||
fi
|
||||
|
||||
echo "========================="
|
||||
|
||||
rm -rf .mozconfig
|
||||
|
||||
if is_mac; then
|
||||
selectXcodeVersionOrDie $(node "${SCRIPT_FOLDER}/../get_xcode_version.js" firefox)
|
||||
echo "-- building on Mac"
|
||||
elif is_linux; then
|
||||
echo "-- building on Linux"
|
||||
elif is_win; then
|
||||
echo "ac_add_options --disable-update-agent" >> .mozconfig
|
||||
echo "ac_add_options --disable-default-browser-agent" >> .mozconfig
|
||||
echo "ac_add_options --disable-maintenance-service" >> .mozconfig
|
||||
|
||||
echo "-- building win64 build on MINGW"
|
||||
echo "ac_add_options --target=x86_64-pc-mingw32" >> .mozconfig
|
||||
echo "ac_add_options --host=x86_64-pc-mingw32" >> .mozconfig
|
||||
DLL_FILE=$("C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" -latest -find '**\Redist\MSVC\*\x64\**\vcruntime140.dll')
|
||||
WIN32_REDIST_DIR=$(dirname "$DLL_FILE" | tail -n 1)
|
||||
if ! [[ -d $WIN32_REDIST_DIR ]]; then
|
||||
echo "ERROR: cannot find MS VS C++ redistributable $WIN32_REDIST_DIR"
|
||||
exit 1;
|
||||
fi
|
||||
else
|
||||
echo "ERROR: cannot upload on this platform!" 1>&2
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
# There's no pre-built wasi sysroot on certain platforms.
|
||||
echo "ac_add_options --without-wasm-sandboxed-libraries" >> .mozconfig
|
||||
|
||||
OBJ_FOLDER="obj-build-playwright"
|
||||
echo "mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/${OBJ_FOLDER}" >> .mozconfig
|
||||
echo "ac_add_options --disable-crashreporter" >> .mozconfig
|
||||
echo "ac_add_options --disable-backgroundtasks" >> .mozconfig
|
||||
|
||||
if [[ -n "${IS_DEBUG}" ]]; then
|
||||
echo "ac_add_options --enable-debug" >> .mozconfig
|
||||
echo "ac_add_options --enable-debug-symbols" >> .mozconfig
|
||||
else
|
||||
echo "ac_add_options --enable-release" >> .mozconfig
|
||||
fi
|
||||
|
||||
if is_mac || is_win; then
|
||||
# This options is only available on win and mac.
|
||||
echo "ac_add_options --disable-update-agent" >> .mozconfig
|
||||
fi
|
||||
|
||||
if [[ -z "${IS_JUGGLER}" ]]; then
|
||||
# TODO: rustup is not in the PATH on Windows
|
||||
if command -v rustup >/dev/null; then
|
||||
# We manage Rust version ourselves.
|
||||
echo "-- Using rust v${RUST_VERSION}"
|
||||
rustup install "${RUST_VERSION}"
|
||||
rustup default "${RUST_VERSION}"
|
||||
fi
|
||||
# Firefox on Linux arm64 host does not ship
|
||||
# cbindgen in their default toolchains - install manually.
|
||||
if command -v cargo >/dev/null; then
|
||||
echo "-- Using cbindgen v${CBINDGEN_VERSION}"
|
||||
cargo install cbindgen --version "${CBINDGEN_VERSION}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${IS_FULL}" ]]; then
|
||||
# This is a slow but sure way to get all the necessary toolchains.
|
||||
# However, it will not work if tree is dirty.
|
||||
# Bail out if git repo is dirty.
|
||||
if [[ -n $(git status -s --untracked-files=no) ]]; then
|
||||
echo "ERROR: dirty GIT state - commit everything and re-run the script."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 1. We have a --single-branch checkout, so we have to add a "master" branch and fetch it
|
||||
git remote set-branches --add browser_upstream master
|
||||
git fetch --depth 1 browser_upstream master
|
||||
# 2. Checkout the master branch and run bootstrap from it.
|
||||
git checkout browser_upstream/master
|
||||
echo "ac_add_options --enable-bootstrap" >> .mozconfig
|
||||
SHELL=/bin/sh ./mach --no-interactive bootstrap --application-choice=browser
|
||||
git checkout -
|
||||
rm -rf "${OBJ_FOLDER}"
|
||||
|
||||
if [[ -n "${WIN32_REDIST_DIR}" ]]; then
|
||||
# Having this option in .mozconfig kills incremental compilation.
|
||||
echo "export WIN32_REDIST_DIR=\"$WIN32_REDIST_DIR\"" >> .mozconfig
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${IS_JUGGLER}" ]]; then
|
||||
./mach build faster
|
||||
else
|
||||
./mach build
|
||||
if is_mac; then
|
||||
FF_DEBUG_BUILD="${IS_DEBUG}" node "${SCRIPT_FOLDER}"/install-preferences.js "$PWD"/${OBJ_FOLDER}/dist
|
||||
else
|
||||
FF_DEBUG_BUILD="${IS_DEBUG}" node "${SCRIPT_FOLDER}"/install-preferences.js "$PWD"/${OBJ_FOLDER}/dist/bin
|
||||
fi
|
||||
fi
|
||||
|
||||
|
|
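For orientation, a sketch of the `.mozconfig` that the build script above assembles for a plain Linux build (no `--debug`, no `--full`); each line corresponds to one of the `echo ... >> .mozconfig` statements:

```bash
cat > .mozconfig <<'EOF'
ac_add_options --without-wasm-sandboxed-libraries
mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/obj-build-playwright
ac_add_options --disable-crashreporter
ac_add_options --disable-backgroundtasks
ac_add_options --enable-release
EOF
```
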
@ -1,20 +0,0 @@
#!/bin/bash
set -e
set +x

trap "cd $(pwd -P)" EXIT
if [[ ! -z "${FF_CHECKOUT_PATH}" ]]; then
  cd "${FF_CHECKOUT_PATH}"
  echo "WARNING: checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
else
  cd "$HOME/firefox"
fi

OBJ_FOLDER="obj-build-playwright"
if [[ -d $OBJ_FOLDER ]]; then
  rm -rf $OBJ_FOLDER
fi

if [[ -f "mach" ]]; then
  ./mach clobber || true
fi

@ -1,100 +0,0 @@
|
|||
/**
|
||||
* Copyright 2018 Google Inc. All rights reserved.
|
||||
* Modifications copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
const os = require('os');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
|
||||
const writeFileAsync = util.promisify(fs.writeFile.bind(fs));
|
||||
const mkdirAsync = util.promisify(fs.mkdir.bind(fs));
|
||||
|
||||
// Install browser preferences after downloading and unpacking
|
||||
// firefox instances.
|
||||
// Based on: https://developer.mozilla.org/en-US/docs/Mozilla/Firefox/Enterprise_deployment_before_60#Configuration
|
||||
async function installFirefoxPreferences(distpath) {
|
||||
let executablePath = '';
|
||||
if (os.platform() === 'linux')
|
||||
executablePath = path.join(distpath, 'firefox');
|
||||
else if (os.platform() === 'darwin')
|
||||
executablePath = path.join(distpath, (process.env.FF_DEBUG_BUILD ? 'NightlyDebug.app' : 'Nightly.app'), 'Contents', 'MacOS', 'firefox');
|
||||
else if (os.platform() === 'win32')
|
||||
executablePath = path.join(distpath, 'firefox.exe');
|
||||
|
||||
const firefoxFolder = path.dirname(executablePath);
|
||||
|
||||
let prefPath = '';
|
||||
let configPath = '';
|
||||
if (os.platform() === 'darwin') {
|
||||
prefPath = path.join(firefoxFolder, '..', 'Resources', 'defaults', 'pref');
|
||||
configPath = path.join(firefoxFolder, '..', 'Resources');
|
||||
} else if (os.platform() === 'linux') {
|
||||
if (!fs.existsSync(path.join(firefoxFolder, 'browser', 'defaults')))
|
||||
await mkdirAsync(path.join(firefoxFolder, 'browser', 'defaults'));
|
||||
if (!fs.existsSync(path.join(firefoxFolder, 'browser', 'defaults', 'preferences')))
|
||||
await mkdirAsync(path.join(firefoxFolder, 'browser', 'defaults', 'preferences'));
|
||||
prefPath = path.join(firefoxFolder, 'browser', 'defaults', 'preferences');
|
||||
configPath = firefoxFolder;
|
||||
} else if (os.platform() === 'win32') {
|
||||
prefPath = path.join(firefoxFolder, 'defaults', 'pref');
|
||||
configPath = firefoxFolder;
|
||||
} else {
|
||||
throw new Error('Unsupported platform: ' + os.platform());
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
copyFile({
|
||||
from: path.join(__dirname, 'preferences', '00-playwright-prefs.js'),
|
||||
to: path.join(prefPath, '00-playwright-prefs.js'),
|
||||
}),
|
||||
copyFile({
|
||||
from: path.join(__dirname, 'preferences', 'playwright.cfg'),
|
||||
to: path.join(configPath, 'playwright.cfg'),
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
function copyFile({from, to}) {
|
||||
const rd = fs.createReadStream(from);
|
||||
const wr = fs.createWriteStream(to);
|
||||
return new Promise(function(resolve, reject) {
|
||||
rd.on('error', reject);
|
||||
wr.on('error', reject);
|
||||
wr.on('finish', resolve);
|
||||
rd.pipe(wr);
|
||||
}).catch(function(error) {
|
||||
rd.destroy();
|
||||
wr.end();
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { installFirefoxPreferences };
|
||||
|
||||
if (require.main === module) {
|
||||
if (process.argv.length !== 3) {
|
||||
console.log('ERROR: expected a path to the directory with browser build');
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
installFirefoxPreferences(process.argv[2]).catch(error => {
|
||||
console.error('ERROR: failed to put preferences!');
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
|
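A hedged usage note: the Firefox `archive.sh` earlier in this diff invokes this module over the packaged distribution; a standalone invocation against the default checkout location would look like this (the path assumes the default `$HOME/firefox` checkout and the `obj-build-playwright` object folder):

```bash
# Copies 00-playwright-prefs.js and playwright.cfg into the unpacked build.
node ./install-preferences.js "$HOME/firefox/obj-build-playwright/dist/firefox"
```
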
@@ -1,135 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

const uuidGen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");

class Helper {

  addObserver(handler, topic) {
    Services.obs.addObserver(handler, topic);
    return () => Services.obs.removeObserver(handler, topic);
  }

  addMessageListener(receiver, eventName, handler) {
    receiver.addMessageListener(eventName, handler);
    return () => receiver.removeMessageListener(eventName, handler);
  }

  addEventListener(receiver, eventName, handler) {
    receiver.addEventListener(eventName, handler);
    return () => receiver.removeEventListener(eventName, handler);
  }

  awaitEvent(receiver, eventName) {
    return new Promise(resolve => {
      receiver.addEventListener(eventName, function listener() {
        receiver.removeEventListener(eventName, listener);
        resolve();
      });
    });
  }

  on(receiver, eventName, handler) {
    // The toolkit/modules/EventEmitter.jsm dispatches event name as a first argument.
    // Fire event listeners without it for convenience.
    const handlerWrapper = (_, ...args) => handler(...args);
    receiver.on(eventName, handlerWrapper);
    return () => receiver.off(eventName, handlerWrapper);
  }

  addProgressListener(progress, listener, flags) {
    progress.addProgressListener(listener, flags);
    return () => progress.removeProgressListener(listener);
  }

  removeListeners(listeners) {
    for (const tearDown of listeners)
      tearDown.call(null);
    listeners.splice(0, listeners.length);
  }

  generateId() {
    const string = uuidGen.generateUUID().toString();
    return string.substring(1, string.length - 1);
  }

  getLoadContext(channel) {
    let loadContext = null;
    try {
      if (channel.notificationCallbacks)
        loadContext = channel.notificationCallbacks.getInterface(Ci.nsILoadContext);
    } catch (e) {}
    try {
      if (!loadContext && channel.loadGroup)
        loadContext = channel.loadGroup.notificationCallbacks.getInterface(Ci.nsILoadContext);
    } catch (e) { }
    return loadContext;
  }

  getNetworkErrorStatusText(status) {
    if (!status)
      return null;
    for (const key of Object.keys(Cr)) {
      if (Cr[key] === status)
        return key;
    }
    // Security module. The following is taken from
    // https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/How_to_check_the_secruity_state_of_an_XMLHTTPRequest_over_SSL
    if ((status & 0xff0000) === 0x5a0000) {
      // NSS_SEC errors (happen below the base value because of negative vals)
      if ((status & 0xffff) < Math.abs(Ci.nsINSSErrorsService.NSS_SEC_ERROR_BASE)) {
        // The bases are actually negative, so in our positive numeric space, we
        // need to subtract the base off our value.
        const nssErr = Math.abs(Ci.nsINSSErrorsService.NSS_SEC_ERROR_BASE) - (status & 0xffff);
        switch (nssErr) {
          case 11:
            return 'SEC_ERROR_EXPIRED_CERTIFICATE';
          case 12:
            return 'SEC_ERROR_REVOKED_CERTIFICATE';
          case 13:
            return 'SEC_ERROR_UNKNOWN_ISSUER';
          case 20:
            return 'SEC_ERROR_UNTRUSTED_ISSUER';
          case 21:
            return 'SEC_ERROR_UNTRUSTED_CERT';
          case 36:
            return 'SEC_ERROR_CA_CERT_INVALID';
          case 90:
            return 'SEC_ERROR_INADEQUATE_KEY_USAGE';
          case 176:
            return 'SEC_ERROR_CERT_SIGNATURE_ALGORITHM_DISABLED';
          default:
            return 'SEC_ERROR_UNKNOWN';
        }
      }
      const sslErr = Math.abs(Ci.nsINSSErrorsService.NSS_SSL_ERROR_BASE) - (status & 0xffff);
      switch (sslErr) {
        case 3:
          return 'SSL_ERROR_NO_CERTIFICATE';
        case 4:
          return 'SSL_ERROR_BAD_CERTIFICATE';
        case 8:
          return 'SSL_ERROR_UNSUPPORTED_CERTIFICATE_TYPE';
        case 9:
          return 'SSL_ERROR_UNSUPPORTED_VERSION';
        case 12:
          return 'SSL_ERROR_BAD_CERT_DOMAIN';
        default:
          return 'SSL_ERROR_UNKNOWN';
      }
    }
    return '<unknown error>';
  }

  browsingContextToFrameId(browsingContext) {
    if (!browsingContext)
      return undefined;
    return 'frame-' + browsingContext.id;
  }
}

var EXPORTED_SYMBOLS = [ "Helper" ];
this.Helper = Helper;
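The Helper methods above all follow the same contract: every add* call returns a teardown closure, and removeListeners() runs and clears a collected array of them. A short sketch of how the other Juggler modules use this pattern (the receiver and handler names here are illustrative only):

  // Subscribe via Helper and collect the returned unsubscribe closures...
  const helper = new Helper();
  this._eventListeners = [
    helper.addObserver(subject => this._onRequest(subject), 'http-on-modify-request'),
    helper.addEventListener(receiver, 'load', () => this._onLoad()),
  ];
  // ...then tear everything down in one place, e.g. in dispose():
  helper.removeListeners(this._eventListeners);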
@@ -1,958 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
const {EventEmitter} = ChromeUtils.import('resource://gre/modules/EventEmitter.jsm');
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
const {NetUtil} = ChromeUtils.import('resource://gre/modules/NetUtil.jsm');
|
||||
const { ChannelEventSinkFactory } = ChromeUtils.import("chrome://remote/content/cdp/observers/ChannelEventSink.jsm");
|
||||
|
||||
|
||||
const Cc = Components.classes;
|
||||
const Ci = Components.interfaces;
|
||||
const Cu = Components.utils;
|
||||
const Cr = Components.results;
|
||||
const Cm = Components.manager;
|
||||
const CC = Components.Constructor;
|
||||
const helper = new Helper();
|
||||
|
||||
const UINT32_MAX = Math.pow(2, 32)-1;
|
||||
|
||||
const BinaryInputStream = CC('@mozilla.org/binaryinputstream;1', 'nsIBinaryInputStream', 'setInputStream');
|
||||
const BinaryOutputStream = CC('@mozilla.org/binaryoutputstream;1', 'nsIBinaryOutputStream', 'setOutputStream');
|
||||
const StorageStream = CC('@mozilla.org/storagestream;1', 'nsIStorageStream', 'init');
|
||||
|
||||
// Cap response storage at 100 MB per tracked tab.
|
||||
const MAX_RESPONSE_STORAGE_SIZE = 100 * 1024 * 1024;
|
||||
|
||||
const pageNetworkSymbol = Symbol('PageNetwork');
|
||||
|
||||
class PageNetwork {
|
||||
static forPageTarget(target) {
|
||||
let result = target[pageNetworkSymbol];
|
||||
if (!result) {
|
||||
result = new PageNetwork(target);
|
||||
target[pageNetworkSymbol] = result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
constructor(target) {
|
||||
EventEmitter.decorate(this);
|
||||
this._target = target;
|
||||
this._extraHTTPHeaders = null;
|
||||
this._responseStorage = new ResponseStorage(MAX_RESPONSE_STORAGE_SIZE, MAX_RESPONSE_STORAGE_SIZE / 10);
|
||||
this._requestInterceptionEnabled = false;
|
||||
// This is requestId => NetworkRequest map, only contains requests that are
|
||||
// awaiting interception action (abort, resume, fulfill) over the protocol.
|
||||
this._interceptedRequests = new Map();
|
||||
}
|
||||
|
||||
setExtraHTTPHeaders(headers) {
|
||||
this._extraHTTPHeaders = headers;
|
||||
}
|
||||
|
||||
combinedExtraHTTPHeaders() {
|
||||
return [
|
||||
...(this._target.browserContext().extraHTTPHeaders || []),
|
||||
...(this._extraHTTPHeaders || []),
|
||||
];
|
||||
}
|
||||
|
||||
enableRequestInterception() {
|
||||
this._requestInterceptionEnabled = true;
|
||||
}
|
||||
|
||||
disableRequestInterception() {
|
||||
this._requestInterceptionEnabled = false;
|
||||
for (const intercepted of this._interceptedRequests.values())
|
||||
intercepted.resume();
|
||||
this._interceptedRequests.clear();
|
||||
}
|
||||
|
||||
resumeInterceptedRequest(requestId, url, method, headers, postData) {
|
||||
this._takeIntercepted(requestId).resume(url, method, headers, postData);
|
||||
}
|
||||
|
||||
fulfillInterceptedRequest(requestId, status, statusText, headers, base64body) {
|
||||
this._takeIntercepted(requestId).fulfill(status, statusText, headers, base64body);
|
||||
}
|
||||
|
||||
abortInterceptedRequest(requestId, errorCode) {
|
||||
this._takeIntercepted(requestId).abort(errorCode);
|
||||
}
|
||||
|
||||
getResponseBody(requestId) {
|
||||
if (!this._responseStorage)
|
||||
throw new Error('Responses are not tracked for the given browser');
|
||||
return this._responseStorage.getBase64EncodedResponse(requestId);
|
||||
}
|
||||
|
||||
_takeIntercepted(requestId) {
|
||||
const intercepted = this._interceptedRequests.get(requestId);
|
||||
if (!intercepted)
|
||||
throw new Error(`Cannot find request "${requestId}"`);
|
||||
this._interceptedRequests.delete(requestId);
|
||||
return intercepted;
|
||||
}
|
||||
}
|
||||
|
||||
class NetworkRequest {
|
||||
constructor(networkObserver, httpChannel, redirectedFrom) {
|
||||
this._networkObserver = networkObserver;
|
||||
this.httpChannel = httpChannel;
|
||||
|
||||
const loadInfo = this.httpChannel.loadInfo;
|
||||
let browsingContext = loadInfo?.frameBrowsingContext || loadInfo?.browsingContext;
|
||||
// TODO: Unfortunately, requests from web workers don't have frameBrowsingContext or
|
||||
// browsingContext.
|
||||
//
|
||||
// We fail to attribute them to the original frames on the browser side, but we
|
||||
// can use load context top frame to attribute them to the top frame at least.
|
||||
if (!browsingContext) {
|
||||
const loadContext = helper.getLoadContext(this.httpChannel);
|
||||
browsingContext = loadContext?.topFrameElement?.browsingContext;
|
||||
}
|
||||
|
||||
this._frameId = helper.browsingContextToFrameId(browsingContext);
|
||||
|
||||
this.requestId = httpChannel.channelId + '';
|
||||
this.navigationId = httpChannel.isMainDocumentChannel ? this.requestId : undefined;
|
||||
|
||||
this._redirectedIndex = 0;
|
||||
if (redirectedFrom) {
|
||||
this.redirectedFromId = redirectedFrom.requestId;
|
||||
this._redirectedIndex = redirectedFrom._redirectedIndex + 1;
|
||||
this.requestId = this.requestId + '-redirect' + this._redirectedIndex;
|
||||
this.navigationId = redirectedFrom.navigationId;
|
||||
// Finish previous request now. Since we inherit the listener, we could in theory
|
||||
// use onStopRequest, but that will only happen after the last redirect has finished.
|
||||
redirectedFrom._sendOnRequestFinished();
|
||||
}
|
||||
// In case of proxy auth, we get two requests with the same channel:
|
||||
// - one is pre-auth
|
||||
// - second is with auth header.
|
||||
//
|
||||
// In this case, we create this NetworkRequest object with a `redirectedFrom`
|
||||
// object, and they both share the same httpChannel.
|
||||
//
|
||||
// Since we want to maintain _channelToRequest map without clashes,
|
||||
// we must call `_sendOnRequestFinished` **before** we update it with a new object
|
||||
// here.
|
||||
if (this._networkObserver._channelToRequest.has(this.httpChannel))
|
||||
throw new Error(`Internal Error: invariant is broken for _channelToRequest map`);
|
||||
this._networkObserver._channelToRequest.set(this.httpChannel, this);
|
||||
|
||||
this._pageNetwork = redirectedFrom ? redirectedFrom._pageNetwork : networkObserver._findPageNetwork(httpChannel);
|
||||
this._expectingInterception = false;
|
||||
this._expectingResumedRequest = undefined; // { method, headers, postData }
|
||||
this._sentOnResponse = false;
|
||||
|
||||
if (this._pageNetwork)
|
||||
appendExtraHTTPHeaders(httpChannel, this._pageNetwork.combinedExtraHTTPHeaders());
|
||||
|
||||
this._responseBodyChunks = [];
|
||||
|
||||
httpChannel.QueryInterface(Ci.nsITraceableChannel);
|
||||
this._originalListener = httpChannel.setNewListener(this);
|
||||
if (redirectedFrom) {
|
||||
// Listener is inherited for regular redirects, so we'd like to avoid
|
||||
// calling into previous NetworkRequest.
|
||||
this._originalListener = redirectedFrom._originalListener;
|
||||
}
|
||||
|
||||
this._previousCallbacks = httpChannel.notificationCallbacks;
|
||||
httpChannel.notificationCallbacks = this;
|
||||
|
||||
this.QueryInterface = ChromeUtils.generateQI([
|
||||
Ci.nsIAuthPrompt2,
|
||||
Ci.nsIAuthPromptProvider,
|
||||
Ci.nsIInterfaceRequestor,
|
||||
Ci.nsINetworkInterceptController,
|
||||
Ci.nsIStreamListener,
|
||||
]);
|
||||
|
||||
if (this.redirectedFromId) {
|
||||
// Redirects are not interceptable.
|
||||
this._sendOnRequest(false);
|
||||
}
|
||||
}
|
||||
|
||||
// Public interception API.
|
||||
resume(url, method, headers, postData) {
|
||||
this._expectingResumedRequest = { method, headers, postData };
|
||||
const newUri = url ? Services.io.newURI(url) : null;
|
||||
this._interceptedChannel.resetInterceptionWithURI(newUri);
|
||||
this._interceptedChannel = undefined;
|
||||
}
|
||||
|
||||
// Public interception API.
|
||||
abort(errorCode) {
|
||||
const error = errorMap[errorCode] || Cr.NS_ERROR_FAILURE;
|
||||
this._interceptedChannel.cancelInterception(error);
|
||||
this._interceptedChannel = undefined;
|
||||
}
|
||||
|
||||
// Public interception API.
|
||||
fulfill(status, statusText, headers, base64body) {
|
||||
this._interceptedChannel.synthesizeStatus(status, statusText);
|
||||
for (const header of headers) {
|
||||
this._interceptedChannel.synthesizeHeader(header.name, header.value);
|
||||
if (header.name.toLowerCase() === 'set-cookie') {
|
||||
Services.cookies.QueryInterface(Ci.nsICookieService);
|
||||
Services.cookies.setCookieStringFromHttp(this.httpChannel.URI, header.value, this.httpChannel);
|
||||
}
|
||||
}
|
||||
const synthesized = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(Ci.nsIStringInputStream);
|
||||
synthesized.data = base64body ? atob(base64body) : '';
|
||||
this._interceptedChannel.startSynthesizedResponse(synthesized, null, null, '', false);
|
||||
this._interceptedChannel.finishSynthesizedResponse();
|
||||
this._interceptedChannel = undefined;
|
||||
}
|
||||
|
||||
// Instrumentation called by NetworkObserver.
|
||||
_onInternalRedirect(newChannel) {
|
||||
// Intercepted requests produce "internal redirects" - this is both for our own
|
||||
// interception and service workers.
|
||||
// An internal redirect has the same channelId, inherits notificationCallbacks and
|
||||
// listener, and should be used instead of an old channel.
|
||||
this._networkObserver._channelToRequest.delete(this.httpChannel);
|
||||
this.httpChannel = newChannel;
|
||||
this._networkObserver._channelToRequest.set(this.httpChannel, this);
|
||||
}
|
||||
|
||||
// Instrumentation called by NetworkObserver.
|
||||
_onInternalRedirectReady() {
|
||||
// Resumed request is first internally redirected to a new request,
|
||||
// and then the new request is ready to be updated.
|
||||
if (!this._expectingResumedRequest)
|
||||
return;
|
||||
const { method, headers, postData } = this._expectingResumedRequest;
|
||||
this._expectingResumedRequest = undefined;
|
||||
|
||||
if (headers) {
|
||||
for (const header of requestHeaders(this.httpChannel))
|
||||
this.httpChannel.setRequestHeader(header.name, '', false /* merge */);
|
||||
for (const header of headers)
|
||||
this.httpChannel.setRequestHeader(header.name, header.value, false /* merge */);
|
||||
} else if (this._pageNetwork) {
|
||||
appendExtraHTTPHeaders(this.httpChannel, this._pageNetwork.combinedExtraHTTPHeaders());
|
||||
}
|
||||
if (method)
|
||||
this.httpChannel.requestMethod = method;
|
||||
if (postData !== undefined)
|
||||
setPostData(this.httpChannel, postData, headers);
|
||||
}
|
||||
|
||||
// nsIInterfaceRequestor
|
||||
getInterface(iid) {
|
||||
if (iid.equals(Ci.nsIAuthPrompt2) || iid.equals(Ci.nsIAuthPromptProvider) || iid.equals(Ci.nsINetworkInterceptController))
|
||||
return this;
|
||||
if (iid.equals(Ci.nsIAuthPrompt)) // Block nsIAuthPrompt - we want nsIAuthPrompt2 to be used instead.
|
||||
throw Cr.NS_ERROR_NO_INTERFACE;
|
||||
if (this._previousCallbacks)
|
||||
return this._previousCallbacks.getInterface(iid);
|
||||
throw Cr.NS_ERROR_NO_INTERFACE;
|
||||
}
|
||||
|
||||
// nsIAuthPromptProvider
|
||||
getAuthPrompt(aPromptReason, iid) {
|
||||
return this;
|
||||
}
|
||||
|
||||
// nsIAuthPrompt2
|
||||
asyncPromptAuth(aChannel, aCallback, aContext, level, authInfo) {
|
||||
let canceled = false;
|
||||
Promise.resolve().then(() => {
|
||||
if (canceled)
|
||||
return;
|
||||
const hasAuth = this.promptAuth(aChannel, level, authInfo);
|
||||
if (hasAuth)
|
||||
aCallback.onAuthAvailable(aContext, authInfo);
|
||||
else
|
||||
aCallback.onAuthCancelled(aContext, true);
|
||||
});
|
||||
return {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsICancelable]),
|
||||
cancel: () => {
|
||||
aCallback.onAuthCancelled(aContext, false);
|
||||
canceled = true;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// nsIAuthPrompt2
|
||||
promptAuth(aChannel, level, authInfo) {
|
||||
if (authInfo.flags & Ci.nsIAuthInformation.PREVIOUS_FAILED)
|
||||
return false;
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (!pageNetwork)
|
||||
return false;
|
||||
let credentials = null;
|
||||
if (authInfo.flags & Ci.nsIAuthInformation.AUTH_PROXY) {
|
||||
const proxy = this._networkObserver._targetRegistry.getProxyInfo(aChannel);
|
||||
credentials = proxy ? {username: proxy.username, password: proxy.password} : null;
|
||||
} else {
|
||||
credentials = pageNetwork._target.browserContext().httpCredentials;
|
||||
}
|
||||
if (!credentials)
|
||||
return false;
|
||||
authInfo.username = credentials.username;
|
||||
authInfo.password = credentials.password;
|
||||
// This will produce a new request with respective auth header set.
|
||||
// It will have the same id as ours. We expect it to arrive as new request and
|
||||
// will treat it as our own redirect.
|
||||
this._networkObserver._expectRedirect(this.httpChannel.channelId + '', this);
|
||||
return true;
|
||||
}
|
||||
|
||||
// nsINetworkInterceptController
|
||||
shouldPrepareForIntercept(aURI, channel) {
|
||||
const interceptController = this._fallThroughInterceptController();
|
||||
if (interceptController && interceptController.shouldPrepareForIntercept(aURI, channel)) {
|
||||
// We assume that interceptController is a service worker if there is one,
|
||||
// and yield interception to it. We are not going to intercept ourselves,
|
||||
// so we send onRequest now.
|
||||
this._sendOnRequest(false);
|
||||
return true;
|
||||
}
|
||||
|
||||
if (channel !== this.httpChannel) {
|
||||
// Not our channel? Just in case this happens, don't do anything.
|
||||
return false;
|
||||
}
|
||||
|
||||
// We do not want to intercept any redirects, because we are not able
|
||||
// to intercept subresource redirects, and it's unreliable for main requests.
|
||||
// We do not sendOnRequest here, because redirects do that in constructor.
|
||||
if (this.redirectedFromId)
|
||||
return false;
|
||||
|
||||
const shouldIntercept = this._shouldIntercept();
|
||||
if (!shouldIntercept) {
|
||||
// We are not intercepting - ready to issue onRequest.
|
||||
this._sendOnRequest(false);
|
||||
return false;
|
||||
}
|
||||
|
||||
this._expectingInterception = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// nsINetworkInterceptController
|
||||
channelIntercepted(intercepted) {
|
||||
if (!this._expectingInterception) {
|
||||
// We are not intercepting, fall-through.
|
||||
const interceptController = this._fallThroughInterceptController();
|
||||
if (interceptController)
|
||||
interceptController.channelIntercepted(intercepted);
|
||||
return;
|
||||
}
|
||||
|
||||
this._expectingInterception = false;
|
||||
this._interceptedChannel = intercepted.QueryInterface(Ci.nsIInterceptedChannel);
|
||||
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (!pageNetwork) {
|
||||
// Just in case we disabled instrumentation while intercepting, resume and forget.
|
||||
this.resume();
|
||||
return;
|
||||
}
|
||||
|
||||
const browserContext = pageNetwork._target.browserContext();
|
||||
if (browserContext.settings.onlineOverride === 'offline') {
|
||||
// Implement offline.
|
||||
this.abort(Cr.NS_ERROR_OFFLINE);
|
||||
return;
|
||||
}
|
||||
|
||||
// Ok, so now we have intercepted the request, let's issue onRequest.
|
||||
// If interception has been disabled while we were intercepting, resume and forget.
|
||||
const interceptionEnabled = this._shouldIntercept();
|
||||
this._sendOnRequest(!!interceptionEnabled);
|
||||
if (interceptionEnabled)
|
||||
pageNetwork._interceptedRequests.set(this.requestId, this);
|
||||
else
|
||||
this.resume();
|
||||
}
|
||||
|
||||
// nsIStreamListener
|
||||
onDataAvailable(aRequest, aInputStream, aOffset, aCount) {
|
||||
// Turns out webcompat shims might redirect to
|
||||
// SimpleChannel, so we get requests from a different channel.
|
||||
// See https://github.com/microsoft/playwright/issues/9418#issuecomment-944836244
|
||||
if (aRequest !== this.httpChannel)
|
||||
return;
|
||||
// For requests with internal redirect (e.g. intercepted by Service Worker),
|
||||
// we do not get onResponse normally, but we do get nsIStreamListener notifications.
|
||||
this._sendOnResponse(false);
|
||||
|
||||
const iStream = new BinaryInputStream(aInputStream);
|
||||
const sStream = new StorageStream(8192, aCount, null);
|
||||
const oStream = new BinaryOutputStream(sStream.getOutputStream(0));
|
||||
|
||||
// Copy received data as they come.
|
||||
const data = iStream.readBytes(aCount);
|
||||
this._responseBodyChunks.push(data);
|
||||
|
||||
oStream.writeBytes(data, aCount);
|
||||
try {
|
||||
this._originalListener.onDataAvailable(aRequest, sStream.newInputStream(0), aOffset, aCount);
|
||||
} catch (e) {
|
||||
// Be ready for exceptions thrown by the original listener.
|
||||
}
|
||||
}
|
||||
|
||||
// nsIStreamListener
|
||||
onStartRequest(aRequest) {
|
||||
// Turns out webcompat shims might redirect to
|
||||
// SimpleChannel, so we get requests from a different channel.
|
||||
// See https://github.com/microsoft/playwright/issues/9418#issuecomment-944836244
|
||||
if (aRequest !== this.httpChannel)
|
||||
return;
|
||||
try {
|
||||
this._originalListener.onStartRequest(aRequest);
|
||||
} catch (e) {
|
||||
// Be ready for exceptions thrown by the original listener.
|
||||
}
|
||||
}
|
||||
|
||||
// nsIStreamListener
|
||||
onStopRequest(aRequest, aStatusCode) {
|
||||
// Turns out webcompat shims might redirect to
|
||||
// SimpleChannel, so we get requests from a different channel.
|
||||
// See https://github.com/microsoft/playwright/issues/9418#issuecomment-944836244
|
||||
if (aRequest !== this.httpChannel)
|
||||
return;
|
||||
try {
|
||||
this._originalListener.onStopRequest(aRequest, aStatusCode);
|
||||
} catch (e) {
|
||||
// Be ready for exceptions thrown by the original listener.
|
||||
}
|
||||
|
||||
if (aStatusCode === 0) {
|
||||
// For requests with internal redirect (e.g. intercepted by Service Worker),
|
||||
// we do not get onResponse normally, but we do get nsIRequestObserver notifications.
|
||||
this._sendOnResponse(false);
|
||||
const body = this._responseBodyChunks.join('');
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (pageNetwork)
|
||||
pageNetwork._responseStorage.addResponseBody(this, body);
|
||||
this._sendOnRequestFinished();
|
||||
} else {
|
||||
this._sendOnRequestFailed(aStatusCode);
|
||||
}
|
||||
|
||||
delete this._responseBodyChunks;
|
||||
}
|
||||
|
||||
_shouldIntercept() {
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (!pageNetwork)
|
||||
return false;
|
||||
if (pageNetwork._requestInterceptionEnabled)
|
||||
return true;
|
||||
const browserContext = pageNetwork._target.browserContext();
|
||||
if (browserContext.requestInterceptionEnabled)
|
||||
return true;
|
||||
if (browserContext.settings.onlineOverride === 'offline')
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
_fallThroughInterceptController() {
|
||||
if (!this._previousCallbacks || !(this._previousCallbacks instanceof Ci.nsINetworkInterceptController))
|
||||
return undefined;
|
||||
return this._previousCallbacks.getInterface(Ci.nsINetworkInterceptController);
|
||||
}
|
||||
|
||||
_sendOnRequest(isIntercepted) {
|
||||
// Note: we call _sendOnRequest either after we intercepted the request,
|
||||
// or at the first moment we know that we are not going to intercept.
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (!pageNetwork)
|
||||
return;
|
||||
const loadInfo = this.httpChannel.loadInfo;
|
||||
const causeType = loadInfo?.externalContentPolicyType || Ci.nsIContentPolicy.TYPE_OTHER;
|
||||
const internalCauseType = loadInfo?.internalContentPolicyType || Ci.nsIContentPolicy.TYPE_OTHER;
|
||||
pageNetwork.emit(PageNetwork.Events.Request, {
|
||||
url: this.httpChannel.URI.spec,
|
||||
frameId: this._frameId,
|
||||
isIntercepted,
|
||||
requestId: this.requestId,
|
||||
redirectedFrom: this.redirectedFromId,
|
||||
postData: readRequestPostData(this.httpChannel),
|
||||
headers: requestHeaders(this.httpChannel),
|
||||
method: this.httpChannel.requestMethod,
|
||||
navigationId: this.navigationId,
|
||||
cause: causeTypeToString(causeType),
|
||||
internalCause: causeTypeToString(internalCauseType),
|
||||
}, this._frameId);
|
||||
}
|
||||
|
||||
_sendOnResponse(fromCache, opt_statusCode, opt_statusText) {
|
||||
if (this._sentOnResponse) {
|
||||
// We can come here twice because of internal redirects, e.g. service workers.
|
||||
return;
|
||||
}
|
||||
this._sentOnResponse = true;
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (!pageNetwork)
|
||||
return;
|
||||
|
||||
this.httpChannel.QueryInterface(Ci.nsIHttpChannelInternal);
|
||||
this.httpChannel.QueryInterface(Ci.nsITimedChannel);
|
||||
const timing = {
|
||||
startTime: this.httpChannel.channelCreationTime,
|
||||
domainLookupStart: this.httpChannel.domainLookupStartTime,
|
||||
domainLookupEnd: this.httpChannel.domainLookupEndTime,
|
||||
connectStart: this.httpChannel.connectStartTime,
|
||||
secureConnectionStart: this.httpChannel.secureConnectionStartTime,
|
||||
connectEnd: this.httpChannel.connectEndTime,
|
||||
requestStart: this.httpChannel.requestStartTime,
|
||||
responseStart: this.httpChannel.responseStartTime,
|
||||
};
|
||||
|
||||
const { status, statusText, headers } = responseHead(this.httpChannel, opt_statusCode, opt_statusText);
|
||||
let remoteIPAddress = undefined;
|
||||
let remotePort = undefined;
|
||||
try {
|
||||
remoteIPAddress = this.httpChannel.remoteAddress;
|
||||
remotePort = this.httpChannel.remotePort;
|
||||
} catch (e) {
|
||||
// remoteAddress is not defined for cached requests.
|
||||
}
|
||||
|
||||
const fromServiceWorker = this._networkObserver._channelIdsFulfilledByServiceWorker.has(this.requestId);
|
||||
this._networkObserver._channelIdsFulfilledByServiceWorker.delete(this.requestId);
|
||||
|
||||
pageNetwork.emit(PageNetwork.Events.Response, {
|
||||
requestId: this.requestId,
|
||||
securityDetails: getSecurityDetails(this.httpChannel),
|
||||
fromCache,
|
||||
headers,
|
||||
remoteIPAddress,
|
||||
remotePort,
|
||||
status,
|
||||
statusText,
|
||||
timing,
|
||||
fromServiceWorker,
|
||||
}, this._frameId);
|
||||
}
|
||||
|
||||
_sendOnRequestFailed(error) {
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (pageNetwork) {
|
||||
pageNetwork.emit(PageNetwork.Events.RequestFailed, {
|
||||
requestId: this.requestId,
|
||||
errorCode: helper.getNetworkErrorStatusText(error),
|
||||
}, this._frameId);
|
||||
}
|
||||
this._networkObserver._channelToRequest.delete(this.httpChannel);
|
||||
}
|
||||
|
||||
_sendOnRequestFinished() {
|
||||
const pageNetwork = this._pageNetwork;
|
||||
if (pageNetwork) {
|
||||
let protocolVersion = undefined;
|
||||
try {
|
||||
protocolVersion = this.httpChannel.protocolVersion;
|
||||
} catch (e) {
|
||||
// protocolVersion is unavailable in certain cases.
|
||||
};
|
||||
pageNetwork.emit(PageNetwork.Events.RequestFinished, {
|
||||
requestId: this.requestId,
|
||||
responseEndTime: this.httpChannel.responseEndTime,
|
||||
transferSize: this.httpChannel.transferSize,
|
||||
encodedBodySize: this.httpChannel.encodedBodySize,
|
||||
protocolVersion,
|
||||
}, this._frameId);
|
||||
}
|
||||
this._networkObserver._channelToRequest.delete(this.httpChannel);
|
||||
}
|
||||
}
|
||||
|
||||
class NetworkObserver {
|
||||
static instance() {
|
||||
return NetworkObserver._instance || null;
|
||||
}
|
||||
|
||||
constructor(targetRegistry) {
|
||||
EventEmitter.decorate(this);
|
||||
NetworkObserver._instance = this;
|
||||
|
||||
this._targetRegistry = targetRegistry;
|
||||
|
||||
this._channelToRequest = new Map(); // http channel -> network request
|
||||
this._expectedRedirect = new Map(); // expected redirect channel id (string) -> network request
|
||||
this._channelIdsFulfilledByServiceWorker = new Set(); // http channel ids that were fulfilled by service worker
|
||||
|
||||
const protocolProxyService = Cc['@mozilla.org/network/protocol-proxy-service;1'].getService();
|
||||
this._channelProxyFilter = {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsIProtocolProxyChannelFilter]),
|
||||
applyFilter: (channel, defaultProxyInfo, proxyFilter) => {
|
||||
const proxy = this._targetRegistry.getProxyInfo(channel);
|
||||
if (!proxy) {
|
||||
proxyFilter.onProxyFilterResult(defaultProxyInfo);
|
||||
return;
|
||||
}
|
||||
proxyFilter.onProxyFilterResult(protocolProxyService.newProxyInfo(
|
||||
proxy.type,
|
||||
proxy.host,
|
||||
proxy.port,
|
||||
'', /* aProxyAuthorizationHeader */
|
||||
'', /* aConnectionIsolationKey */
|
||||
Ci.nsIProxyInfo.TRANSPARENT_PROXY_RESOLVES_HOST, /* aFlags */
|
||||
UINT32_MAX, /* aFailoverTimeout */
|
||||
null, /* failover proxy */
|
||||
));
|
||||
},
|
||||
};
|
||||
protocolProxyService.registerChannelFilter(this._channelProxyFilter, 0 /* position */);
|
||||
|
||||
// Register self as ChannelEventSink to track redirects.
|
||||
ChannelEventSinkFactory.getService().registerCollector({
|
||||
_onChannelRedirect: this._onRedirect.bind(this),
|
||||
});
|
||||
|
||||
this._eventListeners = [
|
||||
helper.addObserver(this._onRequest.bind(this), 'http-on-modify-request'),
|
||||
helper.addObserver(this._onResponse.bind(this, false /* fromCache */), 'http-on-examine-response'),
|
||||
helper.addObserver(this._onResponse.bind(this, true /* fromCache */), 'http-on-examine-cached-response'),
|
||||
helper.addObserver(this._onResponse.bind(this, true /* fromCache */), 'http-on-examine-merged-response'),
|
||||
helper.addObserver(this._onServiceWorkerResponse.bind(this), 'service-worker-synthesized-response'),
|
||||
];
|
||||
}
|
||||
|
||||
_expectRedirect(channelId, previous) {
|
||||
this._expectedRedirect.set(channelId, previous);
|
||||
}
|
||||
|
||||
_onRedirect(oldChannel, newChannel, flags) {
|
||||
if (!(oldChannel instanceof Ci.nsIHttpChannel) || !(newChannel instanceof Ci.nsIHttpChannel))
|
||||
return;
|
||||
const oldHttpChannel = oldChannel.QueryInterface(Ci.nsIHttpChannel);
|
||||
const newHttpChannel = newChannel.QueryInterface(Ci.nsIHttpChannel);
|
||||
const request = this._channelToRequest.get(oldHttpChannel);
|
||||
if (flags & Ci.nsIChannelEventSink.REDIRECT_INTERNAL) {
|
||||
if (request)
|
||||
request._onInternalRedirect(newHttpChannel);
|
||||
} else if (flags & Ci.nsIChannelEventSink.REDIRECT_STS_UPGRADE) {
|
||||
if (request) {
|
||||
// This is an internal HSTS upgrade. The original http request is canceled, and a new
|
||||
// equivalent https request is sent. We forge 307 redirect to follow Chromium here:
|
||||
// https://source.chromium.org/chromium/chromium/src/+/main:net/url_request/url_request_http_job.cc;l=211
|
||||
request._sendOnResponse(false, 307, 'Temporary Redirect');
|
||||
this._expectRedirect(newHttpChannel.channelId + '', request);
|
||||
}
|
||||
} else {
|
||||
if (request)
|
||||
this._expectRedirect(newHttpChannel.channelId + '', request);
|
||||
}
|
||||
}
|
||||
|
||||
_findPageNetwork(httpChannel) {
|
||||
let loadContext = helper.getLoadContext(httpChannel);
|
||||
if (!loadContext)
|
||||
return;
|
||||
const target = this._targetRegistry.targetForBrowser(loadContext.topFrameElement);
|
||||
if (!target)
|
||||
return;
|
||||
return PageNetwork.forPageTarget(target);
|
||||
}
|
||||
|
||||
_onRequest(channel, topic) {
|
||||
if (!(channel instanceof Ci.nsIHttpChannel))
|
||||
return;
|
||||
const httpChannel = channel.QueryInterface(Ci.nsIHttpChannel);
|
||||
const channelId = httpChannel.channelId + '';
|
||||
const redirectedFrom = this._expectedRedirect.get(channelId);
|
||||
if (redirectedFrom) {
|
||||
this._expectedRedirect.delete(channelId);
|
||||
new NetworkRequest(this, httpChannel, redirectedFrom);
|
||||
} else {
|
||||
const redirectedRequest = this._channelToRequest.get(httpChannel);
|
||||
if (redirectedRequest)
|
||||
redirectedRequest._onInternalRedirectReady();
|
||||
else
|
||||
new NetworkRequest(this, httpChannel);
|
||||
}
|
||||
}
|
||||
|
||||
_onResponse(fromCache, httpChannel, topic) {
|
||||
const request = this._channelToRequest.get(httpChannel);
|
||||
if (request)
|
||||
request._sendOnResponse(fromCache);
|
||||
}
|
||||
|
||||
_onServiceWorkerResponse(channel, topic) {
|
||||
if (!(channel instanceof Ci.nsIHttpChannel))
|
||||
return;
|
||||
const httpChannel = channel.QueryInterface(Ci.nsIHttpChannel);
|
||||
const channelId = httpChannel.channelId + '';
|
||||
this._channelIdsFulfilledByServiceWorker.add(channelId);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this._activityDistributor.removeObserver(this);
|
||||
ChannelEventSinkFactory.unregister();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
}
|
||||
|
||||
const protocolVersionNames = {
|
||||
[Ci.nsITransportSecurityInfo.TLS_VERSION_1]: 'TLS 1',
|
||||
[Ci.nsITransportSecurityInfo.TLS_VERSION_1_1]: 'TLS 1.1',
|
||||
[Ci.nsITransportSecurityInfo.TLS_VERSION_1_2]: 'TLS 1.2',
|
||||
[Ci.nsITransportSecurityInfo.TLS_VERSION_1_3]: 'TLS 1.3',
|
||||
};
|
||||
|
||||
function getSecurityDetails(httpChannel) {
|
||||
const securityInfo = httpChannel.securityInfo;
|
||||
if (!securityInfo)
|
||||
return null;
|
||||
securityInfo.QueryInterface(Ci.nsITransportSecurityInfo);
|
||||
if (!securityInfo.serverCert)
|
||||
return null;
|
||||
return {
|
||||
protocol: protocolVersionNames[securityInfo.protocolVersion] || '<unknown>',
|
||||
subjectName: securityInfo.serverCert.commonName,
|
||||
issuer: securityInfo.serverCert.issuerCommonName,
|
||||
// Convert to seconds.
|
||||
validFrom: securityInfo.serverCert.validity.notBefore / 1000 / 1000,
|
||||
validTo: securityInfo.serverCert.validity.notAfter / 1000 / 1000,
|
||||
};
|
||||
}
|
||||
|
||||
function readRequestPostData(httpChannel) {
|
||||
if (!(httpChannel instanceof Ci.nsIUploadChannel))
|
||||
return undefined;
|
||||
let iStream = httpChannel.uploadStream;
|
||||
if (!iStream)
|
||||
return undefined;
|
||||
const isSeekableStream = iStream instanceof Ci.nsISeekableStream;
|
||||
|
||||
// For some reason, we cannot rewind back big streams,
|
||||
// so instead we should clone them.
|
||||
const isCloneable = iStream instanceof Ci.nsICloneableInputStream;
|
||||
if (isCloneable)
|
||||
iStream = iStream.clone();
|
||||
|
||||
let prevOffset;
|
||||
if (isSeekableStream) {
|
||||
prevOffset = iStream.tell();
|
||||
iStream.seek(Ci.nsISeekableStream.NS_SEEK_SET, 0);
|
||||
}
|
||||
|
||||
// Read data from the stream.
|
||||
let result = undefined;
|
||||
try {
|
||||
const maxLen = iStream.available();
|
||||
// Cap at 10 MB.
|
||||
if (maxLen <= 10 * 1024 * 1024) {
|
||||
const buffer = NetUtil.readInputStreamToString(iStream, maxLen);
|
||||
result = btoa(buffer);
|
||||
}
|
||||
} catch (err) {
|
||||
}
|
||||
|
||||
// Seek locks the file, so seek to the beginning only if necko hasn't
|
||||
// read it yet, since necko doesn't seek to 0 before reading (at least
|
||||
// not till 459384 is fixed).
|
||||
if (isSeekableStream && prevOffset == 0 && !isCloneable)
|
||||
iStream.seek(Ci.nsISeekableStream.NS_SEEK_SET, 0);
|
||||
return result;
|
||||
}
|
||||
|
||||
function requestHeaders(httpChannel) {
|
||||
const headers = [];
|
||||
httpChannel.visitRequestHeaders({
|
||||
visitHeader: (name, value) => headers.push({name, value}),
|
||||
});
|
||||
return headers;
|
||||
}
|
||||
|
||||
function causeTypeToString(causeType) {
|
||||
for (let key in Ci.nsIContentPolicy) {
|
||||
if (Ci.nsIContentPolicy[key] === causeType)
|
||||
return key;
|
||||
}
|
||||
return 'TYPE_OTHER';
|
||||
}
|
||||
|
||||
function appendExtraHTTPHeaders(httpChannel, headers) {
|
||||
if (!headers)
|
||||
return;
|
||||
for (const header of headers)
|
||||
httpChannel.setRequestHeader(header.name, header.value, false /* merge */);
|
||||
}
|
||||
|
||||
class ResponseStorage {
|
||||
constructor(maxTotalSize, maxResponseSize) {
|
||||
this._totalSize = 0;
|
||||
this._maxResponseSize = maxResponseSize;
|
||||
this._maxTotalSize = maxTotalSize;
|
||||
this._responses = new Map();
|
||||
}
|
||||
|
||||
addResponseBody(request, body) {
|
||||
if (body.length > this._maxResponseSize) {
|
||||
this._responses.set(request.requestId, {
|
||||
evicted: true,
|
||||
body: '',
|
||||
});
|
||||
return;
|
||||
}
|
||||
let encodings = [];
|
||||
if ((request.httpChannel instanceof Ci.nsIEncodedChannel) && request.httpChannel.contentEncodings && !request.httpChannel.applyConversion) {
|
||||
const encodingHeader = request.httpChannel.getResponseHeader("Content-Encoding");
|
||||
encodings = encodingHeader.split(/\s*\t*,\s*\t*/);
|
||||
}
|
||||
this._responses.set(request.requestId, {body, encodings});
|
||||
this._totalSize += body.length;
|
||||
if (this._totalSize > this._maxTotalSize) {
|
||||
for (let [requestId, response] of this._responses) {
|
||||
this._totalSize -= response.body.length;
|
||||
response.body = '';
|
||||
response.evicted = true;
|
||||
if (this._totalSize < this._maxTotalSize)
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getBase64EncodedResponse(requestId) {
|
||||
const response = this._responses.get(requestId);
|
||||
if (!response)
|
||||
throw new Error(`Request "${requestId}" is not found`);
|
||||
if (response.evicted)
|
||||
return {base64body: '', evicted: true};
|
||||
let result = response.body;
|
||||
if (response.encodings && response.encodings.length) {
|
||||
for (const encoding of response.encodings)
|
||||
result = convertString(result, encoding, 'uncompressed');
|
||||
}
|
||||
return {base64body: btoa(result)};
|
||||
}
|
||||
}
|
||||
|
||||
function responseHead(httpChannel, opt_statusCode, opt_statusText) {
|
||||
const headers = [];
|
||||
let status = opt_statusCode || 0;
|
||||
let statusText = opt_statusText || '';
|
||||
try {
|
||||
status = httpChannel.responseStatus;
|
||||
statusText = httpChannel.responseStatusText;
|
||||
httpChannel.visitResponseHeaders({
|
||||
visitHeader: (name, value) => headers.push({name, value}),
|
||||
});
|
||||
} catch (e) {
|
||||
// Response headers, status and/or statusText are not available
|
||||
// when redirect did not actually hit the network.
|
||||
}
|
||||
return { status, statusText, headers };
|
||||
}
|
||||
|
||||
function setPostData(httpChannel, postData, headers) {
|
||||
if (!(httpChannel instanceof Ci.nsIUploadChannel2))
|
||||
return;
|
||||
const synthesized = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(Ci.nsIStringInputStream);
|
||||
const body = atob(postData);
|
||||
synthesized.setData(body, body.length);
|
||||
|
||||
const overriddenHeader = (lowerCaseName) => {
|
||||
if (headers) {
|
||||
for (const header of headers) {
|
||||
if (header.name.toLowerCase() === lowerCaseName) {
|
||||
return header.value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
// Clear content-length, so that upload stream resets it.
|
||||
httpChannel.setRequestHeader('content-length', '', false /* merge */);
|
||||
let contentType = overriddenHeader('content-type');
|
||||
if (contentType === undefined) {
|
||||
try {
|
||||
contentType = httpChannel.getRequestHeader('content-type');
|
||||
} catch (e) {
|
||||
if (e.result == Cr.NS_ERROR_NOT_AVAILABLE)
|
||||
contentType = 'application/octet-stream';
|
||||
else
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
httpChannel.explicitSetUploadStream(synthesized, contentType, -1, httpChannel.requestMethod, false);
|
||||
}
|
||||
|
||||
function convertString(s, source, dest) {
|
||||
const is = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(
|
||||
Ci.nsIStringInputStream
|
||||
);
|
||||
is.setData(s, s.length);
|
||||
const listener = Cc["@mozilla.org/network/stream-loader;1"].createInstance(
|
||||
Ci.nsIStreamLoader
|
||||
);
|
||||
let result = [];
|
||||
listener.init({
|
||||
onStreamComplete: function onStreamComplete(
|
||||
loader,
|
||||
context,
|
||||
status,
|
||||
length,
|
||||
data
|
||||
) {
|
||||
const array = Array.from(data);
|
||||
const kChunk = 100000;
|
||||
for (let i = 0; i < length; i += kChunk) {
|
||||
const len = Math.min(kChunk, length - i);
|
||||
const chunk = String.fromCharCode.apply(this, array.slice(i, i + len));
|
||||
result.push(chunk);
|
||||
}
|
||||
},
|
||||
});
|
||||
const converter = Cc["@mozilla.org/streamConverters;1"].getService(
|
||||
Ci.nsIStreamConverterService
|
||||
).asyncConvertData(
|
||||
source,
|
||||
dest,
|
||||
listener,
|
||||
null
|
||||
);
|
||||
converter.onStartRequest(null, null);
|
||||
converter.onDataAvailable(null, is, 0, s.length);
|
||||
converter.onStopRequest(null, null, null);
|
||||
return result.join('');
|
||||
}
|
||||
|
||||
const errorMap = {
|
||||
'aborted': Cr.NS_ERROR_ABORT,
|
||||
'accessdenied': Cr.NS_ERROR_PORT_ACCESS_NOT_ALLOWED,
|
||||
'addressunreachable': Cr.NS_ERROR_UNKNOWN_HOST,
|
||||
'blockedbyclient': Cr.NS_ERROR_FAILURE,
|
||||
'blockedbyresponse': Cr.NS_ERROR_FAILURE,
|
||||
'connectionaborted': Cr.NS_ERROR_NET_INTERRUPT,
|
||||
'connectionclosed': Cr.NS_ERROR_FAILURE,
|
||||
'connectionfailed': Cr.NS_ERROR_FAILURE,
|
||||
'connectionrefused': Cr.NS_ERROR_CONNECTION_REFUSED,
|
||||
'connectionreset': Cr.NS_ERROR_NET_RESET,
|
||||
'internetdisconnected': Cr.NS_ERROR_OFFLINE,
|
||||
'namenotresolved': Cr.NS_ERROR_UNKNOWN_HOST,
|
||||
'timedout': Cr.NS_ERROR_NET_TIMEOUT,
|
||||
'failed': Cr.NS_ERROR_FAILURE,
|
||||
};
|
||||
|
||||
PageNetwork.Events = {
|
||||
Request: Symbol('PageNetwork.Events.Request'),
|
||||
Response: Symbol('PageNetwork.Events.Response'),
|
||||
RequestFinished: Symbol('PageNetwork.Events.RequestFinished'),
|
||||
RequestFailed: Symbol('PageNetwork.Events.RequestFailed'),
|
||||
};
|
||||
|
||||
var EXPORTED_SYMBOLS = ['NetworkObserver', 'PageNetwork'];
|
||||
this.NetworkObserver = NetworkObserver;
|
||||
this.PageNetwork = PageNetwork;
|
|
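To make the interception surface above concrete, here is a rough sketch of how a protocol handler is expected to drive PageNetwork (obtaining `target` from the TargetRegistry is assumed and not shown):

  const pageNetwork = PageNetwork.forPageTarget(target);
  pageNetwork.enableRequestInterception();
  // PageNetwork is EventEmitter-decorated; helper.on() drops the event-name argument.
  helper.on(pageNetwork, PageNetwork.Events.Request, (request, frameId) => {
    if (!request.isIntercepted)
      return;
    // Let the request continue unchanged...
    pageNetwork.resumeInterceptedRequest(request.requestId);
    // ...or fulfill / abort it instead:
    // pageNetwork.fulfillInterceptedRequest(request.requestId, 200, 'OK', [], btoa('hello'));
    // pageNetwork.abortInterceptedRequest(request.requestId, 'aborted');
  });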
@@ -1,180 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";
// Note: this file should be loadable with eval() into worker environment.
// Avoid Components.*, ChromeUtils and global const variables.

const SIMPLE_CHANNEL_MESSAGE_NAME = 'juggler:simplechannel';

class SimpleChannel {
  static createForMessageManager(name, mm) {
    const channel = new SimpleChannel(name);

    const messageListener = {
      receiveMessage: message => channel._onMessage(message.data)
    };
    mm.addMessageListener(SIMPLE_CHANNEL_MESSAGE_NAME, messageListener);

    channel.setTransport({
      sendMessage: obj => mm.sendAsyncMessage(SIMPLE_CHANNEL_MESSAGE_NAME, obj),
      dispose: () => mm.removeMessageListener(SIMPLE_CHANNEL_MESSAGE_NAME, messageListener),
    });

    return channel;
  }

  constructor(name) {
    this._name = name;
    this._messageId = 0;
    this._connectorId = 0;
    this._pendingMessages = new Map();
    this._handlers = new Map();
    this._bufferedIncomingMessages = [];
    this._bufferedOutgoingMessages = [];
    this.transport = {
      sendMessage: null,
      dispose: null,
    };
    this._ready = false;
    this._disposed = false;
  }

  setTransport(transport) {
    this.transport = transport;
    // connection handshake:
    // 1. There are two channel ends in different processes.
    // 2. Both ends start in the `ready = false` state, meaning that they will
    //    not send any messages over transport.
    // 3. Once channel end is created, it sends `READY` message to the other end.
    // 4. Eventually, at least one of the ends receives `READY` message and responds with
    //    `READY_ACK`. We assume at least one of the ends will receive "READY" event from the other, since
    //    channel ends have a "parent-child" relation, i.e. one end is always created before the other one.
    // 5. Once channel end receives either `READY` or `READY_ACK`, it transitions to `ready` state.
    this.transport.sendMessage('READY');
  }

  _markAsReady() {
    if (this._ready)
      return;
    this._ready = true;
    for (const msg of this._bufferedOutgoingMessages)
      this.transport.sendMessage(msg);
    this._bufferedOutgoingMessages = [];
  }

  dispose() {
    if (this._disposed)
      return;
    this._disposed = true;
    for (const {resolve, reject, methodName} of this._pendingMessages.values())
      reject(new Error(`Failed "${methodName}": ${this._name} is disposed.`));
    this._pendingMessages.clear();
    this._handlers.clear();
    this.transport.dispose();
  }

  _rejectCallbacksFromConnector(connectorId) {
    for (const [messageId, callback] of this._pendingMessages) {
      if (callback.connectorId === connectorId) {
        callback.reject(new Error(`Failed "${callback.methodName}": connector for namespace "${callback.namespace}" in channel "${this._name}" is disposed.`));
        this._pendingMessages.delete(messageId);
      }
    }
  }

  connect(namespace) {
    const connectorId = ++this._connectorId;
    return {
      send: (...args) => this._send(namespace, connectorId, ...args),
      emit: (...args) => void this._send(namespace, connectorId, ...args).catch(e => {}),
      dispose: () => this._rejectCallbacksFromConnector(connectorId),
    };
  }

  register(namespace, handler) {
    if (this._handlers.has(namespace))
      throw new Error('ERROR: double-register for namespace ' + namespace);
    this._handlers.set(namespace, handler);
    // Try to re-deliver all pending messages.
    const bufferedRequests = this._bufferedIncomingMessages;
    this._bufferedIncomingMessages = [];
    for (const data of bufferedRequests) {
      this._onMessage(data);
    }
    return () => this.unregister(namespace);
  }

  unregister(namespace) {
    this._handlers.delete(namespace);
  }

  /**
   * @param {string} namespace
   * @param {number} connectorId
   * @param {string} methodName
   * @param {...*} params
   * @return {!Promise<*>}
   */
  async _send(namespace, connectorId, methodName, ...params) {
    if (this._disposed)
      throw new Error(`ERROR: channel ${this._name} is already disposed! Cannot send "${methodName}" to "${namespace}"`);
    const id = ++this._messageId;
    const promise = new Promise((resolve, reject) => {
      this._pendingMessages.set(id, {connectorId, resolve, reject, methodName, namespace});
    });
    const message = {requestId: id, methodName, params, namespace};
    if (this._ready)
      this.transport.sendMessage(message);
    else
      this._bufferedOutgoingMessages.push(message);
    return promise;
  }

  async _onMessage(data) {
    if (data === 'READY') {
      this.transport.sendMessage('READY_ACK');
      this._markAsReady();
      return;
    }
    if (data === 'READY_ACK') {
      this._markAsReady();
      return;
    }
    if (data.responseId) {
      const {resolve, reject} = this._pendingMessages.get(data.responseId);
      this._pendingMessages.delete(data.responseId);
      if (data.error)
        reject(new Error(data.error));
      else
        resolve(data.result);
    } else if (data.requestId) {
      const namespace = data.namespace;
      const handler = this._handlers.get(namespace);
      if (!handler) {
        this._bufferedIncomingMessages.push(data);
        return;
      }
      const method = handler[data.methodName];
      if (!method) {
        this.transport.sendMessage({responseId: data.requestId, error: `error in channel "${this._name}": No method "${data.methodName}" in namespace "${namespace}"`});
        return;
      }
      try {
        const result = await method.call(handler, ...data.params);
        this.transport.sendMessage({responseId: data.requestId, result});
      } catch (error) {
        this.transport.sendMessage({responseId: data.requestId, error: `error in channel "${this._name}": exception while running method "${data.methodName}" in namespace "${namespace}": ${error.message} ${error.stack}`});
        return;
      }
    } else {
      dump(`
ERROR: unknown message in channel "${this._name}": ${JSON.stringify(data)}
`);
    }
  }
}

var EXPORTED_SYMBOLS = ['SimpleChannel'];
this.SimpleChannel = SimpleChannel;
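A compact sketch of the request/response flow the channel above implements: one end registers a handler under a namespace, the other end connects to the same namespace and calls into it. The message managers below are assumed to come from a matching browser/content pair, and the 'page' namespace and navigate() method are illustrative only:

  // Parent side: expose a handler under the 'page' namespace.
  const parentChannel = SimpleChannel.createForMessageManager('channel-parent', parentMessageManager);
  parentChannel.register('page', {
    async navigate(url) { /* ... */ return { ok: true }; },
  });

  // Content side (inside an async function): connect and call across processes.
  const contentChannel = SimpleChannel.createForMessageManager('channel-content', contentMessageManager);
  const page = contentChannel.connect('page');
  const result = await page.send('navigate', 'https://example.com');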
The diff for this file is not shown because it is too large
@@ -1,135 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

var EXPORTED_SYMBOLS = ["Juggler", "JugglerFactory"];

const {XPCOMUtils} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
const {ComponentUtils} = ChromeUtils.import("resource://gre/modules/ComponentUtils.jsm");
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
const {Dispatcher} = ChromeUtils.import("chrome://juggler/content/protocol/Dispatcher.js");
const {BrowserHandler} = ChromeUtils.import("chrome://juggler/content/protocol/BrowserHandler.js");
const {NetworkObserver} = ChromeUtils.import("chrome://juggler/content/NetworkObserver.js");
const {TargetRegistry} = ChromeUtils.import("chrome://juggler/content/TargetRegistry.js");
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
const helper = new Helper();

const Cc = Components.classes;
const Ci = Components.interfaces;

const FRAME_SCRIPT = "chrome://juggler/content/content/main.js";

let browserStartupFinishedCallback;
let browserStartupFinishedPromise = new Promise(x => browserStartupFinishedCallback = x);

class Juggler {
  get classDescription() { return "Sample command-line handler"; }
  get classID() { return Components.ID('{f7a74a33-e2ab-422d-b022-4fb213dd2639}'); }
  get contractID() { return "@mozilla.org/remote/juggler;1" }
  get QueryInterface() {
    return ChromeUtils.generateQI([ Ci.nsICommandLineHandler, Ci.nsIObserver ]);
  }
  get helpInfo() {
    return " --juggler Enable Juggler automation\n";
  }

  handle(cmdLine) {
    // flag has to be consumed in nsICommandLineHandler:handle
    // to avoid issues on macos. See Marionette.jsm::handle() for more details.
    // TODO: remove after Bug 1724251 is fixed.
    cmdLine.handleFlag("juggler-pipe", false);
  }

  // This flow is taken from Remote agent and Marionette.
  // See https://github.com/mozilla/gecko-dev/blob/0c1b4921830e6af8bc951da01d7772de2fe60a08/remote/components/RemoteAgent.jsm#L302
  async observe(subject, topic) {
    switch (topic) {
      case "profile-after-change":
        Services.obs.addObserver(this, "command-line-startup");
        Services.obs.addObserver(this, "browser-idle-startup-tasks-finished");
        break;
      case "command-line-startup":
        Services.obs.removeObserver(this, topic);
        const cmdLine = subject;
        const jugglerPipeFlag = cmdLine.handleFlag('juggler-pipe', false);
        if (!jugglerPipeFlag)
          return;

        this._silent = cmdLine.findFlag('silent', false) >= 0;
        if (this._silent) {
          Services.startup.enterLastWindowClosingSurvivalArea();
          browserStartupFinishedCallback();
        }
        Services.obs.addObserver(this, "final-ui-startup");
        break;
      case "browser-idle-startup-tasks-finished":
        browserStartupFinishedCallback();
        break;
      // Used to wait until the initial application window has been opened.
      case "final-ui-startup":
        Services.obs.removeObserver(this, topic);

        const targetRegistry = new TargetRegistry();
        new NetworkObserver(targetRegistry);

        const loadFrameScript = () => {
          Services.mm.loadFrameScript(FRAME_SCRIPT, true /* aAllowDelayedLoad */);
          if (Cc["@mozilla.org/gfx/info;1"].getService(Ci.nsIGfxInfo).isHeadless) {
            const styleSheetService = Cc["@mozilla.org/content/style-sheet-service;1"].getService(Components.interfaces.nsIStyleSheetService);
            const ioService = Cc["@mozilla.org/network/io-service;1"].getService(Components.interfaces.nsIIOService);
            const uri = ioService.newURI('chrome://juggler/content/content/hidden-scrollbars.css', null, null);
            styleSheetService.loadAndRegisterSheet(uri, styleSheetService.AGENT_SHEET);
          }
        };

        // Force create hidden window here, otherwise its creation later closes the web socket!
        Services.appShell.hiddenDOMWindow;

        let pipeStopped = false;
        let browserHandler;
        const pipe = Cc['@mozilla.org/juggler/remotedebuggingpipe;1'].getService(Ci.nsIRemoteDebuggingPipe);
        const connection = {
          QueryInterface: ChromeUtils.generateQI([Ci.nsIRemoteDebuggingPipeClient]),
          receiveMessage(message) {
            if (this.onmessage)
              this.onmessage({ data: message });
          },
          disconnected() {
            if (browserHandler)
              browserHandler['Browser.close']();
          },
          send(message) {
            if (pipeStopped) {
              // We are missing the response to Browser.close,
              // but everything works fine. Once we actually need it,
              // we have to stop the pipe after the response is sent.
              return;
            }
            pipe.sendMessage(message);
          },
        };
        pipe.init(connection);
        const dispatcher = new Dispatcher(connection);
        browserHandler = new BrowserHandler(dispatcher.rootSession(), dispatcher, targetRegistry, () => {
          if (this._silent)
            Services.startup.exitLastWindowClosingSurvivalArea();
          connection.onclose();
          pipe.stop();
          pipeStopped = true;
        }, () => browserStartupFinishedPromise);
        dispatcher.rootSession().setHandler(browserHandler);
        loadFrameScript();
        dump(`\nJuggler listening to the pipe\n`);
        break;
    }
  }

}

const jugglerInstance = new Juggler();

// This is used by the XPCOM codepath which expects a constructor
var JugglerFactory = function() {
  return jugglerInstance;
};
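The command-line handler above only activates Juggler when the browser is started with the juggler-pipe flag; a hypothetical launch sketch from Node (the exact argv and paths are assumptions for illustration):

  const { spawn } = require('child_process');
  // '-silent' additionally suppresses the initial window, matching the this._silent branch above.
  spawn('/path/to/firefox/firefox', ['-juggler-pipe', '-silent', '-profile', '/tmp/juggler-profile']);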
@@ -1,18 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

Classes = [
    # Juggler
    {
        "cid": "{f7a74a33-e2ab-422d-b022-4fb213dd2639}",
        "contract_ids": ["@mozilla.org/remote/juggler;1"],
        "categories": {
            "command-line-handler": "m-remote",
            "profile-after-change": "Juggler",
        },
        "jsm": "chrome://juggler/content/components/Juggler.js",
        "constructor": "JugglerFactory",
    },
]
@@ -1,6 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

XPCOM_MANIFESTS += ["components.conf"]
@ -1,634 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";
const Ci = Components.interfaces;
const Cr = Components.results;
const Cu = Components.utils;

const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
const {SimpleChannel} = ChromeUtils.import('chrome://juggler/content/SimpleChannel.js');
const {EventEmitter} = ChromeUtils.import('resource://gre/modules/EventEmitter.jsm');
const {Runtime} = ChromeUtils.import('chrome://juggler/content/content/Runtime.js');

const helper = new Helper();

class FrameTree {
|
||||
constructor(rootDocShell) {
|
||||
EventEmitter.decorate(this);
|
||||
|
||||
this._browsingContextGroup = rootDocShell.browsingContext.group;
|
||||
if (!this._browsingContextGroup.__jugglerFrameTrees)
|
||||
this._browsingContextGroup.__jugglerFrameTrees = new Set();
|
||||
this._browsingContextGroup.__jugglerFrameTrees.add(this);
|
||||
this._isolatedWorlds = new Map();
|
||||
|
||||
this._webSocketEventService = Cc[
|
||||
"@mozilla.org/websocketevent/service;1"
|
||||
].getService(Ci.nsIWebSocketEventService);
|
||||
|
||||
this._runtime = new Runtime(false /* isWorker */);
|
||||
this._workers = new Map();
|
||||
this._docShellToFrame = new Map();
|
||||
this._frameIdToFrame = new Map();
|
||||
this._pageReady = false;
|
||||
this._mainFrame = this._createFrame(rootDocShell);
|
||||
const webProgress = rootDocShell.QueryInterface(Ci.nsIInterfaceRequestor)
|
||||
.getInterface(Ci.nsIWebProgress);
|
||||
this.QueryInterface = ChromeUtils.generateQI([
|
||||
Ci.nsIWebProgressListener,
|
||||
Ci.nsIWebProgressListener2,
|
||||
Ci.nsISupportsWeakReference,
|
||||
]);
|
||||
|
||||
this._addedScrollbarsStylesheetSymbol = Symbol('_addedScrollbarsStylesheetSymbol');
|
||||
|
||||
this._wdm = Cc["@mozilla.org/dom/workers/workerdebuggermanager;1"].createInstance(Ci.nsIWorkerDebuggerManager);
|
||||
this._wdmListener = {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsIWorkerDebuggerManagerListener]),
|
||||
onRegister: this._onWorkerCreated.bind(this),
|
||||
onUnregister: this._onWorkerDestroyed.bind(this),
|
||||
};
|
||||
this._wdm.addListener(this._wdmListener);
|
||||
for (const workerDebugger of this._wdm.getWorkerDebuggerEnumerator())
|
||||
this._onWorkerCreated(workerDebugger);
|
||||
|
||||
const flags = Ci.nsIWebProgress.NOTIFY_STATE_DOCUMENT |
|
||||
Ci.nsIWebProgress.NOTIFY_LOCATION;
|
||||
this._eventListeners = [
|
||||
helper.addObserver(this._onDOMWindowCreated.bind(this), 'content-document-global-created'),
|
||||
helper.addObserver(this._onDOMWindowCreated.bind(this), 'juggler-dom-window-reused'),
|
||||
helper.addObserver(subject => this._onDocShellCreated(subject.QueryInterface(Ci.nsIDocShell)), 'webnavigation-create'),
|
||||
helper.addObserver(subject => this._onDocShellDestroyed(subject.QueryInterface(Ci.nsIDocShell)), 'webnavigation-destroy'),
|
||||
helper.addProgressListener(webProgress, this, flags),
|
||||
];
|
||||
}
|
||||
|
||||
workers() {
|
||||
return [...this._workers.values()];
|
||||
}
|
||||
|
||||
runtime() {
|
||||
return this._runtime;
|
||||
}
|
||||
|
||||
setInitScripts(scripts) {
|
||||
for (const world of this._isolatedWorlds.values())
|
||||
world._scriptsToEvaluateOnNewDocument = [];
|
||||
|
||||
for (let { worldName, script } of scripts) {
|
||||
worldName = worldName || '';
|
||||
const existing = this._isolatedWorlds.has(worldName);
|
||||
const world = this._ensureWorld(worldName);
|
||||
world._scriptsToEvaluateOnNewDocument.push(script);
|
||||
// FIXME: 'should inherit http credentials from browser context' fails without this
|
||||
if (worldName && !existing) {
|
||||
for (const frame of this.frames())
|
||||
frame._createIsolatedContext(worldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ensureWorld(worldName) {
|
||||
worldName = worldName || '';
|
||||
let world = this._isolatedWorlds.get(worldName);
|
||||
if (!world) {
|
||||
world = new IsolatedWorld(worldName);
|
||||
this._isolatedWorlds.set(worldName, world);
|
||||
}
|
||||
return world;
|
||||
}
|
||||
|
||||
_frameForWorker(workerDebugger) {
|
||||
if (workerDebugger.type !== Ci.nsIWorkerDebugger.TYPE_DEDICATED)
|
||||
return null;
|
||||
if (!workerDebugger.window)
|
||||
return null;
|
||||
const docShell = workerDebugger.window.docShell;
|
||||
return this._docShellToFrame.get(docShell) || null;
|
||||
}
|
||||
|
||||
_onDOMWindowCreated(window) {
|
||||
if (!window[this._addedScrollbarsStylesheetSymbol] && this.scrollbarsHidden) {
|
||||
const styleSheetService = Cc["@mozilla.org/content/style-sheet-service;1"].getService(Components.interfaces.nsIStyleSheetService);
|
||||
const ioService = Cc["@mozilla.org/network/io-service;1"].getService(Components.interfaces.nsIIOService);
|
||||
const uri = ioService.newURI('chrome://juggler/content/content/hidden-scrollbars.css', null, null);
|
||||
const sheet = styleSheetService.preloadSheet(uri, styleSheetService.AGENT_SHEET);
|
||||
window.windowUtils.addSheet(sheet, styleSheetService.AGENT_SHEET);
|
||||
window[this._addedScrollbarsStylesheetSymbol] = true;
|
||||
}
|
||||
const frame = this._docShellToFrame.get(window.docShell) || null;
|
||||
if (!frame)
|
||||
return;
|
||||
frame._onGlobalObjectCleared();
|
||||
}
|
||||
|
||||
setScrollbarsHidden(hidden) {
|
||||
this.scrollbarsHidden = hidden;
|
||||
}
|
||||
|
||||
_onWorkerCreated(workerDebugger) {
|
||||
// Note: we do not interoperate with firefox devtools.
|
||||
if (workerDebugger.isInitialized)
|
||||
return;
|
||||
const frame = this._frameForWorker(workerDebugger);
|
||||
if (!frame)
|
||||
return;
|
||||
const worker = new Worker(frame, workerDebugger);
|
||||
this._workers.set(workerDebugger, worker);
|
||||
this.emit(FrameTree.Events.WorkerCreated, worker);
|
||||
}
|
||||
|
||||
_onWorkerDestroyed(workerDebugger) {
|
||||
const worker = this._workers.get(workerDebugger);
|
||||
if (!worker)
|
||||
return;
|
||||
worker.dispose();
|
||||
this._workers.delete(workerDebugger);
|
||||
this.emit(FrameTree.Events.WorkerDestroyed, worker);
|
||||
}
|
||||
|
||||
allFramesInBrowsingContextGroup(group) {
|
||||
const frames = [];
|
||||
for (const frameTree of (group.__jugglerFrameTrees || []))
|
||||
frames.push(...frameTree.frames());
|
||||
return frames;
|
||||
}
|
||||
|
||||
isPageReady() {
|
||||
return this._pageReady;
|
||||
}
|
||||
|
||||
forcePageReady() {
|
||||
if (this._pageReady)
|
||||
return false;
|
||||
this._pageReady = true;
|
||||
this.emit(FrameTree.Events.PageReady);
|
||||
return true;
|
||||
}
|
||||
|
||||
addBinding(worldName, name, script) {
|
||||
worldName = worldName || '';
|
||||
const world = this._ensureWorld(worldName);
|
||||
world._bindings.set(name, script);
|
||||
for (const frame of this.frames())
|
||||
frame._addBinding(worldName, name, script);
|
||||
}
|
||||
|
||||
frameForDocShell(docShell) {
|
||||
return this._docShellToFrame.get(docShell) || null;
|
||||
}
|
||||
|
||||
frame(frameId) {
|
||||
return this._frameIdToFrame.get(frameId) || null;
|
||||
}
|
||||
|
||||
frames() {
|
||||
let result = [];
|
||||
collect(this._mainFrame);
|
||||
return result;
|
||||
|
||||
function collect(frame) {
|
||||
result.push(frame);
|
||||
for (const subframe of frame._children)
|
||||
collect(subframe);
|
||||
}
|
||||
}
|
||||
|
||||
mainFrame() {
|
||||
return this._mainFrame;
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this._browsingContextGroup.__jugglerFrameTrees.delete(this);
|
||||
this._wdm.removeListener(this._wdmListener);
|
||||
this._runtime.dispose();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
|
||||
onStateChange(progress, request, flag, status) {
|
||||
if (!(request instanceof Ci.nsIChannel))
|
||||
return;
|
||||
const channel = request.QueryInterface(Ci.nsIChannel);
|
||||
const docShell = progress.DOMWindow.docShell;
|
||||
const frame = this._docShellToFrame.get(docShell);
|
||||
if (!frame) {
|
||||
dump(`ERROR: got a state changed event for un-tracked docshell!\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!channel.isDocument) {
|
||||
// Somehow, we can get worker requests here,
// even though we are only interested in frame documents.
|
||||
return;
|
||||
}
|
||||
|
||||
const isStart = flag & Ci.nsIWebProgressListener.STATE_START;
|
||||
const isTransferring = flag & Ci.nsIWebProgressListener.STATE_TRANSFERRING;
|
||||
const isStop = flag & Ci.nsIWebProgressListener.STATE_STOP;
|
||||
const isDocument = flag & Ci.nsIWebProgressListener.STATE_IS_DOCUMENT;
|
||||
|
||||
if (isStart) {
|
||||
// Starting a new navigation.
|
||||
frame._pendingNavigationId = channelId(channel);
|
||||
frame._pendingNavigationURL = channel.URI.spec;
|
||||
this.emit(FrameTree.Events.NavigationStarted, frame);
|
||||
} else if (isTransferring || (isStop && frame._pendingNavigationId && !status)) {
|
||||
// Navigation is committed.
|
||||
for (const subframe of frame._children)
|
||||
this._detachFrame(subframe);
|
||||
const navigationId = frame._pendingNavigationId;
|
||||
frame._pendingNavigationId = null;
|
||||
frame._pendingNavigationURL = null;
|
||||
frame._lastCommittedNavigationId = navigationId;
|
||||
frame._url = channel.URI.spec;
|
||||
this.emit(FrameTree.Events.NavigationCommitted, frame);
|
||||
if (frame === this._mainFrame)
|
||||
this.forcePageReady();
|
||||
} else if (isStop && frame._pendingNavigationId && status) {
|
||||
// Navigation is aborted.
|
||||
const navigationId = frame._pendingNavigationId;
|
||||
frame._pendingNavigationId = null;
|
||||
frame._pendingNavigationURL = null;
|
||||
// Always report download navigation as failure to match other browsers.
|
||||
const errorText = helper.getNetworkErrorStatusText(status);
|
||||
this.emit(FrameTree.Events.NavigationAborted, frame, navigationId, errorText);
|
||||
if (frame === this._mainFrame && status !== Cr.NS_BINDING_ABORTED)
|
||||
this.forcePageReady();
|
||||
}
|
||||
|
||||
if (isStop && isDocument)
|
||||
this.emit(FrameTree.Events.Load, frame);
|
||||
}
|
||||
|
||||
onLocationChange(progress, request, location, flags) {
|
||||
const docShell = progress.DOMWindow.docShell;
|
||||
const frame = this._docShellToFrame.get(docShell);
|
||||
const sameDocumentNavigation = !!(flags & Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT);
|
||||
if (frame && sameDocumentNavigation) {
|
||||
frame._url = location.spec;
|
||||
this.emit(FrameTree.Events.SameDocumentNavigation, frame);
|
||||
}
|
||||
}
|
||||
|
||||
_onDocShellCreated(docShell) {
|
||||
// Bug 1142752: sometimes, the docshell appears to be immediately
// destroyed; bail out early to prevent random exceptions.
|
||||
if (docShell.isBeingDestroyed())
|
||||
return;
|
||||
// If this docShell doesn't belong to our frame tree - do nothing.
|
||||
let root = docShell;
|
||||
while (root.parent)
|
||||
root = root.parent;
|
||||
if (root === this._mainFrame._docShell)
|
||||
this._createFrame(docShell);
|
||||
}
|
||||
|
||||
_createFrame(docShell) {
|
||||
const parentFrame = this._docShellToFrame.get(docShell.parent) || null;
|
||||
const frame = new Frame(this, this._runtime, docShell, parentFrame);
|
||||
this._docShellToFrame.set(docShell, frame);
|
||||
this._frameIdToFrame.set(frame.id(), frame);
|
||||
this.emit(FrameTree.Events.FrameAttached, frame);
|
||||
// Create execution context **after** reporting frame.
|
||||
// This is our protocol contract.
|
||||
if (frame.domWindow())
|
||||
frame._onGlobalObjectCleared();
|
||||
return frame;
|
||||
}
|
||||
|
||||
_onDocShellDestroyed(docShell) {
|
||||
const frame = this._docShellToFrame.get(docShell);
|
||||
if (frame)
|
||||
this._detachFrame(frame);
|
||||
}
|
||||
|
||||
_detachFrame(frame) {
|
||||
// Detach all children first
|
||||
for (const subframe of frame._children)
|
||||
this._detachFrame(subframe);
|
||||
this._docShellToFrame.delete(frame._docShell);
|
||||
this._frameIdToFrame.delete(frame.id());
|
||||
if (frame._parentFrame)
|
||||
frame._parentFrame._children.delete(frame);
|
||||
frame._parentFrame = null;
|
||||
frame.dispose();
|
||||
this.emit(FrameTree.Events.FrameDetached, frame);
|
||||
}
|
||||
}
|
||||
|
||||
FrameTree.Events = {
|
||||
FrameAttached: 'frameattached',
|
||||
FrameDetached: 'framedetached',
|
||||
WorkerCreated: 'workercreated',
|
||||
WorkerDestroyed: 'workerdestroyed',
|
||||
WebSocketCreated: 'websocketcreated',
|
||||
WebSocketOpened: 'websocketopened',
|
||||
WebSocketClosed: 'websocketclosed',
|
||||
WebSocketFrameReceived: 'websocketframereceived',
|
||||
WebSocketFrameSent: 'websocketframesent',
|
||||
NavigationStarted: 'navigationstarted',
|
||||
NavigationCommitted: 'navigationcommitted',
|
||||
NavigationAborted: 'navigationaborted',
|
||||
SameDocumentNavigation: 'samedocumentnavigation',
|
||||
PageReady: 'pageready',
|
||||
Load: 'load',
|
||||
};
|
||||
|
||||
class IsolatedWorld {
|
||||
constructor(name) {
|
||||
this._name = name;
|
||||
this._scriptsToEvaluateOnNewDocument = [];
|
||||
this._bindings = new Map();
|
||||
}
|
||||
}
|
||||
|
||||
class Frame {
|
||||
constructor(frameTree, runtime, docShell, parentFrame) {
|
||||
this._frameTree = frameTree;
|
||||
this._runtime = runtime;
|
||||
this._docShell = docShell;
|
||||
this._children = new Set();
|
||||
this._frameId = helper.browsingContextToFrameId(this._docShell.browsingContext);
|
||||
this._parentFrame = null;
|
||||
this._url = '';
|
||||
if (docShell.domWindow && docShell.domWindow.location)
|
||||
this._url = docShell.domWindow.location.href;
|
||||
if (parentFrame) {
|
||||
this._parentFrame = parentFrame;
|
||||
parentFrame._children.add(this);
|
||||
}
|
||||
|
||||
this._lastCommittedNavigationId = null;
|
||||
this._pendingNavigationId = null;
|
||||
this._pendingNavigationURL = null;
|
||||
|
||||
this._textInputProcessor = null;
|
||||
|
||||
this._worldNameToContext = new Map();
|
||||
this._initialNavigationDone = false;
|
||||
|
||||
this._webSocketListenerInnerWindowId = 0;
|
||||
// WebSocketListener may fire the frameReceived event before webSocketOpened.
// To preserve ordering, buffer received frames until the socket is reported as opened.
|
||||
this._webSocketInfos = new Map();
|
||||
|
||||
const dispatchWebSocketFrameReceived = (webSocketSerialID, frame) => this._frameTree.emit(FrameTree.Events.WebSocketFrameReceived, {
|
||||
frameId: this._frameId,
|
||||
wsid: webSocketSerialID + '',
|
||||
opcode: frame.opCode,
|
||||
data: frame.opCode !== 1 ? btoa(frame.payload) : frame.payload,
|
||||
});
|
||||
this._webSocketListener = {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsIWebSocketEventListener, ]),
|
||||
|
||||
webSocketCreated: (webSocketSerialID, uri, protocols) => {
|
||||
this._frameTree.emit(FrameTree.Events.WebSocketCreated, {
|
||||
frameId: this._frameId,
|
||||
wsid: webSocketSerialID + '',
|
||||
requestURL: uri,
|
||||
});
|
||||
this._webSocketInfos.set(webSocketSerialID, {
|
||||
opened: false,
|
||||
pendingIncomingFrames: [],
|
||||
});
|
||||
},
|
||||
|
||||
webSocketOpened: (webSocketSerialID, effectiveURI, protocols, extensions, httpChannelId) => {
|
||||
this._frameTree.emit(FrameTree.Events.WebSocketOpened, {
|
||||
frameId: this._frameId,
|
||||
requestId: httpChannelId + '',
|
||||
wsid: webSocketSerialID + '',
|
||||
effectiveURL: effectiveURI,
|
||||
});
|
||||
const info = this._webSocketInfos.get(webSocketSerialID);
|
||||
info.opened = true;
|
||||
for (const frame of info.pendingIncomingFrames)
|
||||
dispatchWebSocketFrameReceived(webSocketSerialID, frame);
|
||||
},
|
||||
|
||||
webSocketMessageAvailable: (webSocketSerialID, data, messageType) => {
|
||||
// We don't use this event.
|
||||
},
|
||||
|
||||
webSocketClosed: (webSocketSerialID, wasClean, code, reason) => {
|
||||
this._webSocketInfos.delete(webSocketSerialID);
|
||||
let error = '';
|
||||
if (!wasClean) {
|
||||
const keys = Object.keys(Ci.nsIWebSocketChannel);
|
||||
for (const key of keys) {
|
||||
if (Ci.nsIWebSocketChannel[key] === code)
|
||||
error = key;
|
||||
}
|
||||
}
|
||||
this._frameTree.emit(FrameTree.Events.WebSocketClosed, {
|
||||
frameId: this._frameId,
|
||||
wsid: webSocketSerialID + '',
|
||||
error,
|
||||
});
|
||||
},
|
||||
|
||||
frameReceived: (webSocketSerialID, frame) => {
|
||||
// Report only text and binary frames.
|
||||
if (frame.opCode !== 1 && frame.opCode !== 2)
|
||||
return;
|
||||
const info = this._webSocketInfos.get(webSocketSerialID);
|
||||
if (info.opened)
|
||||
dispatchWebSocketFrameReceived(webSocketSerialID, frame);
|
||||
else
|
||||
info.pendingIncomingFrames.push(frame);
|
||||
},
|
||||
|
||||
frameSent: (webSocketSerialID, frame) => {
|
||||
// Report only text and binary frames.
|
||||
if (frame.opCode !== 1 && frame.opCode !== 2)
|
||||
return;
|
||||
this._frameTree.emit(FrameTree.Events.WebSocketFrameSent, {
|
||||
frameId: this._frameId,
|
||||
wsid: webSocketSerialID + '',
|
||||
opcode: frame.opCode,
|
||||
data: frame.opCode !== 1 ? btoa(frame.payload) : frame.payload,
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
_createIsolatedContext(name) {
|
||||
const principal = [this.domWindow()]; // extended principal
|
||||
const sandbox = Cu.Sandbox(principal, {
|
||||
sandboxPrototype: this.domWindow(),
|
||||
wantComponents: false,
|
||||
wantExportHelpers: false,
|
||||
wantXrays: true,
|
||||
});
|
||||
const world = this._runtime.createExecutionContext(this.domWindow(), sandbox, {
|
||||
frameId: this.id(),
|
||||
name,
|
||||
});
|
||||
this._worldNameToContext.set(name, world);
|
||||
return world;
|
||||
}
|
||||
|
||||
unsafeObject(objectId) {
|
||||
for (const context of this._worldNameToContext.values()) {
|
||||
const result = context.unsafeObject(objectId);
|
||||
if (result)
|
||||
return result.object;
|
||||
}
|
||||
throw new Error('Cannot find object with id = ' + objectId);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
for (const context of this._worldNameToContext.values())
|
||||
this._runtime.destroyExecutionContext(context);
|
||||
this._worldNameToContext.clear();
|
||||
}
|
||||
|
||||
_addBinding(worldName, name, script) {
|
||||
let executionContext = this._worldNameToContext.get(worldName);
|
||||
if (worldName && !executionContext)
|
||||
executionContext = this._createIsolatedContext(worldName);
|
||||
if (executionContext)
|
||||
executionContext.addBinding(name, script);
|
||||
}
|
||||
|
||||
_onGlobalObjectCleared() {
|
||||
const webSocketService = this._frameTree._webSocketEventService;
|
||||
if (this._webSocketListenerInnerWindowId)
|
||||
webSocketService.removeListener(this._webSocketListenerInnerWindowId, this._webSocketListener);
|
||||
this._webSocketListenerInnerWindowId = this.domWindow().windowGlobalChild.innerWindowId;
|
||||
webSocketService.addListener(this._webSocketListenerInnerWindowId, this._webSocketListener);
|
||||
|
||||
for (const context of this._worldNameToContext.values())
|
||||
this._runtime.destroyExecutionContext(context);
|
||||
this._worldNameToContext.clear();
|
||||
|
||||
this._worldNameToContext.set('', this._runtime.createExecutionContext(this.domWindow(), this.domWindow(), {
|
||||
frameId: this._frameId,
|
||||
name: '',
|
||||
}));
|
||||
for (const [name, world] of this._frameTree._isolatedWorlds) {
|
||||
if (name)
|
||||
this._createIsolatedContext(name);
|
||||
const executionContext = this._worldNameToContext.get(name);
|
||||
// Add bindings before evaluating scripts.
|
||||
for (const [name, script] of world._bindings)
|
||||
executionContext.addBinding(name, script);
|
||||
for (const script of world._scriptsToEvaluateOnNewDocument)
|
||||
executionContext.evaluateScriptSafely(script);
|
||||
}
|
||||
}
|
||||
|
||||
mainExecutionContext() {
|
||||
return this._worldNameToContext.get('');
|
||||
}
|
||||
|
||||
textInputProcessor() {
|
||||
if (!this._textInputProcessor) {
|
||||
this._textInputProcessor = Cc["@mozilla.org/text-input-processor;1"].createInstance(Ci.nsITextInputProcessor);
|
||||
}
|
||||
this._textInputProcessor.beginInputTransactionForTests(this._docShell.DOMWindow);
|
||||
return this._textInputProcessor;
|
||||
}
|
||||
|
||||
pendingNavigationId() {
|
||||
return this._pendingNavigationId;
|
||||
}
|
||||
|
||||
pendingNavigationURL() {
|
||||
return this._pendingNavigationURL;
|
||||
}
|
||||
|
||||
lastCommittedNavigationId() {
|
||||
return this._lastCommittedNavigationId;
|
||||
}
|
||||
|
||||
docShell() {
|
||||
return this._docShell;
|
||||
}
|
||||
|
||||
domWindow() {
|
||||
return this._docShell.domWindow;
|
||||
}
|
||||
|
||||
name() {
|
||||
const frameElement = this._docShell.domWindow.frameElement;
|
||||
let name = '';
|
||||
if (frameElement)
|
||||
name = frameElement.getAttribute('name') || frameElement.getAttribute('id') || '';
|
||||
return name;
|
||||
}
|
||||
|
||||
parentFrame() {
|
||||
return this._parentFrame;
|
||||
}
|
||||
|
||||
id() {
|
||||
return this._frameId;
|
||||
}
|
||||
|
||||
url() {
|
||||
return this._url;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class Worker {
|
||||
constructor(frame, workerDebugger) {
|
||||
this._frame = frame;
|
||||
this._workerId = helper.generateId();
|
||||
this._workerDebugger = workerDebugger;
|
||||
|
||||
workerDebugger.initialize('chrome://juggler/content/content/WorkerMain.js');
|
||||
|
||||
this._channel = new SimpleChannel(`content::worker[${this._workerId}]`);
|
||||
this._channel.setTransport({
|
||||
sendMessage: obj => workerDebugger.postMessage(JSON.stringify(obj)),
|
||||
dispose: () => {},
|
||||
});
|
||||
this._workerDebuggerListener = {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsIWorkerDebuggerListener]),
|
||||
onMessage: msg => void this._channel._onMessage(JSON.parse(msg)),
|
||||
onClose: () => void this._channel.dispose(),
|
||||
onError: (filename, lineno, message) => {
|
||||
dump(`Error in worker: ${message} @${filename}:${lineno}\n`);
|
||||
},
|
||||
};
|
||||
workerDebugger.addListener(this._workerDebuggerListener);
|
||||
}
|
||||
|
||||
channel() {
|
||||
return this._channel;
|
||||
}
|
||||
|
||||
frame() {
|
||||
return this._frame;
|
||||
}
|
||||
|
||||
id() {
|
||||
return this._workerId;
|
||||
}
|
||||
|
||||
url() {
|
||||
return this._workerDebugger.url;
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this._channel.dispose();
|
||||
this._workerDebugger.removeListener(this._workerDebuggerListener);
|
||||
}
|
||||
}
|
||||
|
||||
function channelId(channel) {
|
||||
if (channel instanceof Ci.nsIIdentChannel) {
|
||||
const identChannel = channel.QueryInterface(Ci.nsIIdentChannel);
|
||||
return String(identChannel.channelId);
|
||||
}
|
||||
return helper.generateId();
|
||||
}
|
||||
|
||||
|
||||
var EXPORTED_SYMBOLS = ['FrameTree'];
|
||||
this.FrameTree = FrameTree;
|
||||
|
|
@ -1,894 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
const Ci = Components.interfaces;
|
||||
const Cr = Components.results;
|
||||
const Cu = Components.utils;
|
||||
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
const {NetUtil} = ChromeUtils.import('resource://gre/modules/NetUtil.jsm');
|
||||
const dragService = Cc["@mozilla.org/widget/dragservice;1"].getService(
|
||||
Ci.nsIDragService
|
||||
);
|
||||
const obs = Cc["@mozilla.org/observer-service;1"].getService(
|
||||
Ci.nsIObserverService
|
||||
);
|
||||
|
||||
const helper = new Helper();
|
||||
|
||||
class WorkerData {
|
||||
constructor(pageAgent, browserChannel, worker) {
|
||||
this._workerRuntime = worker.channel().connect('runtime');
|
||||
this._browserWorker = browserChannel.connect(worker.id());
|
||||
this._worker = worker;
|
||||
const emit = name => {
|
||||
return (...args) => this._browserWorker.emit(name, ...args);
|
||||
};
|
||||
this._eventListeners = [
|
||||
worker.channel().register('runtime', {
|
||||
runtimeConsole: emit('runtimeConsole'),
|
||||
runtimeExecutionContextCreated: emit('runtimeExecutionContextCreated'),
|
||||
runtimeExecutionContextDestroyed: emit('runtimeExecutionContextDestroyed'),
|
||||
}),
|
||||
browserChannel.register(worker.id(), {
|
||||
evaluate: (options) => this._workerRuntime.send('evaluate', options),
|
||||
callFunction: (options) => this._workerRuntime.send('callFunction', options),
|
||||
getObjectProperties: (options) => this._workerRuntime.send('getObjectProperties', options),
|
||||
disposeObject: (options) => this._workerRuntime.send('disposeObject', options),
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this._workerRuntime.dispose();
|
||||
this._browserWorker.dispose();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
}
|
||||
|
||||
class PageAgent {
|
||||
constructor(messageManager, browserChannel, frameTree) {
|
||||
this._messageManager = messageManager;
|
||||
this._browserChannel = browserChannel;
|
||||
this._browserPage = browserChannel.connect('page');
|
||||
this._frameTree = frameTree;
|
||||
this._runtime = frameTree.runtime();
|
||||
|
||||
this._workerData = new Map();
|
||||
|
||||
const docShell = frameTree.mainFrame().docShell();
|
||||
this._docShell = docShell;
|
||||
this._initialDPPX = docShell.contentViewer.overrideDPPX;
|
||||
this._dragging = false;
|
||||
|
||||
// Dispatch frameAttached events for all initial frames
|
||||
for (const frame of this._frameTree.frames()) {
|
||||
this._onFrameAttached(frame);
|
||||
if (frame.url())
|
||||
this._onNavigationCommitted(frame);
|
||||
if (frame.pendingNavigationId())
|
||||
this._onNavigationStarted(frame);
|
||||
}
|
||||
|
||||
// Report created workers.
|
||||
for (const worker of this._frameTree.workers())
|
||||
this._onWorkerCreated(worker);
|
||||
|
||||
// Report execution contexts.
|
||||
for (const context of this._runtime.executionContexts())
|
||||
this._onExecutionContextCreated(context);
|
||||
|
||||
if (this._frameTree.isPageReady()) {
|
||||
this._browserPage.emit('pageReady', {});
|
||||
const mainFrame = this._frameTree.mainFrame();
|
||||
const domWindow = mainFrame.domWindow();
|
||||
const document = domWindow ? domWindow.document : null;
|
||||
const readyState = document ? document.readyState : null;
|
||||
// Sometimes we initialize after the initial about:blank page has already been opened.
// In this case, the page might have been loaded already, and we need to issue
// the `DOMContentLoaded` and `load` events ourselves.
|
||||
if (mainFrame.url() === 'about:blank' && readyState === 'complete')
|
||||
this._emitAllEvents(this._frameTree.mainFrame());
|
||||
}
|
||||
|
||||
this._eventListeners = [
|
||||
helper.addObserver(this._linkClicked.bind(this, false), 'juggler-link-click'),
|
||||
helper.addObserver(this._linkClicked.bind(this, true), 'juggler-link-click-sync'),
|
||||
helper.addObserver(this._onWindowOpenInNewContext.bind(this), 'juggler-window-open-in-new-context'),
|
||||
helper.addObserver(this._filePickerShown.bind(this), 'juggler-file-picker-shown'),
|
||||
helper.addEventListener(this._messageManager, 'DOMContentLoaded', this._onDOMContentLoaded.bind(this)),
|
||||
helper.addObserver(this._onDocumentOpenLoad.bind(this), 'juggler-document-open-loaded'),
|
||||
helper.on(this._frameTree, 'load', this._onLoad.bind(this)),
|
||||
helper.on(this._frameTree, 'frameattached', this._onFrameAttached.bind(this)),
|
||||
helper.on(this._frameTree, 'framedetached', this._onFrameDetached.bind(this)),
|
||||
helper.on(this._frameTree, 'navigationstarted', this._onNavigationStarted.bind(this)),
|
||||
helper.on(this._frameTree, 'navigationcommitted', this._onNavigationCommitted.bind(this)),
|
||||
helper.on(this._frameTree, 'navigationaborted', this._onNavigationAborted.bind(this)),
|
||||
helper.on(this._frameTree, 'samedocumentnavigation', this._onSameDocumentNavigation.bind(this)),
|
||||
helper.on(this._frameTree, 'pageready', () => this._browserPage.emit('pageReady', {})),
|
||||
helper.on(this._frameTree, 'workercreated', this._onWorkerCreated.bind(this)),
|
||||
helper.on(this._frameTree, 'workerdestroyed', this._onWorkerDestroyed.bind(this)),
|
||||
helper.on(this._frameTree, 'websocketcreated', event => this._browserPage.emit('webSocketCreated', event)),
|
||||
helper.on(this._frameTree, 'websocketopened', event => this._browserPage.emit('webSocketOpened', event)),
|
||||
helper.on(this._frameTree, 'websocketframesent', event => this._browserPage.emit('webSocketFrameSent', event)),
|
||||
helper.on(this._frameTree, 'websocketframereceived', event => this._browserPage.emit('webSocketFrameReceived', event)),
|
||||
helper.on(this._frameTree, 'websocketclosed', event => this._browserPage.emit('webSocketClosed', event)),
|
||||
helper.addObserver(this._onWindowOpen.bind(this), 'webNavigation-createdNavigationTarget-from-js'),
|
||||
this._runtime.events.onErrorFromWorker((domWindow, message, stack) => {
|
||||
const frame = this._frameTree.frameForDocShell(domWindow.docShell);
|
||||
if (!frame)
|
||||
return;
|
||||
this._browserPage.emit('pageUncaughtError', {
|
||||
frameId: frame.id(),
|
||||
message,
|
||||
stack,
|
||||
});
|
||||
}),
|
||||
this._runtime.events.onConsoleMessage(msg => this._browserPage.emit('runtimeConsole', msg)),
|
||||
this._runtime.events.onRuntimeError(this._onRuntimeError.bind(this)),
|
||||
this._runtime.events.onExecutionContextCreated(this._onExecutionContextCreated.bind(this)),
|
||||
this._runtime.events.onExecutionContextDestroyed(this._onExecutionContextDestroyed.bind(this)),
|
||||
this._runtime.events.onBindingCalled(this._onBindingCalled.bind(this)),
|
||||
browserChannel.register('page', {
|
||||
addBinding: ({ worldName, name, script }) => this._frameTree.addBinding(worldName, name, script),
|
||||
adoptNode: this._adoptNode.bind(this),
|
||||
crash: this._crash.bind(this),
|
||||
describeNode: this._describeNode.bind(this),
|
||||
dispatchKeyEvent: this._dispatchKeyEvent.bind(this),
|
||||
dispatchMouseEvent: this._dispatchMouseEvent.bind(this),
|
||||
dispatchTouchEvent: this._dispatchTouchEvent.bind(this),
|
||||
dispatchTapEvent: this._dispatchTapEvent.bind(this),
|
||||
getContentQuads: this._getContentQuads.bind(this),
|
||||
getFullAXTree: this._getFullAXTree.bind(this),
|
||||
goBack: this._goBack.bind(this),
|
||||
goForward: this._goForward.bind(this),
|
||||
insertText: this._insertText.bind(this),
|
||||
navigate: this._navigate.bind(this),
|
||||
reload: this._reload.bind(this),
|
||||
scrollIntoViewIfNeeded: this._scrollIntoViewIfNeeded.bind(this),
|
||||
setCacheDisabled: this._setCacheDisabled.bind(this),
|
||||
setFileInputFiles: this._setFileInputFiles.bind(this),
|
||||
setInterceptFileChooserDialog: this._setInterceptFileChooserDialog.bind(this),
|
||||
evaluate: this._runtime.evaluate.bind(this._runtime),
|
||||
callFunction: this._runtime.callFunction.bind(this._runtime),
|
||||
getObjectProperties: this._runtime.getObjectProperties.bind(this._runtime),
|
||||
disposeObject: this._runtime.disposeObject.bind(this._runtime),
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
_setCacheDisabled({cacheDisabled}) {
|
||||
const enable = Ci.nsIRequest.LOAD_NORMAL;
|
||||
const disable = Ci.nsIRequest.LOAD_BYPASS_CACHE |
|
||||
Ci.nsIRequest.INHIBIT_CACHING;
|
||||
|
||||
const docShell = this._frameTree.mainFrame().docShell();
|
||||
docShell.defaultLoadFlags = cacheDisabled ? disable : enable;
|
||||
}
|
||||
|
||||
_emitAllEvents(frame) {
|
||||
this._browserPage.emit('pageEventFired', {
|
||||
frameId: frame.id(),
|
||||
name: 'DOMContentLoaded',
|
||||
});
|
||||
this._browserPage.emit('pageEventFired', {
|
||||
frameId: frame.id(),
|
||||
name: 'load',
|
||||
});
|
||||
}
|
||||
|
||||
_onExecutionContextCreated(executionContext) {
|
||||
this._browserPage.emit('runtimeExecutionContextCreated', {
|
||||
executionContextId: executionContext.id(),
|
||||
auxData: executionContext.auxData(),
|
||||
});
|
||||
}
|
||||
|
||||
_onExecutionContextDestroyed(executionContext) {
|
||||
this._browserPage.emit('runtimeExecutionContextDestroyed', {
|
||||
executionContextId: executionContext.id(),
|
||||
});
|
||||
}
|
||||
|
||||
_onWorkerCreated(worker) {
|
||||
const workerData = new WorkerData(this, this._browserChannel, worker);
|
||||
this._workerData.set(worker.id(), workerData);
|
||||
this._browserPage.emit('pageWorkerCreated', {
|
||||
workerId: worker.id(),
|
||||
frameId: worker.frame().id(),
|
||||
url: worker.url(),
|
||||
});
|
||||
}
|
||||
|
||||
_onWorkerDestroyed(worker) {
|
||||
const workerData = this._workerData.get(worker.id());
|
||||
if (!workerData)
|
||||
return;
|
||||
this._workerData.delete(worker.id());
|
||||
workerData.dispose();
|
||||
this._browserPage.emit('pageWorkerDestroyed', {
|
||||
workerId: worker.id(),
|
||||
});
|
||||
}
|
||||
|
||||
_onWindowOpen(subject) {
|
||||
if (!(subject instanceof Ci.nsIPropertyBag2))
|
||||
return;
|
||||
const props = subject.QueryInterface(Ci.nsIPropertyBag2);
|
||||
const hasUrl = props.hasKey('url');
|
||||
const createdDocShell = props.getPropertyAsInterface('createdTabDocShell', Ci.nsIDocShell);
|
||||
if (!hasUrl && createdDocShell === this._docShell && this._frameTree.forcePageReady())
|
||||
this._emitAllEvents(this._frameTree.mainFrame());
|
||||
}
|
||||
|
||||
_setInterceptFileChooserDialog({enabled}) {
|
||||
this._docShell.fileInputInterceptionEnabled = !!enabled;
|
||||
}
|
||||
|
||||
_linkClicked(sync, anchorElement) {
|
||||
if (anchorElement.ownerGlobal.docShell !== this._docShell)
|
||||
return;
|
||||
this._browserPage.emit('pageLinkClicked', { phase: sync ? 'after' : 'before' });
|
||||
}
|
||||
|
||||
_onWindowOpenInNewContext(docShell) {
|
||||
// TODO: unify this with _onWindowOpen if possible.
|
||||
const frame = this._frameTree.frameForDocShell(docShell);
|
||||
if (!frame)
|
||||
return;
|
||||
this._browserPage.emit('pageWillOpenNewWindowAsynchronously');
|
||||
}
|
||||
|
||||
_filePickerShown(inputElement) {
|
||||
const frame = this._findFrameForNode(inputElement);
|
||||
if (!frame)
|
||||
return;
|
||||
this._browserPage.emit('pageFileChooserOpened', {
|
||||
executionContextId: frame.mainExecutionContext().id(),
|
||||
element: frame.mainExecutionContext().rawValueToRemoteObject(inputElement)
|
||||
});
|
||||
}
|
||||
|
||||
_findFrameForNode(node) {
|
||||
return this._frameTree.frames().find(frame => {
|
||||
const doc = frame.domWindow().document;
|
||||
return node === doc || node.ownerDocument === doc;
|
||||
});
|
||||
}
|
||||
|
||||
_onDOMContentLoaded(event) {
|
||||
if (!event.target.ownerGlobal)
|
||||
return;
|
||||
const docShell = event.target.ownerGlobal.docShell;
|
||||
const frame = this._frameTree.frameForDocShell(docShell);
|
||||
if (!frame)
|
||||
return;
|
||||
this._browserPage.emit('pageEventFired', {
|
||||
frameId: frame.id(),
|
||||
name: 'DOMContentLoaded',
|
||||
});
|
||||
}
|
||||
|
||||
_onRuntimeError({ executionContext, message, stack }) {
|
||||
this._browserPage.emit('pageUncaughtError', {
|
||||
frameId: executionContext.auxData().frameId,
|
||||
message: message.toString(),
|
||||
stack: stack.toString(),
|
||||
});
|
||||
}
|
||||
|
||||
_onDocumentOpenLoad(document) {
|
||||
const docShell = document.ownerGlobal.docShell;
|
||||
const frame = this._frameTree.frameForDocShell(docShell);
|
||||
if (!frame)
|
||||
return;
|
||||
this._browserPage.emit('pageEventFired', {
|
||||
frameId: frame.id(),
|
||||
name: 'load'
|
||||
});
|
||||
}
|
||||
|
||||
_onLoad(frame) {
|
||||
this._browserPage.emit('pageEventFired', {
|
||||
frameId: frame.id(),
|
||||
name: 'load'
|
||||
});
|
||||
}
|
||||
|
||||
_onNavigationStarted(frame) {
|
||||
this._browserPage.emit('pageNavigationStarted', {
|
||||
frameId: frame.id(),
|
||||
navigationId: frame.pendingNavigationId(),
|
||||
url: frame.pendingNavigationURL(),
|
||||
});
|
||||
}
|
||||
|
||||
_onNavigationAborted(frame, navigationId, errorText) {
|
||||
this._browserPage.emit('pageNavigationAborted', {
|
||||
frameId: frame.id(),
|
||||
navigationId,
|
||||
errorText,
|
||||
});
|
||||
if (!frame._initialNavigationDone && frame !== this._frameTree.mainFrame())
|
||||
this._emitAllEvents(frame);
|
||||
frame._initialNavigationDone = true;
|
||||
}
|
||||
|
||||
_onSameDocumentNavigation(frame) {
|
||||
this._browserPage.emit('pageSameDocumentNavigation', {
|
||||
frameId: frame.id(),
|
||||
url: frame.url(),
|
||||
});
|
||||
}
|
||||
|
||||
_onNavigationCommitted(frame) {
|
||||
this._browserPage.emit('pageNavigationCommitted', {
|
||||
frameId: frame.id(),
|
||||
navigationId: frame.lastCommittedNavigationId() || undefined,
|
||||
url: frame.url(),
|
||||
name: frame.name(),
|
||||
});
|
||||
frame._initialNavigationDone = true;
|
||||
}
|
||||
|
||||
_onFrameAttached(frame) {
|
||||
this._browserPage.emit('pageFrameAttached', {
|
||||
frameId: frame.id(),
|
||||
parentFrameId: frame.parentFrame() ? frame.parentFrame().id() : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
_onFrameDetached(frame) {
|
||||
this._browserPage.emit('pageFrameDetached', {
|
||||
frameId: frame.id(),
|
||||
});
|
||||
}
|
||||
|
||||
_onBindingCalled({executionContextId, name, payload}) {
|
||||
this._browserPage.emit('pageBindingCalled', {
|
||||
executionContextId,
|
||||
name,
|
||||
payload
|
||||
});
|
||||
}
|
||||
|
||||
dispose() {
|
||||
for (const workerData of this._workerData.values())
|
||||
workerData.dispose();
|
||||
this._workerData.clear();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
|
||||
async _navigate({frameId, url, referer}) {
|
||||
try {
|
||||
const uri = NetUtil.newURI(url);
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid url: "${url}"`);
|
||||
}
|
||||
let referrerURI = null;
|
||||
let referrerInfo = null;
|
||||
if (referer) {
|
||||
try {
|
||||
referrerURI = NetUtil.newURI(referer);
|
||||
const ReferrerInfo = Components.Constructor(
|
||||
'@mozilla.org/referrer-info;1',
|
||||
'nsIReferrerInfo',
|
||||
'init'
|
||||
);
|
||||
referrerInfo = new ReferrerInfo(Ci.nsIHttpChannel.REFERRER_POLICY_UNSET, true, referrerURI);
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid referer: "${referer}"`);
|
||||
}
|
||||
}
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
const docShell = frame.docShell().QueryInterface(Ci.nsIWebNavigation);
|
||||
docShell.loadURI(url, {
|
||||
triggeringPrincipal: Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
flags: Ci.nsIWebNavigation.LOAD_FLAGS_NONE,
|
||||
referrerInfo,
|
||||
postData: null,
|
||||
headers: null,
|
||||
});
|
||||
return {navigationId: frame.pendingNavigationId(), navigationURL: frame.pendingNavigationURL()};
|
||||
}
|
||||
|
||||
async _reload({frameId, url}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
const docShell = frame.docShell().QueryInterface(Ci.nsIWebNavigation);
|
||||
docShell.reload(Ci.nsIWebNavigation.LOAD_FLAGS_NONE);
|
||||
}
|
||||
|
||||
async _goBack({frameId, url}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
const docShell = frame.docShell();
|
||||
if (!docShell.canGoBack)
|
||||
return {success: false};
|
||||
docShell.goBack();
|
||||
return {success: true};
|
||||
}
|
||||
|
||||
async _goForward({frameId, url}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
const docShell = frame.docShell();
|
||||
if (!docShell.canGoForward)
|
||||
return {success: false};
|
||||
docShell.goForward();
|
||||
return {success: true};
|
||||
}
|
||||
|
||||
async _adoptNode({frameId, objectId, executionContextId}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
if (!frame)
|
||||
throw new Error('Failed to find frame with id = ' + frameId);
|
||||
const unsafeObject = frame.unsafeObject(objectId);
|
||||
const context = this._runtime.findExecutionContext(executionContextId);
|
||||
const fromPrincipal = unsafeObject.nodePrincipal;
|
||||
const toFrame = this._frameTree.frame(context.auxData().frameId);
|
||||
const toPrincipal = toFrame.domWindow().document.nodePrincipal;
|
||||
if (!toPrincipal.subsumes(fromPrincipal))
|
||||
return { remoteObject: null };
|
||||
return { remoteObject: context.rawValueToRemoteObject(unsafeObject) };
|
||||
}
|
||||
|
||||
async _setFileInputFiles({objectId, frameId, files}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
if (!frame)
|
||||
throw new Error('Failed to find frame with id = ' + frameId);
|
||||
const unsafeObject = frame.unsafeObject(objectId);
|
||||
if (!unsafeObject)
|
||||
throw new Error('Object is not input!');
|
||||
const nsFiles = await Promise.all(files.map(filePath => File.createFromFileName(filePath)));
|
||||
unsafeObject.mozSetFileArray(nsFiles);
|
||||
}
|
||||
|
||||
_getContentQuads({objectId, frameId}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
if (!frame)
|
||||
throw new Error('Failed to find frame with id = ' + frameId);
|
||||
const unsafeObject = frame.unsafeObject(objectId);
|
||||
if (!unsafeObject.getBoxQuads)
|
||||
throw new Error('RemoteObject is not a node');
|
||||
const quads = unsafeObject.getBoxQuads({relativeTo: this._frameTree.mainFrame().domWindow().document, recurseWhenNoFrame: true}).map(quad => {
|
||||
return {
|
||||
p1: {x: quad.p1.x, y: quad.p1.y},
|
||||
p2: {x: quad.p2.x, y: quad.p2.y},
|
||||
p3: {x: quad.p3.x, y: quad.p3.y},
|
||||
p4: {x: quad.p4.x, y: quad.p4.y},
|
||||
};
|
||||
});
|
||||
return {quads};
|
||||
}
|
||||
|
||||
_describeNode({objectId, frameId}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
if (!frame)
|
||||
throw new Error('Failed to find frame with id = ' + frameId);
|
||||
const unsafeObject = frame.unsafeObject(objectId);
|
||||
const browsingContextGroup = frame.docShell().browsingContext.group;
|
||||
const frames = this._frameTree.allFramesInBrowsingContextGroup(browsingContextGroup);
|
||||
let contentFrame;
|
||||
let ownerFrame;
|
||||
for (const frame of frames) {
|
||||
if (unsafeObject.contentWindow && frame.docShell() === unsafeObject.contentWindow.docShell)
|
||||
contentFrame = frame;
|
||||
const document = frame.domWindow().document;
|
||||
if (unsafeObject === document || unsafeObject.ownerDocument === document)
|
||||
ownerFrame = frame;
|
||||
}
|
||||
return {
|
||||
contentFrameId: contentFrame ? contentFrame.id() : undefined,
|
||||
ownerFrameId: ownerFrame ? ownerFrame.id() : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async _scrollIntoViewIfNeeded({objectId, frameId, rect}) {
|
||||
const frame = this._frameTree.frame(frameId);
|
||||
if (!frame)
|
||||
throw new Error('Failed to find frame with id = ' + frameId);
|
||||
const unsafeObject = frame.unsafeObject(objectId);
|
||||
if (!unsafeObject.isConnected)
|
||||
throw new Error('Node is detached from document');
|
||||
if (!rect)
|
||||
rect = { x: -1, y: -1, width: -1, height: -1};
|
||||
if (unsafeObject.scrollRectIntoViewIfNeeded)
|
||||
unsafeObject.scrollRectIntoViewIfNeeded(rect.x, rect.y, rect.width, rect.height);
|
||||
else
|
||||
throw new Error('Node does not have a layout object');
|
||||
}
|
||||
|
||||
_getNodeBoundingBox(unsafeObject) {
|
||||
if (!unsafeObject.getBoxQuads)
|
||||
throw new Error('RemoteObject is not a node');
|
||||
const quads = unsafeObject.getBoxQuads({relativeTo: this._frameTree.mainFrame().domWindow().document});
|
||||
if (!quads.length)
|
||||
return;
|
||||
let x1 = Infinity;
|
||||
let y1 = Infinity;
|
||||
let x2 = -Infinity;
|
||||
let y2 = -Infinity;
|
||||
for (const quad of quads) {
|
||||
const boundingBox = quad.getBounds();
|
||||
x1 = Math.min(boundingBox.x, x1);
|
||||
y1 = Math.min(boundingBox.y, y1);
|
||||
x2 = Math.max(boundingBox.x + boundingBox.width, x2);
|
||||
y2 = Math.max(boundingBox.y + boundingBox.height, y2);
|
||||
}
|
||||
return {x: x1, y: y1, width: x2 - x1, height: y2 - y1};
|
||||
}
|
||||
|
||||
async _dispatchKeyEvent({type, keyCode, code, key, repeat, location, text}) {
|
||||
// key events don't fire if we are dragging.
|
||||
if (this._dragging) {
|
||||
if (type === 'keydown' && key === 'Escape')
|
||||
this._cancelDragIfNeeded();
|
||||
return;
|
||||
}
|
||||
const frame = this._frameTree.mainFrame();
|
||||
const tip = frame.textInputProcessor();
|
||||
if (key === 'Meta' && Services.appinfo.OS !== 'Darwin')
|
||||
key = 'OS';
|
||||
else if (key === 'OS' && Services.appinfo.OS === 'Darwin')
|
||||
key = 'Meta';
|
||||
let keyEvent = new (frame.domWindow().KeyboardEvent)("", {
|
||||
key,
|
||||
code,
|
||||
location,
|
||||
repeat,
|
||||
keyCode
|
||||
});
|
||||
if (type === 'keydown') {
|
||||
if (text && text !== key) {
|
||||
tip.commitCompositionWith(text, keyEvent);
|
||||
} else {
|
||||
const flags = 0;
|
||||
tip.keydown(keyEvent, flags);
|
||||
}
|
||||
} else if (type === 'keyup') {
|
||||
if (text)
|
||||
throw new Error(`keyup does not support text option`);
|
||||
const flags = 0;
|
||||
tip.keyup(keyEvent, flags);
|
||||
} else {
|
||||
throw new Error(`Unknown type ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
async _dispatchTouchEvent({type, touchPoints, modifiers}) {
|
||||
const frame = this._frameTree.mainFrame();
|
||||
const defaultPrevented = frame.domWindow().windowUtils.sendTouchEvent(
|
||||
type.toLowerCase(),
|
||||
touchPoints.map((point, id) => id),
|
||||
touchPoints.map(point => point.x),
|
||||
touchPoints.map(point => point.y),
|
||||
touchPoints.map(point => point.radiusX === undefined ? 1.0 : point.radiusX),
|
||||
touchPoints.map(point => point.radiusY === undefined ? 1.0 : point.radiusY),
|
||||
touchPoints.map(point => point.rotationAngle === undefined ? 0.0 : point.rotationAngle),
|
||||
touchPoints.map(point => point.force === undefined ? 1.0 : point.force),
|
||||
touchPoints.length,
|
||||
modifiers);
|
||||
return {defaultPrevented};
|
||||
}
|
||||
|
||||
async _dispatchTapEvent({x, y, modifiers}) {
|
||||
// Force a layout at the point in question, because touch events
// do not seem to trigger one the way mouse events do.
|
||||
this._frameTree.mainFrame().domWindow().windowUtils.elementFromPoint(
|
||||
x,
|
||||
y,
|
||||
false /* aIgnoreRootScrollFrame */,
|
||||
true /* aFlushLayout */);
|
||||
|
||||
const {defaultPrevented: startPrevented} = await this._dispatchTouchEvent({
|
||||
type: 'touchstart',
|
||||
modifiers,
|
||||
touchPoints: [{x, y}]
|
||||
});
|
||||
const {defaultPrevented: endPrevented} = await this._dispatchTouchEvent({
|
||||
type: 'touchend',
|
||||
modifiers,
|
||||
touchPoints: [{x, y}]
|
||||
});
|
||||
if (startPrevented || endPrevented)
|
||||
return;
|
||||
|
||||
const frame = this._frameTree.mainFrame();
|
||||
frame.domWindow().windowUtils.sendMouseEvent(
|
||||
'mousemove',
|
||||
x,
|
||||
y,
|
||||
0 /*button*/,
|
||||
0 /*clickCount*/,
|
||||
modifiers,
|
||||
false /*aIgnoreRootScrollFrame*/,
|
||||
undefined /*pressure*/,
|
||||
5 /*inputSource*/,
|
||||
undefined /*isDOMEventSynthesized*/,
|
||||
false /*isWidgetEventSynthesized*/,
|
||||
0 /*buttons*/,
|
||||
undefined /*pointerIdentifier*/,
|
||||
true /*disablePointerEvent*/);
|
||||
|
||||
frame.domWindow().windowUtils.sendMouseEvent(
|
||||
'mousedown',
|
||||
x,
|
||||
y,
|
||||
0 /*button*/,
|
||||
1 /*clickCount*/,
|
||||
modifiers,
|
||||
false /*aIgnoreRootScrollFrame*/,
|
||||
undefined /*pressure*/,
|
||||
5 /*inputSource*/,
|
||||
undefined /*isDOMEventSynthesized*/,
|
||||
false /*isWidgetEventSynthesized*/,
|
||||
1 /*buttons*/,
|
||||
undefined /*pointerIdentifier*/,
|
||||
true /*disablePointerEvent*/);
|
||||
|
||||
frame.domWindow().windowUtils.sendMouseEvent(
|
||||
'mouseup',
|
||||
x,
|
||||
y,
|
||||
0 /*button*/,
|
||||
1 /*clickCount*/,
|
||||
modifiers,
|
||||
false /*aIgnoreRootScrollFrame*/,
|
||||
undefined /*pressure*/,
|
||||
5 /*inputSource*/,
|
||||
undefined /*isDOMEventSynthesized*/,
|
||||
false /*isWidgetEventSynthesized*/,
|
||||
0 /*buttons*/,
|
||||
undefined /*pointerIdentifier*/,
|
||||
true /*disablePointerEvent*/);
|
||||
}
|
||||
|
||||
_startDragSessionIfNeeded() {
|
||||
const sess = dragService.getCurrentSession();
|
||||
if (sess) return;
|
||||
dragService.startDragSessionForTests(
|
||||
Ci.nsIDragService.DRAGDROP_ACTION_MOVE |
|
||||
Ci.nsIDragService.DRAGDROP_ACTION_COPY |
|
||||
Ci.nsIDragService.DRAGDROP_ACTION_LINK
|
||||
);
|
||||
}
|
||||
|
||||
_simulateDragEvent(type, x, y, modifiers) {
|
||||
const window = this._frameTree.mainFrame().domWindow();
|
||||
const element = window.windowUtils.elementFromPoint(x, y, false, false);
|
||||
const event = window.document.createEvent('DragEvent');
|
||||
|
||||
event.initDragEvent(
|
||||
type,
|
||||
true /* bubble */,
|
||||
true /* cancelable */,
|
||||
window,
|
||||
0 /* clickCount */,
|
||||
window.mozInnerScreenX + x,
|
||||
window.mozInnerScreenY + y,
|
||||
x,
|
||||
y,
|
||||
modifiers & 2 /* ctrlkey */,
|
||||
modifiers & 1 /* altKey */,
|
||||
modifiers & 4 /* shiftKey */,
|
||||
modifiers & 8 /* metaKey */,
|
||||
0 /* button */, // Firefox always reports button 0 on drops, regardless of which button was pressed
|
||||
null /* relatedTarget */,
|
||||
null,
|
||||
);
|
||||
if (type !== 'drop' || dragService.dragAction)
|
||||
window.windowUtils.dispatchDOMEventViaPresShellForTesting(element, event);
|
||||
if (type === 'drop')
|
||||
this._cancelDragIfNeeded();
|
||||
}
|
||||
|
||||
_cancelDragIfNeeded() {
|
||||
this._dragging = false;
|
||||
const sess = dragService.getCurrentSession();
|
||||
if (sess)
|
||||
dragService.endDragSession(true);
|
||||
}
|
||||
|
||||
async _dispatchMouseEvent({type, x, y, button, clickCount, modifiers, buttons}) {
|
||||
this._startDragSessionIfNeeded();
|
||||
const trapDrag = subject => {
|
||||
this._dragging = true;
|
||||
}
|
||||
|
||||
// Don't send mouse events if there is an active drag
|
||||
if (!this._dragging) {
|
||||
const frame = this._frameTree.mainFrame();
|
||||
|
||||
obs.addObserver(trapDrag, 'on-datatransfer-available');
|
||||
frame.domWindow().windowUtils.sendMouseEvent(
|
||||
type,
|
||||
x,
|
||||
y,
|
||||
button,
|
||||
clickCount,
|
||||
modifiers,
|
||||
false /*aIgnoreRootScrollFrame*/,
|
||||
undefined /*pressure*/,
|
||||
undefined /*inputSource*/,
|
||||
true /*isDOMEventSynthesized*/,
|
||||
false /*isWidgetEventSynthesized*/,
|
||||
buttons);
|
||||
obs.removeObserver(trapDrag, 'on-datatransfer-available');
|
||||
|
||||
if (type === 'mousedown' && button === 2) {
|
||||
frame.domWindow().windowUtils.sendMouseEvent(
|
||||
'contextmenu',
|
||||
x,
|
||||
y,
|
||||
button,
|
||||
clickCount,
|
||||
modifiers,
|
||||
false /*aIgnoreRootScrollFrame*/,
|
||||
undefined /*pressure*/,
|
||||
undefined /*inputSource*/,
|
||||
undefined /*isDOMEventSynthesized*/,
|
||||
undefined /*isWidgetEventSynthesized*/,
|
||||
buttons);
|
||||
}
|
||||
}
|
||||
|
||||
// update drag state
|
||||
if (this._dragging) {
|
||||
if (type === 'mousemove')
|
||||
this._simulateDragEvent('dragover', x, y, modifiers);
|
||||
else if (type === 'mouseup') // Firefox performs the drop when any mouse button is released
|
||||
this._simulateDragEvent('drop', x, y, modifiers);
|
||||
} else {
|
||||
this._cancelDragIfNeeded();
|
||||
}
|
||||
}
|
||||
|
||||
async _insertText({text}) {
|
||||
const frame = this._frameTree.mainFrame();
|
||||
frame.textInputProcessor().commitCompositionWith(text);
|
||||
}
|
||||
|
||||
async _crash() {
|
||||
dump(`Crashing intentionally\n`);
|
||||
// This is to intentionally crash the frame.
|
||||
// We crash by using js-ctypes and dereferencing
|
||||
// a bad pointer. The crash should happen immediately
|
||||
// upon loading this frame script.
|
||||
const { ctypes } = ChromeUtils.import('resource://gre/modules/ctypes.jsm');
|
||||
ChromeUtils.privateNoteIntentionalCrash();
|
||||
const zero = new ctypes.intptr_t(8);
|
||||
const badptr = ctypes.cast(zero, ctypes.PointerType(ctypes.int32_t));
|
||||
badptr.contents;
|
||||
}
|
||||
|
||||
async _getFullAXTree({objectId}) {
|
||||
let unsafeObject = null;
|
||||
if (objectId) {
|
||||
unsafeObject = this._frameTree.mainFrame().unsafeObject(objectId);
|
||||
if (!unsafeObject)
|
||||
throw new Error(`No object found for id "${objectId}"`);
|
||||
}
|
||||
|
||||
const service = Cc["@mozilla.org/accessibilityService;1"]
|
||||
.getService(Ci.nsIAccessibilityService);
|
||||
const document = this._frameTree.mainFrame().domWindow().document;
|
||||
const docAcc = service.getAccessibleFor(document);
|
||||
|
||||
while (docAcc.document.isUpdatePendingForJugglerAccessibility)
|
||||
await new Promise(x => this._frameTree.mainFrame().domWindow().requestAnimationFrame(x));
|
||||
|
||||
async function waitForQuiet() {
|
||||
let state = {};
|
||||
docAcc.getState(state, {});
|
||||
if ((state.value & Ci.nsIAccessibleStates.STATE_BUSY) == 0)
|
||||
return;
|
||||
let resolve, reject;
|
||||
const promise = new Promise((x, y) => {resolve = x, reject = y});
|
||||
let eventObserver = {
|
||||
observe(subject, topic) {
|
||||
if (topic !== "accessible-event") {
|
||||
return;
|
||||
}
|
||||
|
||||
// If event type does not match expected type, skip the event.
|
||||
let event = subject.QueryInterface(Ci.nsIAccessibleEvent);
|
||||
if (event.eventType !== Ci.nsIAccessibleEvent.EVENT_STATE_CHANGE) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If event's accessible does not match expected accessible,
|
||||
// skip the event.
|
||||
if (event.accessible !== docAcc) {
|
||||
return;
|
||||
}
|
||||
|
||||
Services.obs.removeObserver(this, "accessible-event");
|
||||
resolve();
|
||||
},
|
||||
};
|
||||
Services.obs.addObserver(eventObserver, "accessible-event");
|
||||
return promise;
|
||||
}
|
||||
function buildNode(accElement) {
|
||||
let a = {}, b = {};
|
||||
accElement.getState(a, b);
|
||||
const tree = {
|
||||
role: service.getStringRole(accElement.role),
|
||||
name: accElement.name || '',
|
||||
};
|
||||
if (unsafeObject && unsafeObject === accElement.DOMNode)
|
||||
tree.foundObject = true;
|
||||
for (const userStringProperty of [
|
||||
'value',
|
||||
'description'
|
||||
]) {
|
||||
tree[userStringProperty] = accElement[userStringProperty] || undefined;
|
||||
}
|
||||
|
||||
const states = {};
|
||||
for (const name of service.getStringStates(a.value, b.value))
|
||||
states[name] = true;
|
||||
for (const name of ['selected',
|
||||
'focused',
|
||||
'pressed',
|
||||
'focusable',
|
||||
'required',
|
||||
'invalid',
|
||||
'modal',
|
||||
'editable',
|
||||
'busy',
|
||||
'checked',
|
||||
'multiselectable']) {
|
||||
if (states[name])
|
||||
tree[name] = true;
|
||||
}
|
||||
|
||||
if (states['multi line'])
|
||||
tree['multiline'] = true;
|
||||
if (states['editable'] && states['readonly'])
|
||||
tree['readonly'] = true;
|
||||
if (states['checked'])
|
||||
tree['checked'] = true;
|
||||
if (states['mixed'])
|
||||
tree['checked'] = 'mixed';
|
||||
if (states['expanded'])
|
||||
tree['expanded'] = true;
|
||||
else if (states['collapsed'])
|
||||
tree['expanded'] = false;
|
||||
if (!states['enabled'])
|
||||
tree['disabled'] = true;
|
||||
|
||||
const attributes = {};
|
||||
if (accElement.attributes) {
|
||||
for (const { key, value } of accElement.attributes.enumerate()) {
|
||||
attributes[key] = value;
|
||||
}
|
||||
}
|
||||
for (const numericalProperty of ['level']) {
|
||||
if (numericalProperty in attributes)
|
||||
tree[numericalProperty] = parseFloat(attributes[numericalProperty]);
|
||||
}
|
||||
for (const stringProperty of ['tag', 'roledescription', 'valuetext', 'orientation', 'autocomplete', 'keyshortcuts', 'haspopup']) {
|
||||
if (stringProperty in attributes)
|
||||
tree[stringProperty] = attributes[stringProperty];
|
||||
}
|
||||
const children = [];
|
||||
|
||||
for (let child = accElement.firstChild; child; child = child.nextSibling) {
|
||||
children.push(buildNode(child));
|
||||
}
|
||||
if (children.length)
|
||||
tree.children = children;
|
||||
return tree;
|
||||
}
|
||||
await waitForQuiet();
|
||||
return {
|
||||
tree: buildNode(docAcc)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
var EXPORTED_SYMBOLS = ['PageAgent'];
|
||||
this.PageAgent = PageAgent;
|
||||
|
|
@ -1,596 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";
// Note: this file should be loadable with eval() into a worker environment.
// Avoid Components.*, ChromeUtils and global const variables.

if (!this.Debugger) {
// Workers have a Debugger defined already; only add it for non-worker globals.
const {addDebuggerToGlobal} = ChromeUtils.import("resource://gre/modules/jsdebugger.jsm", {});
addDebuggerToGlobal(Components.utils.getGlobalForObject(this));
}
|
||||
|
||||
let lastId = 0;
|
||||
function generateId() {
|
||||
return 'id-' + (++lastId);
|
||||
}
|
||||
|
||||
const consoleLevelToProtocolType = {
|
||||
'dir': 'dir',
|
||||
'log': 'log',
|
||||
'debug': 'debug',
|
||||
'info': 'info',
|
||||
'error': 'error',
|
||||
'warn': 'warning',
|
||||
'dirxml': 'dirxml',
|
||||
'table': 'table',
|
||||
'trace': 'trace',
|
||||
'clear': 'clear',
|
||||
'group': 'startGroup',
|
||||
'groupCollapsed': 'startGroupCollapsed',
|
||||
'groupEnd': 'endGroup',
|
||||
'assert': 'assert',
|
||||
'profile': 'profile',
|
||||
'profileEnd': 'profileEnd',
|
||||
'count': 'count',
|
||||
'countReset': 'countReset',
|
||||
'time': null,
|
||||
'timeLog': 'timeLog',
|
||||
'timeEnd': 'timeEnd',
|
||||
'timeStamp': 'timeStamp',
|
||||
};
|
||||
|
||||
const disallowedMessageCategories = new Set([
|
||||
'XPConnect JavaScript',
|
||||
'component javascript',
|
||||
'chrome javascript',
|
||||
'chrome registration',
|
||||
'XBL',
|
||||
'XBL Prototype Handler',
|
||||
'XBL Content Sink',
|
||||
'xbl javascript',
|
||||
]);
|
||||
|
||||
class Runtime {
|
||||
constructor(isWorker = false) {
|
||||
this._debugger = new Debugger();
|
||||
this._pendingPromises = new Map();
|
||||
this._executionContexts = new Map();
|
||||
this._windowToExecutionContext = new Map();
|
||||
this._eventListeners = [];
|
||||
if (isWorker) {
|
||||
this._registerWorkerConsoleHandler();
|
||||
} else {
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
this._registerConsoleServiceListener(Services);
|
||||
this._registerConsoleAPIListener(Services);
|
||||
}
|
||||
// We can't use event listeners here if we want to stay compatible with the worker global context.
|
||||
// Use plain callbacks instead.
|
||||
this.events = {
|
||||
onConsoleMessage: createEvent(),
|
||||
onRuntimeError: createEvent(),
|
||||
onErrorFromWorker: createEvent(),
|
||||
onExecutionContextCreated: createEvent(),
|
||||
onExecutionContextDestroyed: createEvent(),
|
||||
onBindingCalled: createEvent(),
|
||||
};
|
||||
}
|
||||
|
||||
executionContexts() {
|
||||
return [...this._executionContexts.values()];
|
||||
}
|
||||
|
||||
async evaluate({executionContextId, expression, returnByValue}) {
|
||||
const executionContext = this.findExecutionContext(executionContextId);
|
||||
if (!executionContext)
|
||||
throw new Error('Failed to find execution context with id = ' + executionContextId);
|
||||
const exceptionDetails = {};
|
||||
let result = await executionContext.evaluateScript(expression, exceptionDetails);
|
||||
if (!result)
|
||||
return {exceptionDetails};
|
||||
if (returnByValue)
|
||||
result = executionContext.ensureSerializedToValue(result);
|
||||
return {result};
|
||||
}
|
||||
|
||||
async callFunction({executionContextId, functionDeclaration, args, returnByValue}) {
|
||||
const executionContext = this.findExecutionContext(executionContextId);
|
||||
if (!executionContext)
|
||||
throw new Error('Failed to find execution context with id = ' + executionContextId);
|
||||
const exceptionDetails = {};
|
||||
let result = await executionContext.evaluateFunction(functionDeclaration, args, exceptionDetails);
|
||||
if (!result)
|
||||
return {exceptionDetails};
|
||||
if (returnByValue)
|
||||
result = executionContext.ensureSerializedToValue(result);
|
||||
return {result};
|
||||
}
|
||||
|
||||
async getObjectProperties({executionContextId, objectId}) {
|
||||
const executionContext = this.findExecutionContext(executionContextId);
|
||||
if (!executionContext)
|
||||
throw new Error('Failed to find execution context with id = ' + executionContextId);
|
||||
return {properties: executionContext.getObjectProperties(objectId)};
|
||||
}
|
||||
|
||||
async disposeObject({executionContextId, objectId}) {
|
||||
const executionContext = this.findExecutionContext(executionContextId);
|
||||
if (!executionContext)
|
||||
throw new Error('Failed to find execution context with id = ' + executionContextId);
|
||||
return executionContext.disposeObject(objectId);
|
||||
}
|
||||
|
||||
_registerConsoleServiceListener(Services) {
|
||||
const Ci = Components.interfaces;
|
||||
const consoleServiceListener = {
|
||||
QueryInterface: ChromeUtils.generateQI([Ci.nsIConsoleListener]),
|
||||
|
||||
observe: message => {
|
||||
if (!(message instanceof Ci.nsIScriptError) || !message.outerWindowID ||
|
||||
!message.category || disallowedMessageCategories.has(message.category)) {
|
||||
return;
|
||||
}
|
||||
const errorWindow = Services.wm.getOuterWindowWithId(message.outerWindowID);
|
||||
if (message.category === 'Web Worker' && message.logLevel === Ci.nsIConsoleMessage.error) {
|
||||
emitEvent(this.events.onErrorFromWorker, errorWindow, message.message, '' + message.stack);
|
||||
return;
|
||||
}
|
||||
const executionContext = this._windowToExecutionContext.get(errorWindow);
|
||||
if (!executionContext) {
|
||||
return;
|
||||
}
|
||||
const typeNames = {
|
||||
[Ci.nsIConsoleMessage.debug]: 'debug',
|
||||
[Ci.nsIConsoleMessage.info]: 'info',
|
||||
[Ci.nsIConsoleMessage.warn]: 'warn',
|
||||
[Ci.nsIConsoleMessage.error]: 'error',
|
||||
};
|
||||
if (!message.hasException) {
|
||||
emitEvent(this.events.onConsoleMessage, {
|
||||
args: [{
|
||||
value: message.message,
|
||||
}],
|
||||
type: typeNames[message.logLevel],
|
||||
executionContextId: executionContext.id(),
|
||||
location: {
|
||||
lineNumber: message.lineNumber,
|
||||
columnNumber: message.columnNumber,
|
||||
url: message.sourceName,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
emitEvent(this.events.onRuntimeError, {
|
||||
executionContext,
|
||||
message: message.errorMessage,
|
||||
stack: message.stack.toString(),
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
Services.console.registerListener(consoleServiceListener);
|
||||
this._eventListeners.push(() => Services.console.unregisterListener(consoleServiceListener));
|
||||
}
|
||||
|
||||
_registerConsoleAPIListener(Services) {
|
||||
const Ci = Components.interfaces;
|
||||
const Cc = Components.classes;
|
||||
const ConsoleAPIStorage = Cc["@mozilla.org/consoleAPI-storage;1"].getService(Ci.nsIConsoleAPIStorage);
|
||||
const onMessage = ({ wrappedJSObject }) => {
|
||||
const executionContext = Array.from(this._executionContexts.values()).find(context => {
|
||||
// There is no easy way to determine the isolated world context, and we normally don't write
|
||||
// objects to the console from utility worlds, so we always return the main world context here.
|
||||
if (context._isIsolatedWorldContext())
|
||||
return false;
|
||||
const domWindow = context._domWindow;
|
||||
return domWindow && domWindow.windowGlobalChild.innerWindowId === wrappedJSObject.innerID;
|
||||
});
|
||||
if (!executionContext)
|
||||
return;
|
||||
this._onConsoleMessage(executionContext, wrappedJSObject);
|
||||
};
|
||||
ConsoleAPIStorage.addLogEventListener(
|
||||
onMessage,
|
||||
Cc["@mozilla.org/systemprincipal;1"].createInstance(Ci.nsIPrincipal)
|
||||
);
|
||||
this._eventListeners.push(() => ConsoleAPIStorage.removeLogEventListener(onMessage));
|
||||
}
|
||||
|
||||
_registerWorkerConsoleHandler() {
|
||||
setConsoleEventHandler(message => {
|
||||
const executionContext = Array.from(this._executionContexts.values())[0];
|
||||
this._onConsoleMessage(executionContext, message);
|
||||
});
|
||||
this._eventListeners.push(() => setConsoleEventHandler(null));
|
||||
}
|
||||
|
||||
_onConsoleMessage(executionContext, message) {
|
||||
const type = consoleLevelToProtocolType[message.level];
|
||||
if (!type)
|
||||
return;
|
||||
const args = message.arguments.map(arg => executionContext.rawValueToRemoteObject(arg));
|
||||
emitEvent(this.events.onConsoleMessage, {
|
||||
args,
|
||||
type,
|
||||
executionContextId: executionContext.id(),
|
||||
location: {
|
||||
lineNumber: message.lineNumber - 1,
|
||||
columnNumber: message.columnNumber - 1,
|
||||
url: message.filename,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
dispose() {
|
||||
for (const tearDown of this._eventListeners)
|
||||
tearDown.call(null);
|
||||
this._eventListeners = [];
|
||||
}
|
||||
|
||||
async _awaitPromise(executionContext, obj, exceptionDetails = {}) {
|
||||
if (obj.promiseState === 'fulfilled')
|
||||
return {success: true, obj: obj.promiseValue};
|
||||
if (obj.promiseState === 'rejected') {
|
||||
const debuggee = executionContext._debuggee;
|
||||
exceptionDetails.text = debuggee.executeInGlobalWithBindings('e.message', {e: obj.promiseReason}).return;
|
||||
exceptionDetails.stack = debuggee.executeInGlobalWithBindings('e.stack', {e: obj.promiseReason}).return;
|
||||
return {success: false, obj: null};
|
||||
}
|
||||
let resolve, reject;
|
||||
const promise = new Promise((a, b) => {
|
||||
resolve = a;
|
||||
reject = b;
|
||||
});
|
||||
this._pendingPromises.set(obj.promiseID, {resolve, reject, executionContext, exceptionDetails});
|
||||
if (this._pendingPromises.size === 1)
|
||||
this._debugger.onPromiseSettled = this._onPromiseSettled.bind(this);
|
||||
return await promise;
|
||||
}
|
||||
|
||||
_onPromiseSettled(obj) {
|
||||
const pendingPromise = this._pendingPromises.get(obj.promiseID);
|
||||
if (!pendingPromise)
|
||||
return;
|
||||
this._pendingPromises.delete(obj.promiseID);
|
||||
if (!this._pendingPromises.size)
|
||||
this._debugger.onPromiseSettled = undefined;
|
||||
|
||||
if (obj.promiseState === 'fulfilled') {
|
||||
pendingPromise.resolve({success: true, obj: obj.promiseValue});
|
||||
return;
|
||||
}
|
||||
const debuggee = pendingPromise.executionContext._debuggee;
|
||||
pendingPromise.exceptionDetails.text = debuggee.executeInGlobalWithBindings('e.message', {e: obj.promiseReason}).return;
|
||||
pendingPromise.exceptionDetails.stack = debuggee.executeInGlobalWithBindings('e.stack', {e: obj.promiseReason}).return;
|
||||
pendingPromise.resolve({success: false, obj: null});
|
||||
}
|
||||
|
||||
createExecutionContext(domWindow, contextGlobal, auxData) {
|
||||
// Note: domWindow is null for workers.
|
||||
const context = new ExecutionContext(this, domWindow, contextGlobal, auxData);
|
||||
this._executionContexts.set(context._id, context);
|
||||
if (domWindow)
|
||||
this._windowToExecutionContext.set(domWindow, context);
|
||||
emitEvent(this.events.onExecutionContextCreated, context);
|
||||
return context;
|
||||
}
|
||||
|
||||
findExecutionContext(executionContextId) {
|
||||
const executionContext = this._executionContexts.get(executionContextId);
|
||||
if (!executionContext)
|
||||
throw new Error('Failed to find execution context with id = ' + executionContextId);
|
||||
return executionContext;
|
||||
}
|
||||
|
||||
destroyExecutionContext(destroyedContext) {
|
||||
for (const [promiseID, {reject, executionContext}] of this._pendingPromises) {
|
||||
if (executionContext === destroyedContext) {
|
||||
reject(new Error('Execution context was destroyed!'));
|
||||
this._pendingPromises.delete(promiseID);
|
||||
}
|
||||
}
|
||||
if (!this._pendingPromises.size)
|
||||
this._debugger.onPromiseSettled = undefined;
|
||||
this._debugger.removeDebuggee(destroyedContext._contextGlobal);
|
||||
this._executionContexts.delete(destroyedContext._id);
|
||||
if (destroyedContext._domWindow)
|
||||
this._windowToExecutionContext.delete(destroyedContext._domWindow);
|
||||
emitEvent(this.events.onExecutionContextDestroyed, destroyedContext);
|
||||
}
|
||||
}
|
||||
|
||||
class ExecutionContext {
|
||||
constructor(runtime, domWindow, contextGlobal, auxData) {
|
||||
this._runtime = runtime;
|
||||
this._domWindow = domWindow;
|
||||
this._contextGlobal = contextGlobal;
|
||||
this._debuggee = runtime._debugger.addDebuggee(contextGlobal);
|
||||
this._remoteObjects = new Map();
|
||||
this._id = generateId();
|
||||
this._auxData = auxData;
|
||||
this._jsonStringifyObject = this._debuggee.executeInGlobal(`((stringify, object) => {
|
||||
const oldToJSON = Date.prototype.toJSON;
|
||||
Date.prototype.toJSON = undefined;
|
||||
const oldArrayToJSON = Array.prototype.toJSON;
|
||||
const oldArrayHadToJSON = Array.prototype.hasOwnProperty('toJSON');
|
||||
if (oldArrayHadToJSON)
|
||||
Array.prototype.toJSON = undefined;
|
||||
|
||||
let hasSymbol = false;
|
||||
const result = stringify(object, (key, value) => {
|
||||
if (typeof value === 'symbol')
|
||||
hasSymbol = true;
|
||||
return value;
|
||||
});
|
||||
|
||||
Date.prototype.toJSON = oldToJSON;
|
||||
if (oldArrayHadToJSON)
|
||||
Array.prototype.toJSON = oldArrayToJSON;
|
||||
|
||||
return hasSymbol ? undefined : result;
|
||||
}).bind(null, JSON.stringify.bind(JSON))`).return;
|
||||
}
|
||||
|
||||
id() {
|
||||
return this._id;
|
||||
}
|
||||
|
||||
auxData() {
|
||||
return this._auxData;
|
||||
}
|
||||
|
||||
_isIsolatedWorldContext() {
|
||||
return !!this._auxData.name;
|
||||
}
|
||||
|
||||
async evaluateScript(script, exceptionDetails = {}) {
|
||||
const userInputHelper = this._domWindow ? this._domWindow.windowUtils.setHandlingUserInput(true) : null;
|
||||
if (this._domWindow && this._domWindow.document)
|
||||
this._domWindow.document.notifyUserGestureActivation();
|
||||
|
||||
let {success, obj} = this._getResult(this._debuggee.executeInGlobal(script), exceptionDetails);
|
||||
userInputHelper && userInputHelper.destruct();
|
||||
if (!success)
|
||||
return null;
|
||||
if (obj && obj.isPromise) {
|
||||
const awaitResult = await this._runtime._awaitPromise(this, obj, exceptionDetails);
|
||||
if (!awaitResult.success)
|
||||
return null;
|
||||
obj = awaitResult.obj;
|
||||
}
|
||||
return this._createRemoteObject(obj);
|
||||
}
|
||||
|
||||
evaluateScriptSafely(script) {
|
||||
try {
|
||||
this._debuggee.executeInGlobal(script);
|
||||
} catch (e) {
|
||||
dump(`ERROR: ${e.message}\n${e.stack}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
async evaluateFunction(functionText, args, exceptionDetails = {}) {
|
||||
const funEvaluation = this._getResult(this._debuggee.executeInGlobal('(' + functionText + ')'), exceptionDetails);
|
||||
if (!funEvaluation.success)
|
||||
return null;
|
||||
if (!funEvaluation.obj.callable)
|
||||
throw new Error('functionText does not evaluate to a function!');
|
||||
args = args.map(arg => {
|
||||
if (arg.objectId) {
|
||||
if (!this._remoteObjects.has(arg.objectId))
|
||||
throw new Error('Cannot find object with id = ' + arg.objectId);
|
||||
return this._remoteObjects.get(arg.objectId);
|
||||
}
|
||||
switch (arg.unserializableValue) {
|
||||
case 'Infinity': return Infinity;
|
||||
case '-Infinity': return -Infinity;
|
||||
case '-0': return -0;
|
||||
case 'NaN': return NaN;
|
||||
default: return this._toDebugger(arg.value);
|
||||
}
|
||||
});
|
||||
const userInputHelper = this._domWindow ? this._domWindow.windowUtils.setHandlingUserInput(true) : null;
|
||||
if (this._domWindow && this._domWindow.document)
|
||||
this._domWindow.document.notifyUserGestureActivation();
|
||||
let {success, obj} = this._getResult(funEvaluation.obj.apply(null, args), exceptionDetails);
|
||||
userInputHelper && userInputHelper.destruct();
|
||||
if (!success)
|
||||
return null;
|
||||
if (obj && obj.isPromise) {
|
||||
const awaitResult = await this._runtime._awaitPromise(this, obj, exceptionDetails);
|
||||
if (!awaitResult.success)
|
||||
return null;
|
||||
obj = awaitResult.obj;
|
||||
}
|
||||
return this._createRemoteObject(obj);
|
||||
}
|
||||
|
||||
addBinding(name, script) {
|
||||
Cu.exportFunction((...args) => {
|
||||
emitEvent(this._runtime.events.onBindingCalled, {
|
||||
executionContextId: this._id,
|
||||
name,
|
||||
payload: args[0],
|
||||
});
|
||||
}, this._contextGlobal, {
|
||||
defineAs: name,
|
||||
});
|
||||
this.evaluateScriptSafely(script);
|
||||
}
|
||||
|
||||
unsafeObject(objectId) {
|
||||
if (!this._remoteObjects.has(objectId))
|
||||
return;
|
||||
return { object: this._remoteObjects.get(objectId).unsafeDereference() };
|
||||
}
|
||||
|
||||
rawValueToRemoteObject(rawValue) {
|
||||
const debuggerObj = this._debuggee.makeDebuggeeValue(rawValue);
|
||||
return this._createRemoteObject(debuggerObj);
|
||||
}
|
||||
|
||||
_instanceOf(debuggerObj, rawObj, className) {
|
||||
if (this._domWindow)
|
||||
return rawObj instanceof this._domWindow[className];
|
||||
return this._debuggee.executeInGlobalWithBindings('o instanceof this[className]', {o: debuggerObj, className: this._debuggee.makeDebuggeeValue(className)}).return;
|
||||
}
|
||||
|
||||
_createRemoteObject(debuggerObj) {
|
||||
if (debuggerObj instanceof Debugger.Object) {
|
||||
const objectId = generateId();
|
||||
this._remoteObjects.set(objectId, debuggerObj);
|
||||
const rawObj = debuggerObj.unsafeDereference();
|
||||
const type = typeof rawObj;
|
||||
let subtype = undefined;
|
||||
if (debuggerObj.isProxy)
|
||||
subtype = 'proxy';
|
||||
else if (Array.isArray(rawObj))
|
||||
subtype = 'array';
|
||||
else if (Object.is(rawObj, null))
|
||||
subtype = 'null';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Node'))
|
||||
subtype = 'node';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'RegExp'))
|
||||
subtype = 'regexp';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Date'))
|
||||
subtype = 'date';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Map'))
|
||||
subtype = 'map';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Set'))
|
||||
subtype = 'set';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'WeakMap'))
|
||||
subtype = 'weakmap';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'WeakSet'))
|
||||
subtype = 'weakset';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Error'))
|
||||
subtype = 'error';
|
||||
else if (this._instanceOf(debuggerObj, rawObj, 'Promise'))
|
||||
subtype = 'promise';
|
||||
else if ((this._instanceOf(debuggerObj, rawObj, 'Int8Array')) || (this._instanceOf(debuggerObj, rawObj, 'Uint8Array')) ||
|
||||
(this._instanceOf(debuggerObj, rawObj, 'Uint8ClampedArray')) || (this._instanceOf(debuggerObj, rawObj, 'Int16Array')) ||
|
||||
(this._instanceOf(debuggerObj, rawObj, 'Uint16Array')) || (this._instanceOf(debuggerObj, rawObj, 'Int32Array')) ||
|
||||
(this._instanceOf(debuggerObj, rawObj, 'Uint32Array')) || (this._instanceOf(debuggerObj, rawObj, 'Float32Array')) ||
|
||||
(this._instanceOf(debuggerObj, rawObj, 'Float64Array'))) {
|
||||
subtype = 'typedarray';
|
||||
}
|
||||
return {objectId, type, subtype};
|
||||
}
|
||||
if (typeof debuggerObj === 'symbol') {
|
||||
const objectId = generateId();
|
||||
this._remoteObjects.set(objectId, debuggerObj);
|
||||
return {objectId, type: 'symbol'};
|
||||
}
|
||||
|
||||
let unserializableValue = undefined;
|
||||
if (Object.is(debuggerObj, NaN))
|
||||
unserializableValue = 'NaN';
|
||||
else if (Object.is(debuggerObj, -0))
|
||||
unserializableValue = '-0';
|
||||
else if (Object.is(debuggerObj, Infinity))
|
||||
unserializableValue = 'Infinity';
|
||||
else if (Object.is(debuggerObj, -Infinity))
|
||||
unserializableValue = '-Infinity';
|
||||
return unserializableValue ? {unserializableValue} : {value: debuggerObj};
|
||||
}
|
||||
|
||||
ensureSerializedToValue(protocolObject) {
|
||||
if (!protocolObject.objectId)
|
||||
return protocolObject;
|
||||
const obj = this._remoteObjects.get(protocolObject.objectId);
|
||||
this._remoteObjects.delete(protocolObject.objectId);
|
||||
return {value: this._serialize(obj)};
|
||||
}
|
||||
|
||||
_toDebugger(obj) {
|
||||
if (typeof obj !== 'object')
|
||||
return obj;
|
||||
if (obj === null)
|
||||
return obj;
|
||||
const properties = {};
|
||||
for (let [key, value] of Object.entries(obj)) {
|
||||
properties[key] = {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
value: this._toDebugger(value),
|
||||
};
|
||||
}
|
||||
const baseObject = Array.isArray(obj) ? '([])' : '({})';
|
||||
const debuggerObj = this._debuggee.executeInGlobal(baseObject).return;
|
||||
debuggerObj.defineProperties(properties);
|
||||
return debuggerObj;
|
||||
}
|
||||
|
||||
_serialize(obj) {
|
||||
const result = this._debuggee.executeInGlobalWithBindings('stringify(e)', {e: obj, stringify: this._jsonStringifyObject});
|
||||
if (result.throw)
|
||||
throw new Error('Object is not serializable');
|
||||
return result.return === undefined ? undefined : JSON.parse(result.return);
|
||||
}
|
||||
|
||||
disposeObject(objectId) {
|
||||
this._remoteObjects.delete(objectId);
|
||||
}
|
||||
|
||||
getObjectProperties(objectId) {
|
||||
if (!this._remoteObjects.has(objectId))
|
||||
throw new Error('Cannot find object with id = ' + objectId);
|
||||
const result = [];
|
||||
for (let obj = this._remoteObjects.get(objectId); obj; obj = obj.proto) {
|
||||
for (const propertyName of obj.getOwnPropertyNames()) {
|
||||
const descriptor = obj.getOwnPropertyDescriptor(propertyName);
|
||||
if (!descriptor.enumerable)
|
||||
continue;
|
||||
result.push({
|
||||
name: propertyName,
|
||||
value: this._createRemoteObject(descriptor.value),
|
||||
});
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
_getResult(completionValue, exceptionDetails = {}) {
|
||||
if (!completionValue) {
|
||||
exceptionDetails.text = 'Evaluation terminated!';
|
||||
exceptionDetails.stack = '';
|
||||
return {success: false, obj: null};
|
||||
}
|
||||
if (completionValue.throw) {
|
||||
if (this._debuggee.executeInGlobalWithBindings('e instanceof Error', {e: completionValue.throw}).return) {
|
||||
exceptionDetails.text = this._debuggee.executeInGlobalWithBindings('e.message', {e: completionValue.throw}).return;
|
||||
exceptionDetails.stack = this._debuggee.executeInGlobalWithBindings('e.stack', {e: completionValue.throw}).return;
|
||||
} else {
|
||||
exceptionDetails.value = this._serialize(completionValue.throw);
|
||||
}
|
||||
return {success: false, obj: null};
|
||||
}
|
||||
return {success: true, obj: completionValue.return};
|
||||
}
|
||||
}
|
||||
|
||||
const listenersSymbol = Symbol('listeners');
|
||||
|
||||
function createEvent() {
|
||||
const listeners = new Set();
|
||||
const subscribeFunction = listener => {
|
||||
listeners.add(listener);
|
||||
return () => listeners.delete(listener);
|
||||
};
|
||||
subscribeFunction[listenersSymbol] = listeners;
|
||||
return subscribeFunction;
|
||||
}
|
||||
|
||||
function emitEvent(event, ...args) {
|
||||
let listeners = event[listenersSymbol];
|
||||
if (!listeners || !listeners.size)
|
||||
return;
|
||||
listeners = new Set(listeners);
|
||||
for (const listener of listeners)
|
||||
listener.call(null, ...args);
|
||||
}
|
||||
|
||||
var EXPORTED_SYMBOLS = ['Runtime'];
|
||||
this.Runtime = Runtime;
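
As a usage note, createEvent/emitEvent above implement a tiny listener registry: calling the event function subscribes a listener and returns an unsubscribe function, and emitEvent invokes a snapshot of the current listeners. A minimal illustrative sketch (the onThing name is hypothetical):

  const onThing = createEvent();
  const unsubscribe = onThing(payload => dump(`payload: ${payload}\n`));
  emitEvent(onThing, 42);   // calls every currently registered listener with 42
  unsubscribe();            // removes the listener from the set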
|
|
@ -1,76 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
loadSubScript('chrome://juggler/content/content/Runtime.js');
|
||||
loadSubScript('chrome://juggler/content/SimpleChannel.js');
|
||||
|
||||
const channel = new SimpleChannel('worker::worker');
|
||||
const eventListener = event => channel._onMessage(JSON.parse(event.data));
|
||||
this.addEventListener('message', eventListener);
|
||||
channel.setTransport({
|
||||
sendMessage: msg => postMessage(JSON.stringify(msg)),
|
||||
dispose: () => this.removeEventListener('message', eventListener),
|
||||
});
|
||||
|
||||
const runtime = new Runtime(true /* isWorker */);
|
||||
|
||||
(() => {
|
||||
// Create execution context in the runtime only when the script
|
||||
// source was actually evaluated in it.
|
||||
const dbg = new Debugger(global);
|
||||
if (dbg.findScripts({global}).length) {
|
||||
runtime.createExecutionContext(null /* domWindow */, global, {});
|
||||
} else {
|
||||
dbg.onNewScript = function(s) {
|
||||
dbg.onNewScript = undefined;
|
||||
dbg.removeAllDebuggees();
|
||||
runtime.createExecutionContext(null /* domWindow */, global, {});
|
||||
};
|
||||
}
|
||||
})();
|
||||
|
||||
class RuntimeAgent {
|
||||
constructor(runtime, channel) {
|
||||
this._runtime = runtime;
|
||||
this._browserRuntime = channel.connect('runtime');
|
||||
|
||||
for (const context of this._runtime.executionContexts())
|
||||
this._onExecutionContextCreated(context);
|
||||
|
||||
this._eventListeners = [
|
||||
this._runtime.events.onConsoleMessage(msg => this._browserRuntime.emit('runtimeConsole', msg)),
|
||||
this._runtime.events.onExecutionContextCreated(this._onExecutionContextCreated.bind(this)),
|
||||
this._runtime.events.onExecutionContextDestroyed(this._onExecutionContextDestroyed.bind(this)),
|
||||
channel.register('runtime', {
|
||||
evaluate: this._runtime.evaluate.bind(this._runtime),
|
||||
callFunction: this._runtime.callFunction.bind(this._runtime),
|
||||
getObjectProperties: this._runtime.getObjectProperties.bind(this._runtime),
|
||||
disposeObject: this._runtime.disposeObject.bind(this._runtime),
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
_onExecutionContextCreated(executionContext) {
|
||||
this._browserRuntime.emit('runtimeExecutionContextCreated', {
|
||||
executionContextId: executionContext.id(),
|
||||
auxData: executionContext.auxData(),
|
||||
});
|
||||
}
|
||||
|
||||
_onExecutionContextDestroyed(executionContext) {
|
||||
this._browserRuntime.emit('runtimeExecutionContextDestroyed', {
|
||||
executionContextId: executionContext.id(),
|
||||
});
|
||||
}
|
||||
|
||||
dispose() {
|
||||
for (const disposer of this._eventListeners)
|
||||
disposer();
|
||||
this._eventListeners = [];
|
||||
}
|
||||
}
|
||||
|
||||
new RuntimeAgent(runtime, channel);
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
* {
|
||||
scrollbar-width: none !important;
|
||||
}
|
|
@ -1,155 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
const {FrameTree} = ChromeUtils.import('chrome://juggler/content/content/FrameTree.js');
|
||||
const {SimpleChannel} = ChromeUtils.import('chrome://juggler/content/SimpleChannel.js');
|
||||
const {PageAgent} = ChromeUtils.import('chrome://juggler/content/content/PageAgent.js');
|
||||
|
||||
let frameTree;
|
||||
const helper = new Helper();
|
||||
const messageManager = this;
|
||||
|
||||
let pageAgent;
|
||||
|
||||
let failedToOverrideTimezone = false;
|
||||
|
||||
const applySetting = {
|
||||
geolocation: (geolocation) => {
|
||||
if (geolocation) {
|
||||
docShell.setGeolocationOverride({
|
||||
coords: {
|
||||
latitude: geolocation.latitude,
|
||||
longitude: geolocation.longitude,
|
||||
accuracy: geolocation.accuracy,
|
||||
altitude: NaN,
|
||||
altitudeAccuracy: NaN,
|
||||
heading: NaN,
|
||||
speed: NaN,
|
||||
},
|
||||
address: null,
|
||||
timestamp: Date.now()
|
||||
});
|
||||
} else {
|
||||
docShell.setGeolocationOverride(null);
|
||||
}
|
||||
},
|
||||
|
||||
onlineOverride: (onlineOverride) => {
|
||||
if (!onlineOverride) {
|
||||
docShell.onlineOverride = Ci.nsIDocShell.ONLINE_OVERRIDE_NONE;
|
||||
return;
|
||||
}
|
||||
docShell.onlineOverride = onlineOverride === 'online' ?
|
||||
Ci.nsIDocShell.ONLINE_OVERRIDE_ONLINE : Ci.nsIDocShell.ONLINE_OVERRIDE_OFFLINE;
|
||||
},
|
||||
|
||||
bypassCSP: (bypassCSP) => {
|
||||
docShell.bypassCSPEnabled = bypassCSP;
|
||||
},
|
||||
|
||||
timezoneId: (timezoneId) => {
|
||||
failedToOverrideTimezone = !docShell.overrideTimezone(timezoneId);
|
||||
},
|
||||
|
||||
locale: (locale) => {
|
||||
docShell.languageOverride = locale;
|
||||
},
|
||||
|
||||
scrollbarsHidden: (hidden) => {
|
||||
frameTree.setScrollbarsHidden(hidden);
|
||||
},
|
||||
|
||||
colorScheme: (colorScheme) => {
|
||||
frameTree.setColorScheme(colorScheme);
|
||||
},
|
||||
|
||||
reducedMotion: (reducedMotion) => {
|
||||
frameTree.setReducedMotion(reducedMotion);
|
||||
},
|
||||
|
||||
forcedColors: (forcedColors) => {
|
||||
frameTree.setForcedColors(forcedColors);
|
||||
},
|
||||
};
|
||||
|
||||
const channel = SimpleChannel.createForMessageManager('content::page', messageManager);
|
||||
|
||||
function initialize() {
|
||||
const response = sendSyncMessage('juggler:content-ready')[0];
|
||||
// If we didn't get a response, then we don't want to do anything
|
||||
// as a part of this frame script.
|
||||
if (!response)
|
||||
return;
|
||||
const {
|
||||
initScripts = [],
|
||||
bindings = [],
|
||||
settings = {}
|
||||
} = response || {};
|
||||
// Enforce focused state for all top level documents.
|
||||
docShell.overrideHasFocus = true;
|
||||
docShell.forceActiveState = true;
|
||||
frameTree = new FrameTree(docShell);
|
||||
for (const [name, value] of Object.entries(settings)) {
|
||||
if (value !== undefined)
|
||||
applySetting[name](value);
|
||||
}
|
||||
for (const { worldName, name, script } of bindings)
|
||||
frameTree.addBinding(worldName, name, script);
|
||||
frameTree.setInitScripts(initScripts);
|
||||
|
||||
pageAgent = new PageAgent(messageManager, channel, frameTree);
|
||||
|
||||
channel.register('', {
|
||||
setInitScripts(scripts) {
|
||||
frameTree.setInitScripts(scripts);
|
||||
},
|
||||
|
||||
addBinding({worldName, name, script}) {
|
||||
frameTree.addBinding(worldName, name, script);
|
||||
},
|
||||
|
||||
applyContextSetting({name, value}) {
|
||||
applySetting[name](value);
|
||||
},
|
||||
|
||||
ensurePermissions() {
|
||||
// noop, just a roundtrip.
|
||||
},
|
||||
|
||||
hasFailedToOverrideTimezone() {
|
||||
return failedToOverrideTimezone;
|
||||
},
|
||||
|
||||
async awaitViewportDimensions({width, height, deviceSizeIsPageSize}) {
|
||||
docShell.deviceSizeIsPageSize = deviceSizeIsPageSize;
|
||||
const win = docShell.domWindow;
|
||||
if (win.innerWidth === width && win.innerHeight === height)
|
||||
return;
|
||||
await new Promise(resolve => {
|
||||
const listener = helper.addEventListener(win, 'resize', () => {
|
||||
if (win.innerWidth === width && win.innerHeight === height) {
|
||||
helper.removeListeners([listener]);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
dispose() {
|
||||
},
|
||||
});
|
||||
|
||||
const gListeners = [
|
||||
helper.addEventListener(messageManager, 'unload', msg => {
|
||||
helper.removeListeners(gListeners);
|
||||
pageAgent.dispose();
|
||||
frameTree.dispose();
|
||||
channel.dispose();
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
initialize();
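
For reference, initialize() above only proceeds when the synchronous 'juggler:content-ready' message returns a response carrying initScripts, bindings and settings. A hedged, illustrative example of such a response (the field names match the destructuring above; the concrete values and the shape of initScripts entries are assumptions):

  const response = {
    initScripts: [],                                            // forwarded to frameTree.setInitScripts()
    bindings: [{ worldName: 'utility', name: 'binding', script: '' }],  // each added via frameTree.addBinding()
    settings: { colorScheme: 'dark', scrollbarsHidden: true },  // keys must exist in applySetting
  };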
|
|
@ -1,25 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
juggler.jar:
|
||||
% content juggler %content/
|
||||
|
||||
content/components/Juggler.js (components/Juggler.js)
|
||||
|
||||
content/Helper.js (Helper.js)
|
||||
content/NetworkObserver.js (NetworkObserver.js)
|
||||
content/TargetRegistry.js (TargetRegistry.js)
|
||||
content/SimpleChannel.js (SimpleChannel.js)
|
||||
content/protocol/PrimitiveTypes.js (protocol/PrimitiveTypes.js)
|
||||
content/protocol/Protocol.js (protocol/Protocol.js)
|
||||
content/protocol/Dispatcher.js (protocol/Dispatcher.js)
|
||||
content/protocol/PageHandler.js (protocol/PageHandler.js)
|
||||
content/protocol/BrowserHandler.js (protocol/BrowserHandler.js)
|
||||
content/content/main.js (content/main.js)
|
||||
content/content/FrameTree.js (content/FrameTree.js)
|
||||
content/content/PageAgent.js (content/PageAgent.js)
|
||||
content/content/Runtime.js (content/Runtime.js)
|
||||
content/content/WorkerMain.js (content/WorkerMain.js)
|
||||
content/content/hidden-scrollbars.css (content/hidden-scrollbars.css)
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
DIRS += ["components", "screencast", "pipe"]
|
||||
|
||||
JAR_MANIFESTS += ["jar.mn"]
|
||||
with Files("**"):
|
||||
BUG_COMPONENT = ("Testing", "Juggler")
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
|
||||
# vim: set filetype=python:
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
Classes = [
|
||||
{
|
||||
'cid': '{d69ecefe-3df7-4d11-9dc7-f604edb96da2}',
|
||||
'contract_ids': ['@mozilla.org/juggler/remotedebuggingpipe;1'],
|
||||
'type': 'nsIRemoteDebuggingPipe',
|
||||
'constructor': 'mozilla::nsRemoteDebuggingPipe::GetSingleton',
|
||||
'headers': ['/juggler/pipe/nsRemoteDebuggingPipe.h'],
|
||||
},
|
||||
]
|
|
@ -1,24 +0,0 @@
|
|||
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
|
||||
# vim: set filetype=python:
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
XPIDL_SOURCES += [
|
||||
'nsIRemoteDebuggingPipe.idl',
|
||||
]
|
||||
|
||||
XPIDL_MODULE = 'jugglerpipe'
|
||||
|
||||
SOURCES += [
|
||||
'nsRemoteDebuggingPipe.cpp',
|
||||
]
|
||||
|
||||
XPCOM_MANIFESTS += [
|
||||
'components.conf',
|
||||
]
|
||||
|
||||
LOCAL_INCLUDES += [
|
||||
]
|
||||
|
||||
FINAL_LIBRARY = 'xul'
|
|
@ -1,20 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "nsISupports.idl"
|
||||
|
||||
[scriptable, uuid(7910c231-971a-4653-abdc-a8599a986c4c)]
|
||||
interface nsIRemoteDebuggingPipeClient : nsISupports
|
||||
{
|
||||
void receiveMessage(in AString message);
|
||||
void disconnected();
|
||||
};
|
||||
|
||||
[scriptable, uuid(b7bfb66b-fd46-4aa2-b4ad-396177186d94)]
|
||||
interface nsIRemoteDebuggingPipe : nsISupports
|
||||
{
|
||||
void init(in nsIRemoteDebuggingPipeClient client);
|
||||
void sendMessage(in AString message);
|
||||
void stop();
|
||||
};
|
|
@ -1,223 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "nsRemoteDebuggingPipe.h"
|
||||
|
||||
#include <cstring>
|
||||
#if defined(_WIN32)
|
||||
#include <io.h>
|
||||
#include <windows.h>
|
||||
#else
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <sys/socket.h>
|
||||
#endif
|
||||
|
||||
#include "mozilla/StaticPtr.h"
|
||||
#include "nsISupportsPrimitives.h"
|
||||
#include "nsThreadUtils.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
NS_IMPL_ISUPPORTS(nsRemoteDebuggingPipe, nsIRemoteDebuggingPipe)
|
||||
|
||||
namespace {
|
||||
|
||||
StaticRefPtr<nsRemoteDebuggingPipe> gPipe;
|
||||
|
||||
const size_t kWritePacketSize = 1 << 16;
|
||||
|
||||
#if defined(_WIN32)
|
||||
HANDLE readHandle;
|
||||
HANDLE writeHandle;
|
||||
#else
|
||||
const int readFD = 3;
|
||||
const int writeFD = 4;
|
||||
#endif
|
||||
|
||||
size_t ReadBytes(void* buffer, size_t size, bool exact_size)
|
||||
{
|
||||
size_t bytesRead = 0;
|
||||
while (bytesRead < size) {
|
||||
#if defined(_WIN32)
|
||||
DWORD sizeRead = 0;
|
||||
bool hadError = !ReadFile(readHandle, static_cast<char*>(buffer) + bytesRead,
|
||||
size - bytesRead, &sizeRead, nullptr);
|
||||
#else
|
||||
int sizeRead = read(readFD, static_cast<char*>(buffer) + bytesRead,
|
||||
size - bytesRead);
|
||||
if (sizeRead < 0 && errno == EINTR)
|
||||
continue;
|
||||
bool hadError = sizeRead <= 0;
|
||||
#endif
|
||||
if (hadError) {
|
||||
return 0;
|
||||
}
|
||||
bytesRead += sizeRead;
|
||||
if (!exact_size)
|
||||
break;
|
||||
}
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
void WriteBytes(const char* bytes, size_t size)
|
||||
{
|
||||
size_t totalWritten = 0;
|
||||
while (totalWritten < size) {
|
||||
size_t length = size - totalWritten;
|
||||
if (length > kWritePacketSize)
|
||||
length = kWritePacketSize;
|
||||
#if defined(_WIN32)
|
||||
DWORD bytesWritten = 0;
|
||||
bool hadError = !WriteFile(writeHandle, bytes + totalWritten, static_cast<DWORD>(length), &bytesWritten, nullptr);
|
||||
#else
|
||||
int bytesWritten = write(writeFD, bytes + totalWritten, length);
|
||||
if (bytesWritten < 0 && errno == EINTR)
|
||||
continue;
|
||||
bool hadError = bytesWritten <= 0;
|
||||
#endif
|
||||
if (hadError)
|
||||
return;
|
||||
totalWritten += bytesWritten;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// static
|
||||
already_AddRefed<nsIRemoteDebuggingPipe> nsRemoteDebuggingPipe::GetSingleton() {
|
||||
if (!gPipe) {
|
||||
gPipe = new nsRemoteDebuggingPipe();
|
||||
}
|
||||
return do_AddRef(gPipe);
|
||||
}
|
||||
|
||||
nsRemoteDebuggingPipe::nsRemoteDebuggingPipe() = default;
|
||||
|
||||
nsRemoteDebuggingPipe::~nsRemoteDebuggingPipe() = default;
|
||||
|
||||
nsresult nsRemoteDebuggingPipe::Init(nsIRemoteDebuggingPipeClient* aClient) {
|
||||
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Remote debugging pipe must be used on the Main thread.");
|
||||
if (mClient) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
mClient = aClient;
|
||||
|
||||
MOZ_ALWAYS_SUCCEEDS(NS_NewNamedThread("Pipe Reader", getter_AddRefs(mReaderThread)));
|
||||
MOZ_ALWAYS_SUCCEEDS(NS_NewNamedThread("Pipe Writer", getter_AddRefs(mWriterThread)));
|
||||
|
||||
#if defined(_WIN32)
|
||||
CHAR pipeReadStr[20];
|
||||
CHAR pipeWriteStr[20];
|
||||
GetEnvironmentVariableA("PW_PIPE_READ", pipeReadStr, 20);
|
||||
GetEnvironmentVariableA("PW_PIPE_WRITE", pipeWriteStr, 20);
|
||||
readHandle = reinterpret_cast<HANDLE>(atoi(pipeReadStr));
|
||||
writeHandle = reinterpret_cast<HANDLE>(atoi(pipeWriteStr));
|
||||
#endif
|
||||
|
||||
MOZ_ALWAYS_SUCCEEDS(mReaderThread->Dispatch(NewRunnableMethod(
|
||||
"nsRemoteDebuggingPipe::ReaderLoop",
|
||||
this, &nsRemoteDebuggingPipe::ReaderLoop), nsIThread::DISPATCH_NORMAL));
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
nsresult nsRemoteDebuggingPipe::Stop() {
|
||||
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Remote debugging pipe must be used on the Main thread.");
|
||||
if (!mClient) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
m_terminated = true;
|
||||
mClient = nullptr;
|
||||
// Cancel pending synchronous read.
|
||||
#if defined(_WIN32)
|
||||
CancelIoEx(readHandle, nullptr);
|
||||
CloseHandle(readHandle);
|
||||
CloseHandle(writeHandle);
|
||||
#else
|
||||
shutdown(readFD, SHUT_RDWR);
|
||||
shutdown(writeFD, SHUT_RDWR);
|
||||
#endif
|
||||
mReaderThread->Shutdown();
|
||||
mReaderThread = nullptr;
|
||||
mWriterThread->Shutdown();
|
||||
mWriterThread = nullptr;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
void nsRemoteDebuggingPipe::ReaderLoop() {
|
||||
const size_t bufSize = 256 * 1024;
|
||||
std::vector<char> buffer;
|
||||
buffer.resize(bufSize);
|
||||
std::vector<char> line;
|
||||
while (!m_terminated) {
|
||||
size_t size = ReadBytes(buffer.data(), bufSize, false);
|
||||
if (!size) {
|
||||
nsCOMPtr<nsIRunnable> runnable = NewRunnableMethod<>(
|
||||
"nsRemoteDebuggingPipe::Disconnected",
|
||||
this, &nsRemoteDebuggingPipe::Disconnected);
|
||||
NS_DispatchToMainThread(runnable.forget());
|
||||
break;
|
||||
}
|
||||
size_t start = 0;
|
||||
size_t end = line.size();
|
||||
line.insert(line.end(), buffer.begin(), buffer.begin() + size);
|
||||
while (true) {
|
||||
for (; end < line.size(); ++end) {
|
||||
if (line[end] == '\0') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (end == line.size()) {
|
||||
break;
|
||||
}
|
||||
if (end > start) {
|
||||
nsCString message;
|
||||
message.Append(line.data() + start, end - start);
|
||||
nsCOMPtr<nsIRunnable> runnable = NewRunnableMethod<nsCString>(
|
||||
"nsRemoteDebuggingPipe::ReceiveMessage",
|
||||
this, &nsRemoteDebuggingPipe::ReceiveMessage, std::move(message));
|
||||
NS_DispatchToMainThread(runnable.forget());
|
||||
}
|
||||
++end;
|
||||
start = end;
|
||||
}
|
||||
if (start != 0 && start < line.size()) {
|
||||
memmove(line.data(), line.data() + start, line.size() - start);
|
||||
}
|
||||
line.resize(line.size() - start);
|
||||
}
|
||||
}
|
||||
|
||||
void nsRemoteDebuggingPipe::ReceiveMessage(const nsCString& aMessage) {
|
||||
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Remote debugging pipe must be used on the Main thread.");
|
||||
if (mClient) {
|
||||
NS_ConvertUTF8toUTF16 utf16(aMessage);
|
||||
mClient->ReceiveMessage(utf16);
|
||||
}
|
||||
}
|
||||
|
||||
void nsRemoteDebuggingPipe::Disconnected() {
|
||||
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Remote debugging pipe must be used on the Main thread.");
|
||||
if (mClient)
|
||||
mClient->Disconnected();
|
||||
}
|
||||
|
||||
nsresult nsRemoteDebuggingPipe::SendMessage(const nsAString& aMessage) {
|
||||
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Remote debugging pipe must be used on the Main thread.");
|
||||
if (!mClient) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
NS_ConvertUTF16toUTF8 utf8(aMessage);
|
||||
nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
|
||||
"nsRemoteDebuggingPipe::SendMessage",
|
||||
[message = std::move(utf8)] {
|
||||
const nsCString& flat = PromiseFlatCString(message);
|
||||
WriteBytes(flat.Data(), flat.Length());
|
||||
WriteBytes("\0", 1);
|
||||
});
|
||||
MOZ_ALWAYS_SUCCEEDS(mWriterThread->Dispatch(runnable.forget(), nsIThread::DISPATCH_NORMAL));
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
} // namespace mozilla
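
The reader and writer above frame every protocol message as UTF-8 JSON terminated by a single NUL byte, read from fd 3 and written to fd 4 (or the PW_PIPE_READ/PW_PIPE_WRITE handles on Windows). A hedged JavaScript sketch of how the other end of the pipe could split the byte stream back into messages (the buffer handling and the handleMessage callback are illustrative):

  let pending = Buffer.alloc(0);
  function onPipeData(chunk) {
    pending = Buffer.concat([pending, chunk]);
    let nul;
    while ((nul = pending.indexOf(0)) !== -1) {
      const message = JSON.parse(pending.slice(0, nul).toString('utf8'));
      pending = pending.slice(nul + 1);
      handleMessage(message);  // hypothetical consumer callback
    }
  }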
|
|
@ -1,34 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <memory>
|
||||
#include "nsCOMPtr.h"
|
||||
#include "nsIRemoteDebuggingPipe.h"
|
||||
#include "nsThread.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
class nsRemoteDebuggingPipe final : public nsIRemoteDebuggingPipe {
|
||||
public:
|
||||
NS_DECL_THREADSAFE_ISUPPORTS
|
||||
NS_DECL_NSIREMOTEDEBUGGINGPIPE
|
||||
|
||||
static already_AddRefed<nsIRemoteDebuggingPipe> GetSingleton();
|
||||
nsRemoteDebuggingPipe();
|
||||
|
||||
private:
|
||||
void ReaderLoop();
|
||||
void ReceiveMessage(const nsCString& aMessage);
|
||||
void Disconnected();
|
||||
~nsRemoteDebuggingPipe();
|
||||
|
||||
RefPtr<nsIRemoteDebuggingPipeClient> mClient;
|
||||
nsCOMPtr<nsIThread> mReaderThread;
|
||||
nsCOMPtr<nsIThread> mWriterThread;
|
||||
std::atomic<bool> m_terminated { false };
|
||||
};
|
||||
|
||||
} // namespace mozilla
|
|
@ -1,296 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
const {AddonManager} = ChromeUtils.import("resource://gre/modules/AddonManager.jsm");
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
const {TargetRegistry} = ChromeUtils.import("chrome://juggler/content/TargetRegistry.js");
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
const {PageHandler} = ChromeUtils.import("chrome://juggler/content/protocol/PageHandler.js");
|
||||
const {AppConstants} = ChromeUtils.import("resource://gre/modules/AppConstants.jsm");
|
||||
|
||||
const helper = new Helper();
|
||||
|
||||
class BrowserHandler {
|
||||
constructor(session, dispatcher, targetRegistry, onclose, onstart) {
|
||||
this._session = session;
|
||||
this._dispatcher = dispatcher;
|
||||
this._targetRegistry = targetRegistry;
|
||||
this._enabled = false;
|
||||
this._attachToDefaultContext = false;
|
||||
this._eventListeners = [];
|
||||
this._createdBrowserContextIds = new Set();
|
||||
this._attachedSessions = new Map();
|
||||
this._onclose = onclose;
|
||||
this._onstart = onstart;
|
||||
}
|
||||
|
||||
async ['Browser.enable']({attachToDefaultContext}) {
|
||||
if (this._enabled)
|
||||
return;
|
||||
await this._onstart();
|
||||
this._enabled = true;
|
||||
this._attachToDefaultContext = attachToDefaultContext;
|
||||
|
||||
this._eventListeners = [
|
||||
helper.on(this._targetRegistry, TargetRegistry.Events.TargetCreated, this._onTargetCreated.bind(this)),
|
||||
helper.on(this._targetRegistry, TargetRegistry.Events.TargetDestroyed, this._onTargetDestroyed.bind(this)),
|
||||
helper.on(this._targetRegistry, TargetRegistry.Events.DownloadCreated, this._onDownloadCreated.bind(this)),
|
||||
helper.on(this._targetRegistry, TargetRegistry.Events.DownloadFinished, this._onDownloadFinished.bind(this)),
|
||||
helper.on(this._targetRegistry, TargetRegistry.Events.ScreencastStopped, sessionId => {
|
||||
this._session.emitEvent('Browser.videoRecordingFinished', {screencastId: '' + sessionId});
|
||||
})
|
||||
];
|
||||
|
||||
for (const target of this._targetRegistry.targets())
|
||||
this._onTargetCreated(target);
|
||||
}
|
||||
|
||||
async ['Browser.createBrowserContext']({removeOnDetach}) {
|
||||
if (!this._enabled)
|
||||
throw new Error('Browser domain is not enabled');
|
||||
const browserContext = this._targetRegistry.createBrowserContext(removeOnDetach);
|
||||
this._createdBrowserContextIds.add(browserContext.browserContextId);
|
||||
return {browserContextId: browserContext.browserContextId};
|
||||
}
|
||||
|
||||
async ['Browser.removeBrowserContext']({browserContextId}) {
|
||||
if (!this._enabled)
|
||||
throw new Error('Browser domain is not enabled');
|
||||
await this._targetRegistry.browserContextForId(browserContextId).destroy();
|
||||
this._createdBrowserContextIds.delete(browserContextId);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
helper.removeListeners(this._eventListeners);
|
||||
for (const [target, session] of this._attachedSessions)
|
||||
this._dispatcher.destroySession(session);
|
||||
this._attachedSessions.clear();
|
||||
for (const browserContextId of this._createdBrowserContextIds) {
|
||||
const browserContext = this._targetRegistry.browserContextForId(browserContextId);
|
||||
if (browserContext.removeOnDetach)
|
||||
browserContext.destroy();
|
||||
}
|
||||
this._createdBrowserContextIds.clear();
|
||||
}
|
||||
|
||||
_shouldAttachToTarget(target) {
|
||||
if (this._createdBrowserContextIds.has(target._browserContext.browserContextId))
|
||||
return true;
|
||||
return this._attachToDefaultContext && target._browserContext === this._targetRegistry.defaultContext();
|
||||
}
|
||||
|
||||
_onTargetCreated(target) {
|
||||
if (!this._shouldAttachToTarget(target))
|
||||
return;
|
||||
const channel = target.channel();
|
||||
const session = this._dispatcher.createSession();
|
||||
this._attachedSessions.set(target, session);
|
||||
this._session.emitEvent('Browser.attachedToTarget', {
|
||||
sessionId: session.sessionId(),
|
||||
targetInfo: target.info()
|
||||
});
|
||||
session.setHandler(new PageHandler(target, session, channel));
|
||||
}
|
||||
|
||||
_onTargetDestroyed(target) {
|
||||
const session = this._attachedSessions.get(target);
|
||||
if (!session)
|
||||
return;
|
||||
this._attachedSessions.delete(target);
|
||||
this._dispatcher.destroySession(session);
|
||||
this._session.emitEvent('Browser.detachedFromTarget', {
|
||||
sessionId: session.sessionId(),
|
||||
targetId: target.id(),
|
||||
});
|
||||
}
|
||||
|
||||
_onDownloadCreated(downloadInfo) {
|
||||
this._session.emitEvent('Browser.downloadCreated', downloadInfo);
|
||||
}
|
||||
|
||||
_onDownloadFinished(downloadInfo) {
|
||||
this._session.emitEvent('Browser.downloadFinished', downloadInfo);
|
||||
}
|
||||
|
||||
async ['Browser.cancelDownload']({uuid}) {
|
||||
await this._targetRegistry.cancelDownload({uuid});
|
||||
}
|
||||
|
||||
async ['Browser.newPage']({browserContextId}) {
|
||||
const targetId = await this._targetRegistry.newPage({browserContextId});
|
||||
return {targetId};
|
||||
}
|
||||
|
||||
async ['Browser.close']() {
|
||||
let browserWindow = Services.wm.getMostRecentWindow(
|
||||
"navigator:browser"
|
||||
);
|
||||
if (browserWindow && browserWindow.gBrowserInit) {
|
||||
// idleTasksFinishedPromise does not resolve when the window
|
||||
// is closed early enough, so we race against window closure.
|
||||
await Promise.race([
|
||||
browserWindow.gBrowserInit.idleTasksFinishedPromise,
|
||||
waitForWindowClosed(browserWindow),
|
||||
]);
|
||||
}
|
||||
this._onclose();
|
||||
Services.startup.quit(Ci.nsIAppStartup.eForceQuit);
|
||||
}
|
||||
|
||||
async ['Browser.grantPermissions']({browserContextId, origin, permissions}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).grantPermissions(origin, permissions);
|
||||
}
|
||||
|
||||
async ['Browser.resetPermissions']({browserContextId}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).resetPermissions();
|
||||
}
|
||||
|
||||
['Browser.setExtraHTTPHeaders']({browserContextId, headers}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).extraHTTPHeaders = headers;
|
||||
}
|
||||
|
||||
['Browser.setHTTPCredentials']({browserContextId, credentials}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).httpCredentials = nullToUndefined(credentials);
|
||||
}
|
||||
|
||||
async ['Browser.setBrowserProxy']({type, host, port, bypass, username, password}) {
|
||||
this._targetRegistry.setBrowserProxy({ type, host, port, bypass, username, password});
|
||||
}
|
||||
|
||||
async ['Browser.setContextProxy']({browserContextId, type, host, port, bypass, username, password}) {
|
||||
const browserContext = this._targetRegistry.browserContextForId(browserContextId);
|
||||
browserContext.setProxy({ type, host, port, bypass, username, password });
|
||||
}
|
||||
|
||||
['Browser.setRequestInterception']({browserContextId, enabled}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).requestInterceptionEnabled = enabled;
|
||||
}
|
||||
|
||||
['Browser.setIgnoreHTTPSErrors']({browserContextId, ignoreHTTPSErrors}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).setIgnoreHTTPSErrors(nullToUndefined(ignoreHTTPSErrors));
|
||||
}
|
||||
|
||||
['Browser.setDownloadOptions']({browserContextId, downloadOptions}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).downloadOptions = nullToUndefined(downloadOptions);
|
||||
}
|
||||
|
||||
async ['Browser.setGeolocationOverride']({browserContextId, geolocation}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('geolocation', nullToUndefined(geolocation));
|
||||
}
|
||||
|
||||
async ['Browser.setOnlineOverride']({browserContextId, override}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('onlineOverride', nullToUndefined(override));
|
||||
}
|
||||
|
||||
async ['Browser.setColorScheme']({browserContextId, colorScheme}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setColorScheme(nullToUndefined(colorScheme));
|
||||
}
|
||||
|
||||
async ['Browser.setReducedMotion']({browserContextId, reducedMotion}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setReducedMotion(nullToUndefined(reducedMotion));
|
||||
}
|
||||
|
||||
async ['Browser.setForcedColors']({browserContextId, forcedColors}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setForcedColors(nullToUndefined(forcedColors));
|
||||
}
|
||||
|
||||
async ['Browser.setVideoRecordingOptions']({browserContextId, options}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setVideoRecordingOptions(options);
|
||||
}
|
||||
|
||||
async ['Browser.setUserAgentOverride']({browserContextId, userAgent}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setDefaultUserAgent(userAgent);
|
||||
}
|
||||
|
||||
async ['Browser.setPlatformOverride']({browserContextId, platform}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setDefaultPlatform(platform);
|
||||
}
|
||||
|
||||
async ['Browser.setBypassCSP']({browserContextId, bypassCSP}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('bypassCSP', nullToUndefined(bypassCSP));
|
||||
}
|
||||
|
||||
async ['Browser.setJavaScriptDisabled']({browserContextId, javaScriptDisabled}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setJavaScriptDisabled(javaScriptDisabled);
|
||||
}
|
||||
|
||||
async ['Browser.setLocaleOverride']({browserContextId, locale}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('locale', nullToUndefined(locale));
|
||||
}
|
||||
|
||||
async ['Browser.setTimezoneOverride']({browserContextId, timezoneId}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('timezoneId', nullToUndefined(timezoneId));
|
||||
}
|
||||
|
||||
async ['Browser.setTouchOverride']({browserContextId, hasTouch}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setTouchOverride(nullToUndefined(hasTouch));
|
||||
}
|
||||
|
||||
async ['Browser.setDefaultViewport']({browserContextId, viewport}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setDefaultViewport(nullToUndefined(viewport));
|
||||
}
|
||||
|
||||
async ['Browser.setScrollbarsHidden']({browserContextId, hidden}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).applySetting('scrollbarsHidden', nullToUndefined(hidden));
|
||||
}
|
||||
|
||||
async ['Browser.setInitScripts']({browserContextId, scripts}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).setInitScripts(scripts);
|
||||
}
|
||||
|
||||
async ['Browser.addBinding']({browserContextId, worldName, name, script}) {
|
||||
await this._targetRegistry.browserContextForId(browserContextId).addBinding(worldName, name, script);
|
||||
}
|
||||
|
||||
['Browser.setCookies']({browserContextId, cookies}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).setCookies(cookies);
|
||||
}
|
||||
|
||||
['Browser.clearCookies']({browserContextId}) {
|
||||
this._targetRegistry.browserContextForId(browserContextId).clearCookies();
|
||||
}
|
||||
|
||||
['Browser.getCookies']({browserContextId}) {
|
||||
const cookies = this._targetRegistry.browserContextForId(browserContextId).getCookies();
|
||||
return {cookies};
|
||||
}
|
||||
|
||||
async ['Browser.getInfo']() {
|
||||
const version = AppConstants.MOZ_APP_VERSION_DISPLAY;
|
||||
const userAgent = Components.classes["@mozilla.org/network/protocol;1?name=http"]
|
||||
.getService(Components.interfaces.nsIHttpProtocolHandler)
|
||||
.userAgent;
|
||||
return {version: 'Firefox/' + version, userAgent};
|
||||
}
|
||||
}
|
||||
|
||||
async function waitForWindowClosed(browserWindow) {
|
||||
if (browserWindow.closed)
|
||||
return;
|
||||
await new Promise((resolve => {
|
||||
const listener = {
|
||||
onCloseWindow: window => {
|
||||
let domWindow;
|
||||
if (window instanceof Ci.nsIAppWindow)
|
||||
domWindow = window.QueryInterface(Ci.nsIInterfaceRequestor).getInterface(Ci.nsIDOMWindowInternal || Ci.nsIDOMWindow);
|
||||
else
|
||||
domWindow = window;
|
||||
if (domWindow === browserWindow) {
|
||||
Services.wm.removeListener(listener);
|
||||
resolve();
|
||||
}
|
||||
},
|
||||
};
|
||||
Services.wm.addListener(listener);
|
||||
}));
|
||||
}
|
||||
|
||||
function nullToUndefined(value) {
|
||||
return value === null ? undefined : value;
|
||||
}
|
||||
|
||||
var EXPORTED_SYMBOLS = ['BrowserHandler'];
|
||||
this.BrowserHandler = BrowserHandler;
|
|
@ -1,135 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
const {protocol, checkScheme} = ChromeUtils.import("chrome://juggler/content/protocol/Protocol.js");
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
|
||||
const helper = new Helper();
|
||||
|
||||
class Dispatcher {
|
||||
/**
|
||||
* @param {Connection} connection
|
||||
*/
|
||||
constructor(connection) {
|
||||
this._connection = connection;
|
||||
this._connection.onmessage = this._dispatch.bind(this);
|
||||
this._connection.onclose = this._dispose.bind(this);
|
||||
this._sessions = new Map();
|
||||
this._rootSession = new ProtocolSession(this, undefined);
|
||||
}
|
||||
|
||||
rootSession() {
|
||||
return this._rootSession;
|
||||
}
|
||||
|
||||
createSession() {
|
||||
const session = new ProtocolSession(this, helper.generateId());
|
||||
this._sessions.set(session.sessionId(), session);
|
||||
return session;
|
||||
}
|
||||
|
||||
destroySession(session) {
|
||||
this._sessions.delete(session.sessionId());
|
||||
session._dispose();
|
||||
}
|
||||
|
||||
_dispose() {
|
||||
this._connection.onmessage = null;
|
||||
this._connection.onclose = null;
|
||||
this._rootSession._dispose();
|
||||
this._rootSession = null;
|
||||
this._sessions.clear();
|
||||
}
|
||||
|
||||
async _dispatch(event) {
|
||||
const data = JSON.parse(event.data);
|
||||
const id = data.id;
|
||||
const sessionId = data.sessionId;
|
||||
delete data.sessionId;
|
||||
try {
|
||||
const session = sessionId ? this._sessions.get(sessionId) : this._rootSession;
|
||||
if (!session)
|
||||
throw new Error(`ERROR: cannot find session with id "${sessionId}"`);
|
||||
const method = data.method;
|
||||
const params = data.params || {};
|
||||
if (!id)
|
||||
throw new Error(`ERROR: every message must have an 'id' parameter`);
|
||||
if (!method)
|
||||
throw new Error(`ERROR: every message must have a 'method' parameter`);
|
||||
|
||||
const [domain, methodName] = method.split('.');
|
||||
const descriptor = protocol.domains[domain] ? protocol.domains[domain].methods[methodName] : null;
|
||||
if (!descriptor)
|
||||
throw new Error(`ERROR: method '${method}' is not supported`);
|
||||
let details = {};
|
||||
if (!checkScheme(descriptor.params || {}, params, details))
|
||||
throw new Error(`ERROR: failed to call method '${method}' with parameters ${JSON.stringify(params, null, 2)}\n${details.error}`);
|
||||
|
||||
const result = await session.dispatch(method, params);
|
||||
|
||||
details = {};
|
||||
if ((descriptor.returns || result) && !checkScheme(descriptor.returns, result, details))
|
||||
throw new Error(`ERROR: failed to dispatch method '${method}' result ${JSON.stringify(result, null, 2)}\n${details.error}`);
|
||||
|
||||
this._connection.send(JSON.stringify({id, sessionId, result}));
|
||||
} catch (e) {
|
||||
this._connection.send(JSON.stringify({id, sessionId, error: {
|
||||
message: e.message,
|
||||
data: e.stack
|
||||
}}));
|
||||
}
|
||||
}
|
||||
|
||||
_emitEvent(sessionId, eventName, params) {
|
||||
const [domain, eName] = eventName.split('.');
|
||||
const scheme = protocol.domains[domain] ? protocol.domains[domain].events[eName] : null;
|
||||
if (!scheme)
|
||||
throw new Error(`ERROR: event '${eventName}' is not supported`);
|
||||
const details = {};
|
||||
if (!checkScheme(scheme, params || {}, details))
|
||||
throw new Error(`ERROR: failed to emit event '${eventName}' ${JSON.stringify(params, null, 2)}\n${details.error}`);
|
||||
this._connection.send(JSON.stringify({method: eventName, params, sessionId}));
|
||||
}
|
||||
}
|
||||
|
||||
class ProtocolSession {
|
||||
constructor(dispatcher, sessionId) {
|
||||
this._sessionId = sessionId;
|
||||
this._dispatcher = dispatcher;
|
||||
this._handler = null;
|
||||
}
|
||||
|
||||
sessionId() {
|
||||
return this._sessionId;
|
||||
}
|
||||
|
||||
setHandler(handler) {
|
||||
this._handler = handler;
|
||||
}
|
||||
|
||||
_dispose() {
|
||||
if (this._handler)
|
||||
this._handler.dispose();
|
||||
this._handler = null;
|
||||
this._dispatcher = null;
|
||||
}
|
||||
|
||||
emitEvent(eventName, params) {
|
||||
if (!this._dispatcher)
|
||||
throw new Error(`Session has been disposed.`);
|
||||
this._dispatcher._emitEvent(this._sessionId, eventName, params);
|
||||
}
|
||||
|
||||
async dispatch(method, params) {
|
||||
if (!this._handler)
|
||||
throw new Error(`Session does not have a handler!`);
|
||||
if (!this._handler[method])
|
||||
throw new Error(`Handler does not implement method "${method}"`);
|
||||
return await this._handler[method](params);
|
||||
}
|
||||
}
|
||||
|
||||
this.EXPORTED_SYMBOLS = ['Dispatcher'];
|
||||
this.Dispatcher = Dispatcher;
|
||||
@@ -1,466 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
const {Helper} = ChromeUtils.import('chrome://juggler/content/Helper.js');
|
||||
const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
|
||||
const {NetworkObserver, PageNetwork} = ChromeUtils.import('chrome://juggler/content/NetworkObserver.js');
|
||||
const {PageTarget} = ChromeUtils.import('chrome://juggler/content/TargetRegistry.js');
|
||||
const {setTimeout} = ChromeUtils.import('resource://gre/modules/Timer.jsm');
|
||||
|
||||
const Cc = Components.classes;
|
||||
const Ci = Components.interfaces;
|
||||
const Cu = Components.utils;
|
||||
const XUL_NS = 'http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul';
|
||||
const helper = new Helper();
|
||||
|
||||
function hashConsoleMessage(params) {
|
||||
return params.location.lineNumber + ':' + params.location.columnNumber + ':' + params.location.url;
|
||||
}
|
||||
|
||||
class WorkerHandler {
|
||||
constructor(session, contentChannel, workerId) {
|
||||
this._session = session;
|
||||
this._contentWorker = contentChannel.connect(workerId);
|
||||
this._workerConsoleMessages = new Set();
|
||||
this._workerId = workerId;
|
||||
|
||||
const emitWrappedProtocolEvent = eventName => {
|
||||
return params => {
|
||||
this._session.emitEvent('Page.dispatchMessageFromWorker', {
|
||||
workerId,
|
||||
message: JSON.stringify({method: eventName, params}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this._eventListeners = [
|
||||
contentChannel.register(workerId, {
|
||||
runtimeConsole: (params) => {
|
||||
this._workerConsoleMessages.add(hashConsoleMessage(params));
|
||||
emitWrappedProtocolEvent('Runtime.console')(params);
|
||||
},
|
||||
runtimeExecutionContextCreated: emitWrappedProtocolEvent('Runtime.executionContextCreated'),
|
||||
runtimeExecutionContextDestroyed: emitWrappedProtocolEvent('Runtime.executionContextDestroyed'),
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
async sendMessage(message) {
|
||||
const [domain, method] = message.method.split('.');
|
||||
if (domain !== 'Runtime')
|
||||
throw new Error('ERROR: can only dispatch to Runtime domain inside worker');
|
||||
const result = await this._contentWorker.send(method, message.params);
|
||||
this._session.emitEvent('Page.dispatchMessageFromWorker', {
|
||||
workerId: this._workerId,
|
||||
message: JSON.stringify({result, id: message.id}),
|
||||
});
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this._contentWorker.dispose();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
}
|
||||
|
||||
class PageHandler {
|
||||
constructor(target, session, contentChannel) {
|
||||
this._session = session;
|
||||
this._contentChannel = contentChannel;
|
||||
this._contentPage = contentChannel.connect('page');
|
||||
this._workers = new Map();
|
||||
|
||||
this._pageTarget = target;
|
||||
this._pageNetwork = PageNetwork.forPageTarget(target);
|
||||
|
||||
const emitProtocolEvent = eventName => {
|
||||
return (...args) => this._session.emitEvent(eventName, ...args);
|
||||
}
|
||||
|
||||
this._reportedFrameIds = new Set();
|
||||
this._networkEventsForUnreportedFrameIds = new Map();
|
||||
|
||||
// `Page.ready` protocol event is emitted whenever the page has completed initialization, e.g.
// finished all the transient navigations to `about:blank`.
|
||||
//
|
||||
// We'd like to avoid reporting meaningful events before the `Page.ready` since they are likely
|
||||
// to be ignored by the protocol clients.
|
||||
this._isPageReady = false;
|
||||
|
||||
if (this._pageTarget.videoRecordingInfo())
|
||||
this._onVideoRecordingStarted();
|
||||
|
||||
this._eventListeners = [
|
||||
helper.on(this._pageTarget, PageTarget.Events.DialogOpened, this._onDialogOpened.bind(this)),
|
||||
helper.on(this._pageTarget, PageTarget.Events.DialogClosed, this._onDialogClosed.bind(this)),
|
||||
helper.on(this._pageTarget, PageTarget.Events.Crashed, () => {
|
||||
this._session.emitEvent('Page.crashed', {});
|
||||
}),
|
||||
helper.on(this._pageTarget, PageTarget.Events.ScreencastStarted, this._onVideoRecordingStarted.bind(this)),
|
||||
helper.on(this._pageTarget, PageTarget.Events.ScreencastFrame, this._onScreencastFrame.bind(this)),
|
||||
helper.on(this._pageNetwork, PageNetwork.Events.Request, this._handleNetworkEvent.bind(this, 'Network.requestWillBeSent')),
|
||||
helper.on(this._pageNetwork, PageNetwork.Events.Response, this._handleNetworkEvent.bind(this, 'Network.responseReceived')),
|
||||
helper.on(this._pageNetwork, PageNetwork.Events.RequestFinished, this._handleNetworkEvent.bind(this, 'Network.requestFinished')),
|
||||
helper.on(this._pageNetwork, PageNetwork.Events.RequestFailed, this._handleNetworkEvent.bind(this, 'Network.requestFailed')),
|
||||
contentChannel.register('page', {
|
||||
pageBindingCalled: emitProtocolEvent('Page.bindingCalled'),
|
||||
pageDispatchMessageFromWorker: emitProtocolEvent('Page.dispatchMessageFromWorker'),
|
||||
pageEventFired: emitProtocolEvent('Page.eventFired'),
|
||||
pageFileChooserOpened: emitProtocolEvent('Page.fileChooserOpened'),
|
||||
pageFrameAttached: this._onFrameAttached.bind(this),
|
||||
pageFrameDetached: emitProtocolEvent('Page.frameDetached'),
|
||||
pageLinkClicked: emitProtocolEvent('Page.linkClicked'),
|
||||
pageWillOpenNewWindowAsynchronously: emitProtocolEvent('Page.willOpenNewWindowAsynchronously'),
|
||||
pageNavigationAborted: emitProtocolEvent('Page.navigationAborted'),
|
||||
pageNavigationCommitted: emitProtocolEvent('Page.navigationCommitted'),
|
||||
pageNavigationStarted: emitProtocolEvent('Page.navigationStarted'),
|
||||
pageReady: this._onPageReady.bind(this),
|
||||
pageSameDocumentNavigation: emitProtocolEvent('Page.sameDocumentNavigation'),
|
||||
pageUncaughtError: emitProtocolEvent('Page.uncaughtError'),
|
||||
pageWorkerCreated: this._onWorkerCreated.bind(this),
|
||||
pageWorkerDestroyed: this._onWorkerDestroyed.bind(this),
|
||||
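// De-duplicate console messages that were already reported by a worker (see the runtimeConsole handler in WorkerHandler).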
runtimeConsole: params => {
|
||||
const consoleMessageHash = hashConsoleMessage(params);
|
||||
for (const worker of this._workers.values()) {
|
||||
if (worker._workerConsoleMessages.has(consoleMessageHash)) {
|
||||
worker._workerConsoleMessages.delete(consoleMessageHash);
|
||||
return;
|
||||
}
|
||||
}
|
||||
emitProtocolEvent('Runtime.console')(params);
|
||||
},
|
||||
runtimeExecutionContextCreated: emitProtocolEvent('Runtime.executionContextCreated'),
|
||||
runtimeExecutionContextDestroyed: emitProtocolEvent('Runtime.executionContextDestroyed'),
|
||||
|
||||
webSocketCreated: emitProtocolEvent('Page.webSocketCreated'),
|
||||
webSocketOpened: emitProtocolEvent('Page.webSocketOpened'),
|
||||
webSocketClosed: emitProtocolEvent('Page.webSocketClosed'),
|
||||
webSocketFrameReceived: emitProtocolEvent('Page.webSocketFrameReceived'),
|
||||
webSocketFrameSent: emitProtocolEvent('Page.webSocketFrameSent'),
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
async dispose() {
|
||||
this._contentPage.dispose();
|
||||
helper.removeListeners(this._eventListeners);
|
||||
}
|
||||
|
||||
_onVideoRecordingStarted() {
|
||||
const info = this._pageTarget.videoRecordingInfo();
|
||||
this._session.emitEvent('Page.videoRecordingStarted', { screencastId: info.sessionId, file: info.file });
|
||||
}
|
||||
|
||||
_onScreencastFrame(params) {
|
||||
this._session.emitEvent('Page.screencastFrame', params);
|
||||
}
|
||||
|
||||
_onPageReady(event) {
|
||||
this._isPageReady = true;
|
||||
this._session.emitEvent('Page.ready');
|
||||
for (const dialog of this._pageTarget.dialogs())
|
||||
this._onDialogOpened(dialog);
|
||||
}
|
||||
|
||||
_onDialogOpened(dialog) {
|
||||
if (!this._isPageReady)
|
||||
return;
|
||||
this._session.emitEvent('Page.dialogOpened', {
|
||||
dialogId: dialog.id(),
|
||||
type: dialog.type(),
|
||||
message: dialog.message(),
|
||||
defaultValue: dialog.defaultValue(),
|
||||
});
|
||||
}
|
||||
|
||||
_onDialogClosed(dialog) {
|
||||
if (!this._isPageReady)
|
||||
return;
|
||||
this._session.emitEvent('Page.dialogClosed', { dialogId: dialog.id(), });
|
||||
}
|
||||
|
||||
_onWorkerCreated({workerId, frameId, url}) {
|
||||
const worker = new WorkerHandler(this._session, this._contentChannel, workerId);
|
||||
this._workers.set(workerId, worker);
|
||||
this._session.emitEvent('Page.workerCreated', {workerId, frameId, url});
|
||||
}
|
||||
|
||||
_onWorkerDestroyed({workerId}) {
|
||||
const worker = this._workers.get(workerId);
|
||||
if (!worker)
|
||||
return;
|
||||
this._workers.delete(workerId);
|
||||
worker.dispose();
|
||||
this._session.emitEvent('Page.workerDestroyed', {workerId});
|
||||
}
|
||||
|
||||
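// Network events for frames that have not yet been reported via 'Page.frameAttached' are buffered here and flushed from _onFrameAttached.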
_handleNetworkEvent(protocolEventName, eventDetails, frameId) {
|
||||
if (!this._reportedFrameIds.has(frameId)) {
|
||||
let events = this._networkEventsForUnreportedFrameIds.get(frameId);
|
||||
if (!events) {
|
||||
events = [];
|
||||
this._networkEventsForUnreportedFrameIds.set(frameId, events);
|
||||
}
|
||||
events.push({eventName: protocolEventName, eventDetails});
|
||||
} else {
|
||||
this._session.emitEvent(protocolEventName, eventDetails);
|
||||
}
|
||||
}
|
||||
|
||||
_onFrameAttached({frameId, parentFrameId}) {
|
||||
this._session.emitEvent('Page.frameAttached', {frameId, parentFrameId});
|
||||
this._reportedFrameIds.add(frameId);
|
||||
const events = this._networkEventsForUnreportedFrameIds.get(frameId) || [];
|
||||
this._networkEventsForUnreportedFrameIds.delete(frameId);
|
||||
for (const {eventName, eventDetails} of events)
|
||||
this._session.emitEvent(eventName, eventDetails);
|
||||
}
|
||||
|
||||
async ['Page.close']({runBeforeUnload}) {
|
||||
// Postpone target close to deliver response in session.
|
||||
Services.tm.dispatchToMainThread(() => {
|
||||
this._pageTarget.close(runBeforeUnload);
|
||||
});
|
||||
}
|
||||
|
||||
async ['Page.setViewportSize']({viewportSize}) {
|
||||
await this._pageTarget.setViewportSize(viewportSize === null ? undefined : viewportSize);
|
||||
}
|
||||
|
||||
async ['Runtime.evaluate'](options) {
|
||||
return await this._contentPage.send('evaluate', options);
|
||||
}
|
||||
|
||||
async ['Runtime.callFunction'](options) {
|
||||
return await this._contentPage.send('callFunction', options);
|
||||
}
|
||||
|
||||
async ['Runtime.getObjectProperties'](options) {
|
||||
return await this._contentPage.send('getObjectProperties', options);
|
||||
}
|
||||
|
||||
async ['Runtime.disposeObject'](options) {
|
||||
return await this._contentPage.send('disposeObject', options);
|
||||
}
|
||||
|
||||
async ['Network.getResponseBody']({requestId}) {
|
||||
return this._pageNetwork.getResponseBody(requestId);
|
||||
}
|
||||
|
||||
async ['Network.setExtraHTTPHeaders']({headers}) {
|
||||
this._pageNetwork.setExtraHTTPHeaders(headers);
|
||||
}
|
||||
|
||||
async ['Network.setRequestInterception']({enabled}) {
|
||||
if (enabled)
|
||||
this._pageNetwork.enableRequestInterception();
|
||||
else
|
||||
this._pageNetwork.disableRequestInterception();
|
||||
}
|
||||
|
||||
async ['Network.resumeInterceptedRequest']({requestId, url, method, headers, postData}) {
|
||||
this._pageNetwork.resumeInterceptedRequest(requestId, url, method, headers, postData);
|
||||
}
|
||||
|
||||
async ['Network.abortInterceptedRequest']({requestId, errorCode}) {
|
||||
this._pageNetwork.abortInterceptedRequest(requestId, errorCode);
|
||||
}
|
||||
|
||||
async ['Network.fulfillInterceptedRequest']({requestId, status, statusText, headers, base64body}) {
|
||||
this._pageNetwork.fulfillInterceptedRequest(requestId, status, statusText, headers, base64body);
|
||||
}
|
||||
|
||||
async ['Accessibility.getFullAXTree'](params) {
|
||||
return await this._contentPage.send('getFullAXTree', params);
|
||||
}
|
||||
|
||||
async ['Page.setFileInputFiles'](options) {
|
||||
return await this._contentPage.send('setFileInputFiles', options);
|
||||
}
|
||||
|
||||
async ['Page.setEmulatedMedia']({colorScheme, type, reducedMotion, forcedColors}) {
|
||||
this._pageTarget.setColorScheme(colorScheme || null);
|
||||
this._pageTarget.setReducedMotion(reducedMotion || null);
|
||||
this._pageTarget.setForcedColors(forcedColors || null);
|
||||
this._pageTarget.setEmulatedMedia(type);
|
||||
}
|
||||
|
||||
async ['Page.bringToFront'](options) {
|
||||
this._pageTarget._window.focus();
|
||||
}
|
||||
|
||||
async ['Page.setCacheDisabled'](options) {
|
||||
return await this._contentPage.send('setCacheDisabled', options);
|
||||
}
|
||||
|
||||
async ['Page.addBinding'](options) {
|
||||
return await this._contentPage.send('addBinding', options);
|
||||
}
|
||||
|
||||
async ['Page.adoptNode'](options) {
|
||||
return await this._contentPage.send('adoptNode', options);
|
||||
}
|
||||
|
||||
async ['Page.screenshot']({ mimeType, clip, omitDeviceScaleFactor }) {
|
||||
const rect = new DOMRect(clip.x, clip.y, clip.width, clip.height);
|
||||
|
||||
const browsingContext = this._pageTarget.linkedBrowser().browsingContext;
|
||||
// `win.devicePixelRatio` returns a non-overridden value to privileged code.
|
||||
// See https://bugzilla.mozilla.org/show_bug.cgi?id=1761032
|
||||
// See https://phabricator.services.mozilla.com/D141323
|
||||
const devicePixelRatio = browsingContext.overrideDPPX || this._pageTarget._window.devicePixelRatio;
|
||||
const scale = omitDeviceScaleFactor ? 1 : devicePixelRatio;
|
||||
const canvasWidth = rect.width * scale;
|
||||
const canvasHeight = rect.height * scale;
|
||||
|
||||
const MAX_CANVAS_DIMENSIONS = 32767;
|
||||
const MAX_CANVAS_AREA = 472907776;
|
||||
if (canvasWidth > MAX_CANVAS_DIMENSIONS || canvasHeight > MAX_CANVAS_DIMENSIONS)
|
||||
throw new Error('Cannot take screenshot larger than ' + MAX_CANVAS_DIMENSIONS);
|
||||
if (canvasWidth * canvasHeight > MAX_CANVAS_AREA)
|
||||
throw new Error('Cannot take screenshot with more than ' + MAX_CANVAS_AREA + ' pixels');
|
||||
|
||||
let snapshot;
|
||||
while (!snapshot) {
|
||||
try {
|
||||
//TODO(fission): browsingContext will change in case of cross-group navigation.
|
||||
snapshot = await browsingContext.currentWindowGlobal.drawSnapshot(
|
||||
rect,
|
||||
scale,
|
||||
"rgb(255,255,255)"
|
||||
);
|
||||
} catch (e) {
|
||||
// The currentWindowGlobal.drawSnapshot might throw
|
||||
// NS_ERROR_LOSS_OF_SIGNIFICANT_DATA if called during navigation.
|
||||
// Wait a little and retry.
|
||||
await new Promise(x => setTimeout(x, 50));
|
||||
}
|
||||
}
|
||||
|
||||
const win = browsingContext.topChromeWindow.ownerGlobal;
|
||||
const canvas = win.document.createElementNS('http://www.w3.org/1999/xhtml', 'canvas');
|
||||
canvas.width = canvasWidth;
|
||||
canvas.height = canvasHeight;
|
||||
let ctx = canvas.getContext('2d');
|
||||
ctx.drawImage(snapshot, 0, 0);
|
||||
snapshot.close();
|
||||
const dataURL = canvas.toDataURL(mimeType);
|
||||
return { data: dataURL.substring(dataURL.indexOf(',') + 1) };
|
||||
}
|
||||
|
||||
async ['Page.getContentQuads'](options) {
|
||||
return await this._contentPage.send('getContentQuads', options);
|
||||
}
|
||||
|
||||
async ['Page.navigate'](options) {
|
||||
return await this._contentPage.send('navigate', options);
|
||||
}
|
||||
|
||||
async ['Page.goBack'](options) {
|
||||
return await this._contentPage.send('goBack', options);
|
||||
}
|
||||
|
||||
async ['Page.goForward'](options) {
|
||||
return await this._contentPage.send('goForward', options);
|
||||
}
|
||||
|
||||
async ['Page.reload'](options) {
|
||||
return await this._contentPage.send('reload', options);
|
||||
}
|
||||
|
||||
async ['Page.describeNode'](options) {
|
||||
return await this._contentPage.send('describeNode', options);
|
||||
}
|
||||
|
||||
async ['Page.scrollIntoViewIfNeeded'](options) {
|
||||
return await this._contentPage.send('scrollIntoViewIfNeeded', options);
|
||||
}
|
||||
|
||||
async ['Page.setInitScripts']({ scripts }) {
|
||||
return await this._pageTarget.setInitScripts(scripts);
|
||||
}
|
||||
|
||||
async ['Page.dispatchKeyEvent'](options) {
|
||||
return await this._contentPage.send('dispatchKeyEvent', options);
|
||||
}
|
||||
|
||||
async ['Page.dispatchTouchEvent'](options) {
|
||||
return await this._contentPage.send('dispatchTouchEvent', options);
|
||||
}
|
||||
|
||||
async ['Page.dispatchTapEvent'](options) {
|
||||
return await this._contentPage.send('dispatchTapEvent', options);
|
||||
}
|
||||
|
||||
async ['Page.dispatchMouseEvent'](options) {
|
||||
return await this._contentPage.send('dispatchMouseEvent', options);
|
||||
}
|
||||
|
||||
async ['Page.dispatchWheelEvent']({x, y, button, deltaX, deltaY, deltaZ, modifiers }) {
|
||||
const boundingBox = this._pageTarget._linkedBrowser.getBoundingClientRect();
|
||||
x += boundingBox.left;
|
||||
y += boundingBox.top;
|
||||
const deltaMode = 0; // WheelEvent.DOM_DELTA_PIXEL
|
||||
const lineOrPageDeltaX = deltaX > 0 ? Math.floor(deltaX) : Math.ceil(deltaX);
|
||||
const lineOrPageDeltaY = deltaY > 0 ? Math.floor(deltaY) : Math.ceil(deltaY);
|
||||
|
||||
const win = this._pageTarget._window;
|
||||
win.windowUtils.sendWheelEvent(
|
||||
x,
|
||||
y,
|
||||
deltaX,
|
||||
deltaY,
|
||||
deltaZ,
|
||||
deltaMode,
|
||||
modifiers,
|
||||
lineOrPageDeltaX,
|
||||
lineOrPageDeltaY,
|
||||
0 /* options */);
|
||||
}
|
||||
|
||||
async ['Page.insertText'](options) {
|
||||
return await this._contentPage.send('insertText', options);
|
||||
}
|
||||
|
||||
async ['Page.crash'](options) {
|
||||
return await this._contentPage.send('crash', options);
|
||||
}
|
||||
|
||||
async ['Page.handleDialog']({dialogId, accept, promptText}) {
|
||||
const dialog = this._pageTarget.dialog(dialogId);
|
||||
if (!dialog)
|
||||
throw new Error('Failed to find dialog with id = ' + dialogId);
|
||||
if (accept)
|
||||
dialog.accept(promptText);
|
||||
else
|
||||
dialog.dismiss();
|
||||
}
|
||||
|
||||
async ['Page.setInterceptFileChooserDialog'](options) {
|
||||
return await this._contentPage.send('setInterceptFileChooserDialog', options);
|
||||
}
|
||||
|
||||
async ['Page.startScreencast'](options) {
|
||||
return await this._pageTarget.startScreencast(options);
|
||||
}
|
||||
|
||||
async ['Page.screencastFrameAck'](options) {
|
||||
await this._pageTarget.screencastFrameAck(options);
|
||||
}
|
||||
|
||||
async ['Page.stopScreencast'](options) {
|
||||
await this._pageTarget.stopScreencast(options);
|
||||
}
|
||||
|
||||
async ['Page.sendMessageToWorker']({workerId, message}) {
|
||||
const worker = this._workers.get(workerId);
|
||||
if (!worker)
|
||||
throw new Error('ERROR: cannot find worker with id ' + workerId);
|
||||
return await worker.sendMessage(JSON.parse(message));
|
||||
}
|
||||
}
|
||||
|
||||
var EXPORTED_SYMBOLS = ['PageHandler'];
|
||||
this.PageHandler = PageHandler;
@@ -1,147 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
const t = {};
|
||||
|
||||
t.String = function(x, details = {}, path = ['<root>']) {
|
||||
if (typeof x === 'string' || x instanceof String)
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be |string|; found |${typeof x}| \`${JSON.stringify(x)}\` instead.`;
|
||||
return false;
|
||||
}
|
||||
|
||||
t.Number = function(x, details = {}, path = ['<root>']) {
|
||||
if (typeof x === 'number')
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be |number|; found |${typeof x}| \`${JSON.stringify(x)}\` instead.`;
|
||||
return false;
|
||||
}
|
||||
|
||||
t.Boolean = function(x, details = {}, path = ['<root>']) {
|
||||
if (typeof x === 'boolean')
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be |boolean|; found |${typeof x}| \`${JSON.stringify(x)}\` instead.`;
|
||||
return false;
|
||||
}
|
||||
|
||||
t.Null = function(x, details = {}, path = ['<root>']) {
|
||||
if (Object.is(x, null))
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be \`null\`; found \`${JSON.stringify(x)}\` instead.`;
|
||||
return false;
|
||||
}
|
||||
|
||||
t.Undefined = function(x, details = {}, path = ['<root>']) {
|
||||
if (Object.is(x, undefined))
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be \`undefined\`; found \`${JSON.stringify(x)}\` instead.`;
|
||||
return false;
|
||||
}
|
||||
|
||||
t.Any = x => true;
|
||||
|
||||
t.Enum = function(values) {
|
||||
return function(x, details = {}, path = ['<root>']) {
|
||||
if (values.indexOf(x) !== -1)
|
||||
return true;
|
||||
details.error = `Expected "${path.join('.')}" to be one of [${values.join(', ')}]; found \`${JSON.stringify(x)}\` (${typeof x}) instead.`;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
t.Nullable = function(scheme) {
|
||||
return function(x, details = {}, path = ['<root>']) {
|
||||
if (Object.is(x, null))
|
||||
return true;
|
||||
return checkScheme(scheme, x, details, path);
|
||||
}
|
||||
}
|
||||
|
||||
t.Optional = function(scheme) {
|
||||
return function(x, details = {}, path = ['<root>']) {
|
||||
if (Object.is(x, undefined))
|
||||
return true;
|
||||
return checkScheme(scheme, x, details, path);
|
||||
}
|
||||
}
|
||||
|
||||
t.Array = function(scheme) {
|
||||
return function(x, details = {}, path = ['<root>']) {
|
||||
if (!Array.isArray(x)) {
|
||||
details.error = `Expected "${path.join('.')}" to be an array; found \`${JSON.stringify(x)}\` (${typeof x}) instead.`;
|
||||
return false;
|
||||
}
|
||||
const lastPathElement = path[path.length - 1];
|
||||
for (let i = 0; i < x.length; ++i) {
|
||||
path[path.length - 1] = lastPathElement + `[${i}]`;
|
||||
if (!checkScheme(scheme, x[i], details, path))
|
||||
return false;
|
||||
}
|
||||
path[path.length - 1] = lastPathElement;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
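// Looks the scheme up lazily at validation time, so recursive types (e.g. AXTree children) can reference themselves.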
t.Recursive = function(types, schemeName) {
|
||||
return function(x, details = {}, path = ['<root>']) {
|
||||
const scheme = types[schemeName];
|
||||
return checkScheme(scheme, x, details, path);
|
||||
}
|
||||
}
|
||||
|
||||
function beauty(path, obj) {
|
||||
if (path.length === 1)
|
||||
return `object ${JSON.stringify(obj, null, 2)}`;
|
||||
return `property "${path.join('.')}" - ${JSON.stringify(obj, null, 2)}`;
|
||||
}
|
||||
|
||||
function checkScheme(scheme, x, details = {}, path = ['<root>']) {
|
||||
if (!scheme)
|
||||
throw new Error(`ILL-DEFINED SCHEME: ${path.join('.')}`);
|
||||
if (typeof scheme === 'object') {
|
||||
if (!x) {
|
||||
details.error = `Object "${path.join('.')}" is undefined, but has some scheme`;
|
||||
return false;
|
||||
}
|
||||
for (const [propertyName, aScheme] of Object.entries(scheme)) {
|
||||
path.push(propertyName);
|
||||
const result = checkScheme(aScheme, x[propertyName], details, path);
|
||||
path.pop();
|
||||
if (!result)
|
||||
return false;
|
||||
}
|
||||
for (const propertyName of Object.keys(x)) {
|
||||
if (!scheme[propertyName]) {
|
||||
path.push(propertyName);
|
||||
details.error = `Found ${beauty(path, x[propertyName])} which is not described in this scheme`;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return scheme(x, details, path);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
function test(scheme, obj) {
|
||||
const details = {};
|
||||
if (!checkScheme(scheme, obj, details)) {
|
||||
dump(`FAILED: ${JSON.stringify(obj)}
|
||||
details.error: ${details.error}
|
||||
`);
|
||||
} else {
|
||||
dump(`SUCCESS: ${JSON.stringify(obj)}
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
test(t.Array(t.String), ['a', 'b', 2, 'c']);
|
||||
test(t.Either(t.String, t.Number), {});
|
||||
|
||||
*/
|
||||
|
||||
this.t = t;
|
||||
this.checkScheme = checkScheme;
|
||||
this.EXPORTED_SYMBOLS = ['t', 'checkScheme'];
@@ -1,993 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
const {t, checkScheme} = ChromeUtils.import('chrome://juggler/content/protocol/PrimitiveTypes.js');
|
||||
|
||||
// Protocol-specific types.
|
||||
const browserTypes = {};
|
||||
|
||||
browserTypes.TargetInfo = {
|
||||
type: t.Enum(['page']),
|
||||
targetId: t.String,
|
||||
browserContextId: t.Optional(t.String),
|
||||
// PageId of parent tab, if any.
|
||||
openerId: t.Optional(t.String),
|
||||
};
|
||||
|
||||
browserTypes.CookieOptions = {
|
||||
name: t.String,
|
||||
value: t.String,
|
||||
url: t.Optional(t.String),
|
||||
domain: t.Optional(t.String),
|
||||
path: t.Optional(t.String),
|
||||
secure: t.Optional(t.Boolean),
|
||||
httpOnly: t.Optional(t.Boolean),
|
||||
sameSite: t.Optional(t.Enum(['Strict', 'Lax', 'None'])),
|
||||
expires: t.Optional(t.Number),
|
||||
};
|
||||
|
||||
browserTypes.Cookie = {
|
||||
name: t.String,
|
||||
domain: t.String,
|
||||
path: t.String,
|
||||
value: t.String,
|
||||
expires: t.Number,
|
||||
size: t.Number,
|
||||
httpOnly: t.Boolean,
|
||||
secure: t.Boolean,
|
||||
session: t.Boolean,
|
||||
sameSite: t.Enum(['Strict', 'Lax', 'None']),
|
||||
};
|
||||
|
||||
browserTypes.Geolocation = {
|
||||
latitude: t.Number,
|
||||
longitude: t.Number,
|
||||
accuracy: t.Optional(t.Number),
|
||||
};
|
||||
|
||||
browserTypes.DownloadOptions = {
|
||||
behavior: t.Optional(t.Enum(['saveToDisk', 'cancel'])),
|
||||
downloadsDir: t.Optional(t.String),
|
||||
};
|
||||
|
||||
const pageTypes = {};
|
||||
pageTypes.DOMPoint = {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
};
|
||||
|
||||
pageTypes.Rect = {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
width: t.Number,
|
||||
height: t.Number,
|
||||
};
|
||||
|
||||
pageTypes.Size = {
|
||||
width: t.Number,
|
||||
height: t.Number,
|
||||
};
|
||||
|
||||
pageTypes.Viewport = {
|
||||
viewportSize: pageTypes.Size,
|
||||
deviceScaleFactor: t.Optional(t.Number),
|
||||
};
|
||||
|
||||
pageTypes.DOMQuad = {
|
||||
p1: pageTypes.DOMPoint,
|
||||
p2: pageTypes.DOMPoint,
|
||||
p3: pageTypes.DOMPoint,
|
||||
p4: pageTypes.DOMPoint,
|
||||
};
|
||||
|
||||
pageTypes.TouchPoint = {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
radiusX: t.Optional(t.Number),
|
||||
radiusY: t.Optional(t.Number),
|
||||
rotationAngle: t.Optional(t.Number),
|
||||
force: t.Optional(t.Number),
|
||||
};
|
||||
|
||||
pageTypes.Clip = {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
width: t.Number,
|
||||
height: t.Number,
|
||||
};
|
||||
|
||||
pageTypes.InitScript = {
|
||||
script: t.String,
|
||||
worldName: t.Optional(t.String),
|
||||
};
|
||||
|
||||
const runtimeTypes = {};
|
||||
runtimeTypes.RemoteObject = {
|
||||
type: t.Optional(t.Enum(['object', 'function', 'undefined', 'string', 'number', 'boolean', 'symbol', 'bigint'])),
|
||||
subtype: t.Optional(t.Enum(['array', 'null', 'node', 'regexp', 'date', 'map', 'set', 'weakmap', 'weakset', 'error', 'proxy', 'promise', 'typedarray'])),
|
||||
objectId: t.Optional(t.String),
|
||||
unserializableValue: t.Optional(t.Enum(['Infinity', '-Infinity', '-0', 'NaN'])),
|
||||
value: t.Any
|
||||
};
|
||||
|
||||
runtimeTypes.ObjectProperty = {
|
||||
name: t.String,
|
||||
value: runtimeTypes.RemoteObject,
|
||||
};
|
||||
|
||||
runtimeTypes.ScriptLocation = {
|
||||
columnNumber: t.Number,
|
||||
lineNumber: t.Number,
|
||||
url: t.String,
|
||||
};
|
||||
|
||||
runtimeTypes.ExceptionDetails = {
|
||||
text: t.Optional(t.String),
|
||||
stack: t.Optional(t.String),
|
||||
value: t.Optional(t.Any),
|
||||
};
|
||||
|
||||
runtimeTypes.CallFunctionArgument = {
|
||||
objectId: t.Optional(t.String),
|
||||
unserializableValue: t.Optional(t.Enum(['Infinity', '-Infinity', '-0', 'NaN'])),
|
||||
value: t.Any,
|
||||
};
|
||||
|
||||
runtimeTypes.AuxData = {
|
||||
frameId: t.Optional(t.String),
|
||||
name: t.Optional(t.String),
|
||||
};
|
||||
|
||||
const axTypes = {};
|
||||
axTypes.AXTree = {
|
||||
role: t.String,
|
||||
name: t.String,
|
||||
children: t.Optional(t.Array(t.Recursive(axTypes, 'AXTree'))),
|
||||
|
||||
selected: t.Optional(t.Boolean),
|
||||
focused: t.Optional(t.Boolean),
|
||||
pressed: t.Optional(t.Boolean),
|
||||
focusable: t.Optional(t.Boolean),
|
||||
haspopup: t.Optional(t.String),
|
||||
required: t.Optional(t.Boolean),
|
||||
invalid: t.Optional(t.Boolean),
|
||||
modal: t.Optional(t.Boolean),
|
||||
editable: t.Optional(t.Boolean),
|
||||
busy: t.Optional(t.Boolean),
|
||||
multiline: t.Optional(t.Boolean),
|
||||
readonly: t.Optional(t.Boolean),
|
||||
checked: t.Optional(t.Enum(['mixed', true])),
|
||||
expanded: t.Optional(t.Boolean),
|
||||
disabled: t.Optional(t.Boolean),
|
||||
multiselectable: t.Optional(t.Boolean),
|
||||
|
||||
value: t.Optional(t.String),
|
||||
description: t.Optional(t.String),
|
||||
|
||||
roledescription: t.Optional(t.String),
|
||||
valuetext: t.Optional(t.String),
|
||||
orientation: t.Optional(t.String),
|
||||
autocomplete: t.Optional(t.String),
|
||||
keyshortcuts: t.Optional(t.String),
|
||||
|
||||
level: t.Optional(t.Number),
|
||||
|
||||
tag: t.Optional(t.String),
|
||||
|
||||
foundObject: t.Optional(t.Boolean),
|
||||
}
|
||||
|
||||
const networkTypes = {};
|
||||
|
||||
networkTypes.HTTPHeader = {
|
||||
name: t.String,
|
||||
value: t.String,
|
||||
};
|
||||
|
||||
networkTypes.HTTPCredentials = {
|
||||
username: t.String,
|
||||
password: t.String,
|
||||
};
|
||||
|
||||
networkTypes.SecurityDetails = {
|
||||
protocol: t.String,
|
||||
subjectName: t.String,
|
||||
issuer: t.String,
|
||||
validFrom: t.Number,
|
||||
validTo: t.Number,
|
||||
};
|
||||
|
||||
networkTypes.ResourceTiming = {
|
||||
startTime: t.Number,
|
||||
domainLookupStart: t.Number,
|
||||
domainLookupEnd: t.Number,
|
||||
connectStart: t.Number,
|
||||
secureConnectionStart: t.Number,
|
||||
connectEnd: t.Number,
|
||||
requestStart: t.Number,
|
||||
responseStart: t.Number,
|
||||
};
|
||||
|
||||
const Browser = {
|
||||
targets: ['browser'],
|
||||
|
||||
types: browserTypes,
|
||||
|
||||
events: {
|
||||
'attachedToTarget': {
|
||||
sessionId: t.String,
|
||||
targetInfo: browserTypes.TargetInfo,
|
||||
},
|
||||
'detachedFromTarget': {
|
||||
sessionId: t.String,
|
||||
targetId: t.String,
|
||||
},
|
||||
'downloadCreated': {
|
||||
uuid: t.String,
|
||||
browserContextId: t.Optional(t.String),
|
||||
pageTargetId: t.String,
|
||||
url: t.String,
|
||||
suggestedFileName: t.String,
|
||||
},
|
||||
'downloadFinished': {
|
||||
uuid: t.String,
|
||||
canceled: t.Optional(t.Boolean),
|
||||
error: t.Optional(t.String),
|
||||
},
|
||||
'videoRecordingFinished': {
|
||||
screencastId: t.String,
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
'enable': {
|
||||
params: {
|
||||
attachToDefaultContext: t.Boolean,
|
||||
},
|
||||
},
|
||||
'createBrowserContext': {
|
||||
params: {
|
||||
removeOnDetach: t.Optional(t.Boolean),
|
||||
},
|
||||
returns: {
|
||||
browserContextId: t.String,
|
||||
},
|
||||
},
|
||||
'removeBrowserContext': {
|
||||
params: {
|
||||
browserContextId: t.String,
|
||||
},
|
||||
},
|
||||
'newPage': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
},
|
||||
returns: {
|
||||
targetId: t.String,
|
||||
}
|
||||
},
|
||||
'close': {},
|
||||
'getInfo': {
|
||||
returns: {
|
||||
userAgent: t.String,
|
||||
version: t.String,
|
||||
},
|
||||
},
|
||||
'setExtraHTTPHeaders': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
headers: t.Array(networkTypes.HTTPHeader),
|
||||
},
|
||||
},
|
||||
'setBrowserProxy': {
|
||||
params: {
|
||||
type: t.Enum(['http', 'https', 'socks', 'socks4']),
|
||||
bypass: t.Array(t.String),
|
||||
host: t.String,
|
||||
port: t.Number,
|
||||
username: t.Optional(t.String),
|
||||
password: t.Optional(t.String),
|
||||
},
|
||||
},
|
||||
'setContextProxy': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
type: t.Enum(['http', 'https', 'socks', 'socks4']),
|
||||
bypass: t.Array(t.String),
|
||||
host: t.String,
|
||||
port: t.Number,
|
||||
username: t.Optional(t.String),
|
||||
password: t.Optional(t.String),
|
||||
},
|
||||
},
|
||||
'setHTTPCredentials': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
credentials: t.Nullable(networkTypes.HTTPCredentials),
|
||||
},
|
||||
},
|
||||
'setRequestInterception': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
enabled: t.Boolean,
|
||||
},
|
||||
},
|
||||
'setGeolocationOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
geolocation: t.Nullable(browserTypes.Geolocation),
|
||||
}
|
||||
},
|
||||
'setUserAgentOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
userAgent: t.Nullable(t.String),
|
||||
}
|
||||
},
|
||||
'setPlatformOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
platform: t.Nullable(t.String),
|
||||
}
|
||||
},
|
||||
'setBypassCSP': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
bypassCSP: t.Nullable(t.Boolean),
|
||||
}
|
||||
},
|
||||
'setIgnoreHTTPSErrors': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
ignoreHTTPSErrors: t.Nullable(t.Boolean),
|
||||
}
|
||||
},
|
||||
'setJavaScriptDisabled': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
javaScriptDisabled: t.Boolean,
|
||||
}
|
||||
},
|
||||
'setLocaleOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
locale: t.Nullable(t.String),
|
||||
}
|
||||
},
|
||||
'setTimezoneOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
timezoneId: t.Nullable(t.String),
|
||||
}
|
||||
},
|
||||
'setDownloadOptions': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
downloadOptions: t.Nullable(browserTypes.DownloadOptions),
|
||||
}
|
||||
},
|
||||
'setTouchOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
hasTouch: t.Nullable(t.Boolean),
|
||||
}
|
||||
},
|
||||
'setDefaultViewport': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
viewport: t.Nullable(pageTypes.Viewport),
|
||||
}
|
||||
},
|
||||
'setScrollbarsHidden': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
hidden: t.Boolean,
|
||||
}
|
||||
},
|
||||
'setInitScripts': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
scripts: t.Array(pageTypes.InitScript),
|
||||
}
|
||||
},
|
||||
'addBinding': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
worldName: t.Optional(t.String),
|
||||
name: t.String,
|
||||
script: t.String,
|
||||
},
|
||||
},
|
||||
'grantPermissions': {
|
||||
params: {
|
||||
origin: t.String,
|
||||
browserContextId: t.Optional(t.String),
|
||||
permissions: t.Array(t.String),
|
||||
},
|
||||
},
|
||||
'resetPermissions': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
}
|
||||
},
|
||||
'setCookies': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
cookies: t.Array(browserTypes.CookieOptions),
|
||||
}
|
||||
},
|
||||
'clearCookies': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
}
|
||||
},
|
||||
'getCookies': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String)
|
||||
},
|
||||
returns: {
|
||||
cookies: t.Array(browserTypes.Cookie),
|
||||
},
|
||||
},
|
||||
'setOnlineOverride': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
override: t.Nullable(t.Enum(['online', 'offline'])),
|
||||
}
|
||||
},
|
||||
'setColorScheme': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
colorScheme: t.Nullable(t.Enum(['dark', 'light', 'no-preference'])),
|
||||
},
|
||||
},
|
||||
'setReducedMotion': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
reducedMotion: t.Nullable(t.Enum(['reduce', 'no-preference'])),
|
||||
},
|
||||
},
|
||||
'setForcedColors': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
forcedColors: t.Nullable(t.Enum(['active', 'none'])),
|
||||
},
|
||||
},
|
||||
'setVideoRecordingOptions': {
|
||||
params: {
|
||||
browserContextId: t.Optional(t.String),
|
||||
options: t.Optional({
|
||||
dir: t.String,
|
||||
width: t.Number,
|
||||
height: t.Number,
|
||||
}),
|
||||
},
|
||||
},
|
||||
'cancelDownload': {
|
||||
params: {
|
||||
uuid: t.Optional(t.String),
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const Network = {
|
||||
targets: ['page'],
|
||||
types: networkTypes,
|
||||
events: {
|
||||
'requestWillBeSent': {
|
||||
// frameId may be absent for redirected requests.
|
||||
frameId: t.Optional(t.String),
|
||||
requestId: t.String,
|
||||
// RequestID of redirected request.
|
||||
redirectedFrom: t.Optional(t.String),
|
||||
postData: t.Optional(t.String),
|
||||
headers: t.Array(networkTypes.HTTPHeader),
|
||||
isIntercepted: t.Boolean,
|
||||
url: t.String,
|
||||
method: t.String,
|
||||
navigationId: t.Optional(t.String),
|
||||
cause: t.String,
|
||||
internalCause: t.String,
|
||||
},
|
||||
'responseReceived': {
|
||||
securityDetails: t.Nullable(networkTypes.SecurityDetails),
|
||||
requestId: t.String,
|
||||
fromCache: t.Boolean,
|
||||
remoteIPAddress: t.Optional(t.String),
|
||||
remotePort: t.Optional(t.Number),
|
||||
status: t.Number,
|
||||
statusText: t.String,
|
||||
headers: t.Array(networkTypes.HTTPHeader),
|
||||
timing: networkTypes.ResourceTiming,
|
||||
fromServiceWorker: t.Boolean,
|
||||
},
|
||||
'requestFinished': {
|
||||
requestId: t.String,
|
||||
responseEndTime: t.Number,
|
||||
transferSize: t.Number,
|
||||
encodedBodySize: t.Number,
|
||||
protocolVersion: t.Optional(t.String),
|
||||
},
|
||||
'requestFailed': {
|
||||
requestId: t.String,
|
||||
errorCode: t.String,
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
'setRequestInterception': {
|
||||
params: {
|
||||
enabled: t.Boolean,
|
||||
},
|
||||
},
|
||||
'setExtraHTTPHeaders': {
|
||||
params: {
|
||||
headers: t.Array(networkTypes.HTTPHeader),
|
||||
},
|
||||
},
|
||||
'abortInterceptedRequest': {
|
||||
params: {
|
||||
requestId: t.String,
|
||||
errorCode: t.String,
|
||||
},
|
||||
},
|
||||
'resumeInterceptedRequest': {
|
||||
params: {
|
||||
requestId: t.String,
|
||||
url: t.Optional(t.String),
|
||||
method: t.Optional(t.String),
|
||||
headers: t.Optional(t.Array(networkTypes.HTTPHeader)),
|
||||
postData: t.Optional(t.String),
|
||||
},
|
||||
},
|
||||
'fulfillInterceptedRequest': {
|
||||
params: {
|
||||
requestId: t.String,
|
||||
status: t.Number,
|
||||
statusText: t.String,
|
||||
headers: t.Array(networkTypes.HTTPHeader),
|
||||
base64body: t.Optional(t.String), // base64-encoded
|
||||
},
|
||||
},
|
||||
'getResponseBody': {
|
||||
params: {
|
||||
requestId: t.String,
|
||||
},
|
||||
returns: {
|
||||
base64body: t.String,
|
||||
evicted: t.Optional(t.Boolean),
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const Runtime = {
|
||||
targets: ['page'],
|
||||
types: runtimeTypes,
|
||||
events: {
|
||||
'executionContextCreated': {
|
||||
executionContextId: t.String,
|
||||
auxData: runtimeTypes.AuxData,
|
||||
},
|
||||
'executionContextDestroyed': {
|
||||
executionContextId: t.String,
|
||||
},
|
||||
'console': {
|
||||
executionContextId: t.String,
|
||||
args: t.Array(runtimeTypes.RemoteObject),
|
||||
type: t.String,
|
||||
location: runtimeTypes.ScriptLocation,
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
'evaluate': {
|
||||
params: {
|
||||
// Pass frameId here.
|
||||
executionContextId: t.String,
|
||||
expression: t.String,
|
||||
returnByValue: t.Optional(t.Boolean),
|
||||
},
|
||||
|
||||
returns: {
|
||||
result: t.Optional(runtimeTypes.RemoteObject),
|
||||
exceptionDetails: t.Optional(runtimeTypes.ExceptionDetails),
|
||||
}
|
||||
},
|
||||
'callFunction': {
|
||||
params: {
|
||||
// Pass frameId here.
|
||||
executionContextId: t.String,
|
||||
functionDeclaration: t.String,
|
||||
returnByValue: t.Optional(t.Boolean),
|
||||
args: t.Array(runtimeTypes.CallFunctionArgument),
|
||||
},
|
||||
|
||||
returns: {
|
||||
result: t.Optional(runtimeTypes.RemoteObject),
|
||||
exceptionDetails: t.Optional(runtimeTypes.ExceptionDetails),
|
||||
}
|
||||
},
|
||||
'disposeObject': {
|
||||
params: {
|
||||
executionContextId: t.String,
|
||||
objectId: t.String,
|
||||
},
|
||||
},
|
||||
|
||||
'getObjectProperties': {
|
||||
params: {
|
||||
executionContextId: t.String,
|
||||
objectId: t.String,
|
||||
},
|
||||
|
||||
returns: {
|
||||
properties: t.Array(runtimeTypes.ObjectProperty),
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const Page = {
|
||||
targets: ['page'],
|
||||
|
||||
types: pageTypes,
|
||||
events: {
|
||||
'ready': {
|
||||
},
|
||||
'crashed': {
|
||||
},
|
||||
'eventFired': {
|
||||
frameId: t.String,
|
||||
name: t.Enum(['load', 'DOMContentLoaded']),
|
||||
},
|
||||
'uncaughtError': {
|
||||
frameId: t.String,
|
||||
message: t.String,
|
||||
stack: t.String,
|
||||
},
|
||||
'frameAttached': {
|
||||
frameId: t.String,
|
||||
parentFrameId: t.Optional(t.String),
|
||||
},
|
||||
'frameDetached': {
|
||||
frameId: t.String,
|
||||
},
|
||||
'navigationStarted': {
|
||||
frameId: t.String,
|
||||
navigationId: t.String,
|
||||
url: t.String,
|
||||
},
|
||||
'navigationCommitted': {
|
||||
frameId: t.String,
|
||||
// |navigationId| can only be null in response to enable.
|
||||
navigationId: t.Optional(t.String),
|
||||
url: t.String,
|
||||
// frame.id or frame.name
|
||||
name: t.String,
|
||||
},
|
||||
'navigationAborted': {
|
||||
frameId: t.String,
|
||||
navigationId: t.String,
|
||||
errorText: t.String,
|
||||
},
|
||||
'sameDocumentNavigation': {
|
||||
frameId: t.String,
|
||||
url: t.String,
|
||||
},
|
||||
'dialogOpened': {
|
||||
dialogId: t.String,
|
||||
type: t.Enum(['prompt', 'alert', 'confirm', 'beforeunload']),
|
||||
message: t.String,
|
||||
defaultValue: t.Optional(t.String),
|
||||
},
|
||||
'dialogClosed': {
|
||||
dialogId: t.String,
|
||||
},
|
||||
'bindingCalled': {
|
||||
executionContextId: t.String,
|
||||
name: t.String,
|
||||
payload: t.Any,
|
||||
},
|
||||
'linkClicked': {
|
||||
phase: t.Enum(['before', 'after']),
|
||||
},
|
||||
'willOpenNewWindowAsynchronously': {},
|
||||
'fileChooserOpened': {
|
||||
executionContextId: t.String,
|
||||
element: runtimeTypes.RemoteObject
|
||||
},
|
||||
'workerCreated': {
|
||||
workerId: t.String,
|
||||
frameId: t.String,
|
||||
url: t.String,
|
||||
},
|
||||
'workerDestroyed': {
|
||||
workerId: t.String,
|
||||
},
|
||||
'dispatchMessageFromWorker': {
|
||||
workerId: t.String,
|
||||
message: t.String,
|
||||
},
|
||||
'videoRecordingStarted': {
|
||||
screencastId: t.String,
|
||||
file: t.String,
|
||||
},
|
||||
'webSocketCreated': {
|
||||
frameId: t.String,
|
||||
wsid: t.String,
|
||||
requestURL: t.String,
|
||||
},
|
||||
'webSocketOpened': {
|
||||
frameId: t.String,
|
||||
requestId: t.String,
|
||||
wsid: t.String,
|
||||
effectiveURL: t.String,
|
||||
},
|
||||
'webSocketClosed': {
|
||||
frameId: t.String,
|
||||
wsid: t.String,
|
||||
error: t.String,
|
||||
},
|
||||
'webSocketFrameSent': {
|
||||
frameId: t.String,
|
||||
wsid: t.String,
|
||||
opcode: t.Number,
|
||||
data: t.String,
|
||||
},
|
||||
'webSocketFrameReceived': {
|
||||
frameId: t.String,
|
||||
wsid: t.String,
|
||||
opcode: t.Number,
|
||||
data: t.String,
|
||||
},
|
||||
'screencastFrame': {
|
||||
data: t.String,
|
||||
deviceWidth: t.Number,
|
||||
deviceHeight: t.Number,
|
||||
},
|
||||
},
|
||||
|
||||
methods: {
|
||||
'close': {
|
||||
params: {
|
||||
runBeforeUnload: t.Optional(t.Boolean),
|
||||
},
|
||||
},
|
||||
'setFileInputFiles': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
objectId: t.String,
|
||||
files: t.Array(t.String),
|
||||
},
|
||||
},
|
||||
'addBinding': {
|
||||
params: {
|
||||
worldName: t.Optional(t.String),
|
||||
name: t.String,
|
||||
script: t.String,
|
||||
},
|
||||
},
|
||||
'setViewportSize': {
|
||||
params: {
|
||||
viewportSize: t.Nullable(pageTypes.Size),
|
||||
},
|
||||
},
|
||||
'bringToFront': {
|
||||
params: {
|
||||
},
|
||||
},
|
||||
'setEmulatedMedia': {
|
||||
params: {
|
||||
type: t.Optional(t.Enum(['screen', 'print', ''])),
|
||||
colorScheme: t.Optional(t.Enum(['dark', 'light', 'no-preference'])),
|
||||
reducedMotion: t.Optional(t.Enum(['reduce', 'no-preference'])),
|
||||
forcedColors: t.Optional(t.Enum(['active', 'none'])),
|
||||
},
|
||||
},
|
||||
'setCacheDisabled': {
|
||||
params: {
|
||||
cacheDisabled: t.Boolean,
|
||||
},
|
||||
},
|
||||
'describeNode': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
objectId: t.String,
|
||||
},
|
||||
returns: {
|
||||
contentFrameId: t.Optional(t.String),
|
||||
ownerFrameId: t.Optional(t.String),
|
||||
},
|
||||
},
|
||||
'scrollIntoViewIfNeeded': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
objectId: t.String,
|
||||
rect: t.Optional(pageTypes.Rect),
|
||||
},
|
||||
},
|
||||
'setInitScripts': {
|
||||
params: {
|
||||
scripts: t.Array(pageTypes.InitScript)
|
||||
}
|
||||
},
|
||||
'navigate': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
url: t.String,
|
||||
referer: t.Optional(t.String),
|
||||
},
|
||||
returns: {
|
||||
navigationId: t.Nullable(t.String),
|
||||
navigationURL: t.Nullable(t.String),
|
||||
}
|
||||
},
|
||||
'goBack': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
},
|
||||
returns: {
|
||||
success: t.Boolean,
|
||||
},
|
||||
},
|
||||
'goForward': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
},
|
||||
returns: {
|
||||
success: t.Boolean,
|
||||
},
|
||||
},
|
||||
'reload': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
},
|
||||
},
|
||||
'adoptNode': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
objectId: t.String,
|
||||
executionContextId: t.String,
|
||||
},
|
||||
returns: {
|
||||
remoteObject: t.Nullable(runtimeTypes.RemoteObject),
|
||||
},
|
||||
},
|
||||
'screenshot': {
|
||||
params: {
|
||||
mimeType: t.Enum(['image/png', 'image/jpeg']),
|
||||
clip: pageTypes.Clip,
|
||||
omitDeviceScaleFactor: t.Optional(t.Boolean),
|
||||
},
|
||||
returns: {
|
||||
data: t.String,
|
||||
}
|
||||
},
|
||||
'getContentQuads': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
objectId: t.String,
|
||||
},
|
||||
returns: {
|
||||
quads: t.Array(pageTypes.DOMQuad),
|
||||
},
|
||||
},
|
||||
'dispatchKeyEvent': {
|
||||
params: {
|
||||
type: t.String,
|
||||
key: t.String,
|
||||
keyCode: t.Number,
|
||||
location: t.Number,
|
||||
code: t.String,
|
||||
repeat: t.Boolean,
|
||||
text: t.Optional(t.String),
|
||||
}
|
||||
},
|
||||
'dispatchTouchEvent': {
|
||||
params: {
|
||||
type: t.Enum(['touchStart', 'touchEnd', 'touchMove', 'touchCancel']),
|
||||
touchPoints: t.Array(pageTypes.TouchPoint),
|
||||
modifiers: t.Number,
|
||||
},
|
||||
returns: {
|
||||
defaultPrevented: t.Boolean,
|
||||
}
|
||||
},
|
||||
'dispatchTapEvent': {
|
||||
params: {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
modifiers: t.Number,
|
||||
}
|
||||
},
|
||||
'dispatchMouseEvent': {
|
||||
params: {
|
||||
type: t.String,
|
||||
button: t.Number,
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
modifiers: t.Number,
|
||||
clickCount: t.Optional(t.Number),
|
||||
buttons: t.Number,
|
||||
}
|
||||
},
|
||||
'dispatchWheelEvent': {
|
||||
params: {
|
||||
x: t.Number,
|
||||
y: t.Number,
|
||||
deltaX: t.Number,
|
||||
deltaY: t.Number,
|
||||
deltaZ: t.Number,
|
||||
modifiers: t.Number,
|
||||
}
|
||||
},
|
||||
'insertText': {
|
||||
params: {
|
||||
text: t.String,
|
||||
}
|
||||
},
|
||||
'crash': {
|
||||
params: {}
|
||||
},
|
||||
'handleDialog': {
|
||||
params: {
|
||||
dialogId: t.String,
|
||||
accept: t.Boolean,
|
||||
promptText: t.Optional(t.String),
|
||||
},
|
||||
},
|
||||
'setInterceptFileChooserDialog': {
|
||||
params: {
|
||||
enabled: t.Boolean,
|
||||
},
|
||||
},
|
||||
'sendMessageToWorker': {
|
||||
params: {
|
||||
frameId: t.String,
|
||||
workerId: t.String,
|
||||
message: t.String,
|
||||
},
|
||||
},
|
||||
'startScreencast': {
|
||||
params: {
|
||||
width: t.Number,
|
||||
height: t.Number,
|
||||
quality: t.Number,
|
||||
},
|
||||
returns: {
|
||||
screencastId: t.String,
|
||||
},
|
||||
},
|
||||
'screencastFrameAck': {
|
||||
params: {
|
||||
screencastId: t.String,
|
||||
},
|
||||
},
|
||||
'stopScreencast': {
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
const Accessibility = {
|
||||
targets: ['page'],
|
||||
types: axTypes,
|
||||
events: {},
|
||||
methods: {
|
||||
'getFullAXTree': {
|
||||
params: {
|
||||
objectId: t.Optional(t.String),
|
||||
},
|
||||
returns: {
|
||||
tree: axTypes.AXTree
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.protocol = {
|
||||
domains: {Browser, Page, Runtime, Network, Accessibility},
|
||||
};
|
||||
this.checkScheme = checkScheme;
|
||||
this.EXPORTED_SYMBOLS = ['protocol', 'checkScheme'];
@@ -1,144 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "HeadlessWindowCapturer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "HeadlessWidget.h"
|
||||
#include "libyuv.h"
|
||||
#include "mozilla/EndianUtils.h"
|
||||
#include "mozilla/gfx/DataSurfaceHelpers.h"
|
||||
#include "rtc_base/ref_counted_object.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
|
||||
using namespace mozilla::widget;
|
||||
using namespace webrtc;
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> HeadlessWindowCapturer::Create(HeadlessWidget* headlessWindow) {
|
||||
return new rtc::RefCountedObject<HeadlessWindowCapturer>(headlessWindow);
|
||||
}
|
||||
|
||||
HeadlessWindowCapturer::HeadlessWindowCapturer(mozilla::widget::HeadlessWidget* window)
|
||||
: mWindow(window) {
|
||||
}
|
||||
HeadlessWindowCapturer::~HeadlessWindowCapturer() {
|
||||
StopCapture();
|
||||
}
|
||||
|
||||
|
||||
void HeadlessWindowCapturer::RegisterCaptureDataCallback(rtc::VideoSinkInterface<webrtc::VideoFrame>* dataCallback) {
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
_dataCallBacks.insert(dataCallback);
|
||||
}
|
||||
void HeadlessWindowCapturer::DeRegisterCaptureDataCallback(rtc::VideoSinkInterface<webrtc::VideoFrame>* dataCallback) {
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
auto it = _dataCallBacks.find(dataCallback);
|
||||
if (it != _dataCallBacks.end()) {
|
||||
_dataCallBacks.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
void HeadlessWindowCapturer::RegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) {
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
_rawFrameCallbacks.insert(rawFrameCallback);
|
||||
}
|
||||
|
||||
void HeadlessWindowCapturer::DeRegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) {
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
auto it = _rawFrameCallbacks.find(rawFrameCallback);
|
||||
if (it != _rawFrameCallbacks.end()) {
|
||||
_rawFrameCallbacks.erase(it);
|
||||
}
|
||||
}
|
||||
|
||||
void HeadlessWindowCapturer::NotifyFrameCaptured(const webrtc::VideoFrame& frame) {
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
for (auto dataCallBack : _dataCallBacks)
|
||||
dataCallBack->OnFrame(frame);
|
||||
}
|
||||
|
||||
int32_t HeadlessWindowCapturer::StopCaptureIfAllClientsClose() {
|
||||
if (_dataCallBacks.empty()) {
|
||||
return StopCapture();
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int32_t HeadlessWindowCapturer::StartCapture(const webrtc::VideoCaptureCapability& capability) {
|
||||
mWindow->SetSnapshotListener([this] (RefPtr<gfx::DataSourceSurface>&& dataSurface){
|
||||
if (!NS_IsInCompositorThread()) {
|
||||
fprintf(stderr, "SnapshotListener is called not on the Compositor thread!\n");
|
||||
return;
|
||||
}
|
||||
|
||||
if (dataSurface->GetFormat() != gfx::SurfaceFormat::B8G8R8A8) {
|
||||
fprintf(stderr, "Unexpected snapshot surface format: %hhd\n", dataSurface->GetFormat());
|
||||
return;
|
||||
}
|
||||
|
||||
webrtc::VideoCaptureCapability frameInfo;
|
||||
frameInfo.width = dataSurface->GetSize().width;
|
||||
frameInfo.height = dataSurface->GetSize().height;
|
||||
#if MOZ_LITTLE_ENDIAN()
|
||||
frameInfo.videoType = VideoType::kARGB;
|
||||
#else
|
||||
frameInfo.videoType = VideoType::kBGRA;
|
||||
#endif
|
||||
|
||||
{
|
||||
rtc::CritScope lock2(&_callBackCs);
|
||||
for (auto rawFrameCallback : _rawFrameCallbacks) {
|
||||
rawFrameCallback->OnRawFrame(dataSurface->GetData(), dataSurface->Stride(), frameInfo);
|
||||
}
|
||||
if (!_dataCallBacks.size())
|
||||
return;
|
||||
}
|
||||
|
||||
int width = dataSurface->GetSize().width;
|
||||
int height = dataSurface->GetSize().height;
|
||||
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width, height);
|
||||
|
||||
gfx::DataSourceSurface::ScopedMap map(dataSurface.get(), gfx::DataSourceSurface::MapType::READ);
|
||||
if (!map.IsMapped()) {
|
||||
fprintf(stderr, "Failed to map snapshot bytes!\n");
|
||||
return;
|
||||
}
|
||||
|
||||
#if MOZ_LITTLE_ENDIAN()
|
||||
const int conversionResult = libyuv::ARGBToI420(
|
||||
#else
|
||||
const int conversionResult = libyuv::BGRAToI420(
|
||||
#endif
|
||||
map.GetData(), map.GetStride(),
|
||||
buffer->MutableDataY(), buffer->StrideY(),
|
||||
buffer->MutableDataU(), buffer->StrideU(),
|
||||
buffer->MutableDataV(), buffer->StrideV(),
|
||||
width, height);
|
||||
if (conversionResult != 0) {
|
||||
fprintf(stderr, "Failed to convert capture frame to I420: %d\n", conversionResult);
|
||||
return;
|
||||
}
|
||||
|
||||
VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(), kVideoRotation_0);
|
||||
NotifyFrameCaptured(captureFrame);
|
||||
});
|
||||
return 0;
|
||||
}
|
||||
|
||||
int32_t HeadlessWindowCapturer::StopCapture() {
|
||||
if (!CaptureStarted())
|
||||
return 0;
|
||||
mWindow->SetSnapshotListener(nullptr);
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool HeadlessWindowCapturer::CaptureStarted() {
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace mozilla