mirror of
https://github.com/lightpanda-io/browser.git
synced 2026-03-28 07:33:16 +00:00
Compare commits
274 Commits
structured
...
docs/updat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
129e8e8340 | ||
|
|
a865b86fa5 | ||
|
|
de28d14aff | ||
|
|
4cdc24326a | ||
|
|
cf46f0097a | ||
|
|
d94fd2a43b | ||
|
|
8c5e737669 | ||
|
|
fb29a1c5bf | ||
|
|
94190f93af | ||
|
|
93e239f682 | ||
|
|
a2e59af44c | ||
|
|
00c962bdd8 | ||
|
|
1fa87442b8 | ||
|
|
ac5400696a | ||
|
|
5062273b7a | ||
|
|
9c2393351d | ||
|
|
f0cfe3ffc8 | ||
|
|
615fcffb99 | ||
|
|
13b746f9e4 | ||
|
|
e90fce4c55 | ||
|
|
59175437b5 | ||
|
|
e950384b9b | ||
|
|
78440350dc | ||
|
|
f435297949 | ||
|
|
54d1563cf3 | ||
|
|
f36499b806 | ||
|
|
fa1dd5237d | ||
|
|
2b9d5fd4d9 | ||
|
|
964fa0a8aa | ||
|
|
db01158d2d | ||
|
|
e997f8317e | ||
|
|
a88c21cdb5 | ||
|
|
7a7c4b9f49 | ||
|
|
edd0c5c83f | ||
|
|
c6861829c3 | ||
|
|
e14c8b3025 | ||
|
|
5bc00c595c | ||
|
|
db5fb40de0 | ||
|
|
4e6a357e6e | ||
|
|
6cf515151d | ||
|
|
bf6e4cf3a6 | ||
|
|
60936baa96 | ||
|
|
c29f72a7e8 | ||
|
|
d4427e4370 | ||
|
|
b85ec04175 | ||
|
|
da05ba0eb7 | ||
|
|
414a68abeb | ||
|
|
52455b732b | ||
|
|
ba71268eb3 | ||
|
|
694aac5ce8 | ||
|
|
cbab0b712a | ||
|
|
1aee3db521 | ||
|
|
f634c9843d | ||
|
|
e1e45d1c5d | ||
|
|
ff288c8aa2 | ||
|
|
e1b14a6833 | ||
|
|
015edc3848 | ||
|
|
bd2406f803 | ||
|
|
3c29e7dbd4 | ||
|
|
586413357e | ||
|
|
9a055a61a6 | ||
|
|
5fb561dc9c | ||
|
|
b14ae02548 | ||
|
|
51fb08e6aa | ||
|
|
a6d699ad5d | ||
|
|
8372b45cc5 | ||
|
|
1739ae6b9a | ||
|
|
ba62150f7a | ||
|
|
8143a61955 | ||
|
|
e16c479781 | ||
|
|
c0c4e26d63 | ||
|
|
b252aa71d0 | ||
|
|
9ef8d9c189 | ||
|
|
9f27416603 | ||
|
|
0729f4a03a | ||
|
|
21f7b95db9 | ||
|
|
4125a5aa1e | ||
|
|
6d0dc6cb1e | ||
|
|
0675c23217 | ||
|
|
d0e6a1f5bb | ||
|
|
91afe08235 | ||
|
|
041d9d41fb | ||
|
|
7009fb5899 | ||
|
|
d2003c7c9a | ||
|
|
ce002b999c | ||
|
|
5b1056862a | ||
|
|
cc4ac99b4a | ||
|
|
46df341506 | ||
|
|
b698e2d078 | ||
|
|
5cc5e513dd | ||
|
|
e048b0372f | ||
|
|
d7aaa1c870 | ||
|
|
463aac9b59 | ||
|
|
d9cdd78138 | ||
|
|
44a83c0e1c | ||
|
|
96f24a2662 | ||
|
|
5d2801c652 | ||
|
|
deb08b7880 | ||
|
|
96e5054ffc | ||
|
|
c9753a690d | ||
|
|
27aaf46630 | ||
|
|
84190e1e06 | ||
|
|
b0b1f755ea | ||
|
|
fcf1d30c77 | ||
|
|
3c532e5aef | ||
|
|
3efcb2705d | ||
|
|
c25f389e91 | ||
|
|
533f4075a3 | ||
|
|
f508d37426 | ||
|
|
548c6eeb7a | ||
|
|
c8265f4807 | ||
|
|
a74e46debf | ||
|
|
1ceaabe69f | ||
|
|
91a2441ed8 | ||
|
|
2ecbc833a9 | ||
|
|
dac456d98c | ||
|
|
422320d9ac | ||
|
|
18b635936c | ||
|
|
7b2895ef08 | ||
|
|
b09e9f7398 | ||
|
|
ac651328c3 | ||
|
|
0380df1cb4 | ||
|
|
21421d5b53 | ||
|
|
80c309aa69 | ||
|
|
f5bc7310b1 | ||
|
|
21e9967a8a | ||
|
|
32f450f803 | ||
|
|
1972142703 | ||
|
|
b10d866e4b | ||
|
|
b373fb4a42 | ||
|
|
ddd34dc57b | ||
|
|
265c5aba2e | ||
|
|
21fc6d1cf6 | ||
|
|
1a7fe6129c | ||
|
|
37462a16c5 | ||
|
|
323ec0046c | ||
|
|
dc7c6984fb | ||
|
|
92f7248a16 | ||
|
|
1ec3e156fb | ||
|
|
1121bed49b | ||
|
|
0eb43fb530 | ||
|
|
1f50dc38c3 | ||
|
|
a9d044ec10 | ||
|
|
1bdf464ef2 | ||
|
|
a70da0d176 | ||
|
|
8c52b8357c | ||
|
|
0243c6b450 | ||
|
|
f7071447cb | ||
|
|
c038bfafa1 | ||
|
|
4d60f56e66 | ||
|
|
56d3cf51e8 | ||
|
|
3013e3a9e6 | ||
|
|
fe9b2e672b | ||
|
|
3e9fa4ca47 | ||
|
|
a2e66f85a1 | ||
|
|
a9b9cf14c3 | ||
|
|
d4b941cf30 | ||
|
|
4b6bf29b83 | ||
|
|
a8b147dfc0 | ||
|
|
65627c1296 | ||
|
|
3dcdaa0a9b | ||
|
|
5bc00045c7 | ||
|
|
93ea95af24 | ||
|
|
f754773bf6 | ||
|
|
42bb2f3c58 | ||
|
|
68337a6989 | ||
|
|
bf6dbedbe4 | ||
|
|
a204f40968 | ||
|
|
fe3faa0a5a | ||
|
|
39d5a25258 | ||
|
|
f4044230fd | ||
|
|
4d6d8d9a83 | ||
|
|
c4176a282f | ||
|
|
1352839472 | ||
|
|
535128da71 | ||
|
|
099550dddc | ||
|
|
7fe26bc966 | ||
|
|
cc6587d6e5 | ||
|
|
8b310ce993 | ||
|
|
be8ba53263 | ||
|
|
043d48d1c7 | ||
|
|
e8fe80189b | ||
|
|
0e48f317cb | ||
|
|
867745c71d | ||
|
|
a1a7919f74 | ||
|
|
c3de47de90 | ||
|
|
dd35bdfeb4 | ||
|
|
07c3aec34f | ||
|
|
bce3e8f7c6 | ||
|
|
ba9777e754 | ||
|
|
7040801dfa | ||
|
|
4f8a6b62b8 | ||
|
|
d3dad772cf | ||
|
|
944b672fea | ||
|
|
b1c54aa92d | ||
|
|
4ca6f43aeb | ||
|
|
f09e66e1cc | ||
|
|
8b7a4ceaaa | ||
|
|
51e90f5971 | ||
|
|
8db64772b7 | ||
|
|
bf0be60b89 | ||
|
|
172481dd72 | ||
|
|
c6c0492c33 | ||
|
|
fca29a8be2 | ||
|
|
d365240f91 | ||
|
|
1ed61d4783 | ||
|
|
a1fb11ae33 | ||
|
|
9971816711 | ||
|
|
c38d9a3098 | ||
|
|
02198de455 | ||
|
|
6cd8202310 | ||
|
|
4d7b7d1d42 | ||
|
|
e4e21f52b5 | ||
|
|
84e1cd08b6 | ||
|
|
7796753e7a | ||
|
|
880205e874 | ||
|
|
1b96087b08 | ||
|
|
aa246c9e9f | ||
|
|
f1d311d232 | ||
|
|
e4f7fca10d | ||
|
|
3d6d669a50 | ||
|
|
c4097e2b7e | ||
|
|
619d27c773 | ||
|
|
1522c90294 | ||
|
|
794e15ce21 | ||
|
|
34771b835e | ||
|
|
8df51b232a | ||
|
|
13b8ce18b2 | ||
|
|
448386e52b | ||
|
|
bf07659dd5 | ||
|
|
16dfad0895 | ||
|
|
f61449c31c | ||
|
|
60699229ca | ||
|
|
e1dd26b307 | ||
|
|
ee637c3662 | ||
|
|
65d7a39554 | ||
|
|
37735b1caa | ||
|
|
1866e7141e | ||
|
|
feccc9f5ce | ||
|
|
af803da5c8 | ||
|
|
5329d05005 | ||
|
|
2e6dd3edfe | ||
|
|
a95b4ea7b9 | ||
|
|
ca931a11be | ||
|
|
6c7272061c | ||
|
|
4f262e5bed | ||
|
|
a6ccc72d15 | ||
|
|
d1ee0442ea | ||
|
|
064e7b404b | ||
|
|
56f47ee574 | ||
|
|
a318c6263d | ||
|
|
83ba974f94 | ||
|
|
85ebbe8759 | ||
|
|
61cba3f6eb | ||
|
|
3c97332fd8 | ||
|
|
c77cb317c4 | ||
|
|
c3a53752e7 | ||
|
|
0a5eb93565 | ||
|
|
b8a3135835 | ||
|
|
330dfccb89 | ||
|
|
d80e926015 | ||
|
|
2a2b067633 | ||
|
|
be73c14395 | ||
|
|
9cd5afe5b6 | ||
|
|
4ba40f2295 | ||
|
|
b674c2e448 | ||
|
|
b8139a6e83 | ||
|
|
bde5fc9264 | ||
|
|
45705a3e29 | ||
|
|
e0f0b9f210 | ||
|
|
f2832447d4 | ||
|
|
471ba5baf6 | ||
|
|
248851701f | ||
|
|
0f46277b1f |
4
.github/actions/install/action.yml
vendored
4
.github/actions/install/action.yml
vendored
@@ -13,7 +13,7 @@ inputs:
|
||||
zig-v8:
|
||||
description: 'zig v8 version to install'
|
||||
required: false
|
||||
default: 'v0.3.2'
|
||||
default: 'v0.3.4'
|
||||
v8:
|
||||
description: 'v8 version to install'
|
||||
required: false
|
||||
@@ -46,7 +46,7 @@ runs:
|
||||
|
||||
- name: Cache v8
|
||||
id: cache-v8
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v5
|
||||
env:
|
||||
cache-name: cache-v8
|
||||
with:
|
||||
|
||||
10
.github/workflows/e2e-integration-test.yml
vendored
10
.github/workflows/e2e-integration-test.yml
vendored
@@ -20,11 +20,9 @@ jobs:
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -32,7 +30,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -47,7 +45,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -55,7 +53,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
|
||||
144
.github/workflows/e2e-test.yml
vendored
144
.github/workflows/e2e-test.yml
vendored
@@ -9,15 +9,13 @@ env:
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
branches: [main]
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/zig-js-runtime"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "build.zig.zon"
|
||||
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
@@ -29,12 +27,10 @@ on:
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "build.zig.zon"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
@@ -52,8 +48,6 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -61,7 +55,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -76,7 +70,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -84,7 +78,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
@@ -117,6 +111,107 @@ jobs:
|
||||
BASE_URL=https://demo-browser.lightpanda.io/ node playwright/proxy_auth.js
|
||||
kill `cat LPD.pid` `cat PROXY.id`
|
||||
|
||||
# e2e tests w/ web-bot-auth configuration on.
|
||||
wba-demo-scripts:
|
||||
name: wba-demo-scripts
|
||||
needs: zig-build-release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem
|
||||
|
||||
- name: run end to end tests
|
||||
run: |
|
||||
./lightpanda serve \
|
||||
--web_bot_auth_key_file private_key.pem \
|
||||
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
|
||||
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
|
||||
& echo $! > LPD.pid
|
||||
go run runner/main.go
|
||||
kill `cat LPD.pid`
|
||||
|
||||
- name: build proxy
|
||||
run: |
|
||||
cd proxy
|
||||
go build
|
||||
|
||||
- name: run end to end tests through proxy
|
||||
run: |
|
||||
./proxy/proxy & echo $! > PROXY.id
|
||||
./lightpanda serve \
|
||||
--web_bot_auth_key_file private_key.pem \
|
||||
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
|
||||
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
|
||||
--http_proxy 'http://127.0.0.1:3000' \
|
||||
& echo $! > LPD.pid
|
||||
go run runner/main.go
|
||||
kill `cat LPD.pid` `cat PROXY.id`
|
||||
|
||||
- name: run request interception through proxy
|
||||
run: |
|
||||
export PROXY_USERNAME=username PROXY_PASSWORD=password
|
||||
./proxy/proxy & echo $! > PROXY.id
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
URL=https://demo-browser.lightpanda.io/campfire-commerce/ node puppeteer/proxy_auth.js
|
||||
BASE_URL=https://demo-browser.lightpanda.io/ node playwright/proxy_auth.js
|
||||
kill `cat LPD.pid` `cat PROXY.id`
|
||||
|
||||
wba-test:
|
||||
name: wba-test
|
||||
needs: zig-build-release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
# force a wakup of the auth server before requesting it w/ the test itself
|
||||
- run: curl https://${{ vars.WBA_DOMAIN }}
|
||||
|
||||
- name: run wba test
|
||||
shell: bash
|
||||
run: |
|
||||
node webbotauth/validator.js &
|
||||
VALIDATOR_PID=$!
|
||||
sleep 5
|
||||
|
||||
exec 3<<< "${{ secrets.WBA_PRIVATE_KEY_PEM }}"
|
||||
|
||||
./lightpanda fetch --dump http://127.0.0.1:8989/ \
|
||||
--web_bot_auth_key_file /proc/self/fd/3 \
|
||||
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
|
||||
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }}
|
||||
|
||||
wait $VALIDATOR_PID
|
||||
exec 3>&-
|
||||
|
||||
cdp-and-hyperfine-bench:
|
||||
name: cdp-and-hyperfine-bench
|
||||
needs: zig-build-release
|
||||
@@ -125,7 +220,6 @@ jobs:
|
||||
MAX_VmHWM: 28000 # 28MB (KB)
|
||||
MAX_CG_PEAK: 8000 # 8MB (KB)
|
||||
MAX_AVG_DURATION: 17
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
# How to give cgroups access to the user actions-runner on the host:
|
||||
# $ sudo apt install cgroup-tools
|
||||
@@ -140,7 +234,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -148,7 +242,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
@@ -234,7 +328,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
@@ -257,12 +351,12 @@ jobs:
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
@@ -280,7 +374,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
|
||||
@@ -5,7 +5,9 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
||||
|
||||
RELEASE: ${{ github.ref_type == 'tag' && github.ref_name || 'nightly' }}
|
||||
GIT_VERSION_FLAG: ${{ github.ref_type == 'tag' && format('-Dgit_version={0}', github.ref_name) || '' }}
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -33,8 +35,6 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -72,11 +72,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -87,7 +85,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -116,11 +114,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -131,7 +127,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -158,11 +154,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -173,7 +167,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
42
.github/workflows/wpt.yml
vendored
42
.github/workflows/wpt.yml
vendored
@@ -10,7 +10,7 @@ env:
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "23 2 * * *"
|
||||
- cron: "21 2 * * *"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
@@ -19,23 +19,31 @@ jobs:
|
||||
wpt-build-release:
|
||||
name: zig build release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
ARCH: aarch64
|
||||
OS: linux
|
||||
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
os: ${{env.OS}}
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: v8 snapshot
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build release
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -45,7 +53,7 @@ jobs:
|
||||
wpt-build-runner:
|
||||
name: build wpt runner
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
@@ -59,7 +67,7 @@ jobs:
|
||||
CGO_ENABLED=0 go build
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: wptrunner
|
||||
path: |
|
||||
@@ -73,8 +81,8 @@ jobs:
|
||||
- wpt-build-runner
|
||||
|
||||
# use a self host runner.
|
||||
runs-on: lpd-bench-hetzner
|
||||
timeout-minutes: 180
|
||||
runs-on: lpd-wpt-aws
|
||||
timeout-minutes: 600
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
@@ -91,14 +99,14 @@ jobs:
|
||||
run: ./wpt manifest
|
||||
|
||||
- name: download lightpanda release
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: download wptrunner
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: wptrunner
|
||||
|
||||
@@ -107,8 +115,8 @@ jobs:
|
||||
- name: run test with json output
|
||||
run: |
|
||||
./wpt serve 2> /dev/null & echo $! > WPT.pid
|
||||
sleep 10s
|
||||
./wptrunner -lpd-path ./lightpanda -json -concurrency 10 -pool 3 > wpt.json
|
||||
sleep 20s
|
||||
./wptrunner -lpd-path ./lightpanda -json -concurrency 5 -pool 5 --mem-limit 400 > wpt.json
|
||||
kill `cat WPT.pid`
|
||||
|
||||
- name: write commit
|
||||
@@ -116,7 +124,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: wpt-results
|
||||
path: |
|
||||
@@ -139,7 +147,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: wpt-results
|
||||
|
||||
|
||||
60
.github/workflows/zig-fmt.yml
vendored
60
.github/workflows/zig-fmt.yml
vendored
@@ -1,60 +0,0 @@
|
||||
name: zig-fmt
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
# running when the PR is marked ready_for_review w/o other change.
|
||||
# see https://github.com/orgs/community/discussions/25722#discussioncomment-3248917
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
||||
- uses: mlugg/setup-zig@v2
|
||||
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check ./*.zig ./**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
98
.github/workflows/zig-test.yml
vendored
98
.github/workflows/zig-test.yml
vendored
@@ -5,19 +5,18 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
branches: [main]
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**"
|
||||
- "vendor/zig-js-runtime"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
pull_request:
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "build.zig.zon"
|
||||
|
||||
pull_request:
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
@@ -27,28 +26,63 @@ on:
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "build.zig.zon"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-test-debug:
|
||||
name: zig test using v8 in debug mode
|
||||
timeout-minutes: 15
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
||||
- uses: mlugg/setup-zig@v2
|
||||
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check ./*.zig ./**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
|
||||
zig-test-debug:
|
||||
name: zig test using v8 in debug mode
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -57,21 +91,18 @@ jobs:
|
||||
- name: zig build test
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8_debug.a -Dtsan=true test
|
||||
|
||||
zig-test:
|
||||
zig-test-release:
|
||||
name: zig test
|
||||
timeout-minutes: 15
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -83,7 +114,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
@@ -93,23 +124,22 @@ jobs:
|
||||
|
||||
bench-fmt:
|
||||
name: perf-fmt
|
||||
needs: zig-test
|
||||
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
needs: zig-test-release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ FROM debian:stable-slim
|
||||
ARG MINISIG=0.12
|
||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||
ARG V8=14.0.365.4
|
||||
ARG ZIG_V8=v0.3.2
|
||||
ARG ZIG_V8=v0.3.4
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN apt-get update -yq && \
|
||||
|
||||
8
Makefile
8
Makefile
@@ -47,7 +47,7 @@ help:
|
||||
|
||||
# $(ZIG) commands
|
||||
# ------------
|
||||
.PHONY: build build-v8-snapshot build-dev run run-release shell test bench data end2end
|
||||
.PHONY: build build-v8-snapshot build-dev run run-release test bench data end2end
|
||||
|
||||
## Build v8 snapshot
|
||||
build-v8-snapshot:
|
||||
@@ -77,11 +77,6 @@ run-debug: build-dev
|
||||
@printf "\033[36mRunning...\033[0m\n"
|
||||
@./zig-out/bin/lightpanda || (printf "\033[33mRun ERROR\033[0m\n"; exit 1;)
|
||||
|
||||
## Run a JS shell in debug mode
|
||||
shell:
|
||||
@printf "\033[36mBuilding shell...\033[0m\n"
|
||||
@$(ZIG) build shell || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
||||
|
||||
## Test - `grep` is used to filter out the huge compile command on build
|
||||
ifeq ($(OS), macos)
|
||||
test:
|
||||
@@ -106,4 +101,3 @@ install: build
|
||||
|
||||
data:
|
||||
cd src/data && go run public_suffix_list_gen.go > public_suffix_list.zig
|
||||
|
||||
|
||||
377
README.md
377
README.md
@@ -1,30 +1,22 @@
|
||||
<p align="center">
|
||||
<a href="https://lightpanda.io"><img src="https://cdn.lightpanda.io/assets/images/logo/lpd-logo.png" alt="Logo" height=170></a>
|
||||
</p>
|
||||
|
||||
<h1 align="center">Lightpanda Browser</h1>
|
||||
<p align="center">
|
||||
<strong>The headless browser built from scratch for AI agents and automation.</strong><br>
|
||||
Not a Chromium fork. Not a WebKit patch. A new browser, written in Zig.
|
||||
</p>
|
||||
|
||||
<p align="center"><a href="https://lightpanda.io/">lightpanda.io</a></p>
|
||||
|
||||
</div>
|
||||
<div align="center">
|
||||
|
||||
[](https://github.com/lightpanda-io/browser/blob/main/LICENSE)
|
||||
[](https://twitter.com/lightpanda_io)
|
||||
[](https://github.com/lightpanda-io/browser)
|
||||
[](https://discord.gg/K63XeymfB5)
|
||||
|
||||
</div>
|
||||
|
||||
Lightpanda is the open-source browser made for headless usage:
|
||||
|
||||
- Javascript execution
|
||||
- Support of Web APIs (partial, WIP)
|
||||
- Compatible with Playwright[^1], Puppeteer, chromedp through [CDP](https://chromedevtools.github.io/devtools-protocol/)
|
||||
|
||||
Fast web automation for AI agents, LLM training, scraping and testing:
|
||||
|
||||
- Ultra-low memory footprint (9x less than Chrome)
|
||||
- Exceptionally fast execution (11x faster than Chrome)
|
||||
- Instant startup
|
||||
<div align="center">
|
||||
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/execution-time.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
@@ -33,11 +25,37 @@ Fast web automation for AI agents, LLM training, scraping and testing:
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
</div>
|
||||
|
||||
_Puppeteer requesting 100 pages from a local website on a AWS EC2 m5.large instance.
|
||||
See [benchmark details](https://github.com/lightpanda-io/demo)._
|
||||
## Table of Contents
|
||||
|
||||
[^1]: **Playwright support disclaimer:**
|
||||
Due to the nature of Playwright, a script that works with the current version of the browser may not function correctly with a future version. Playwright uses an intermediate JavaScript layer that selects an execution strategy based on the browser's available features. If Lightpanda adds a new [Web API](https://developer.mozilla.org/en-US/docs/Web/API), Playwright may choose to execute different code for the same script. This new code path could attempt to use features that are not yet implemented. Lightpanda makes an effort to add compatibility tests, but we can't cover all scenarios. If you encounter an issue, please create a [GitHub issue](https://github.com/lightpanda-io/browser/issues) and include the last known working version of the script.
|
||||
- [Benchmarks](#benchmarks)
|
||||
- [Quick Start](#quick-start)
|
||||
- [Install](#install)
|
||||
- [Dump a URL](#dump-a-url)
|
||||
- [Start a CDP Server](#start-a-cdp-server)
|
||||
- [Telemetry](#telemetry)
|
||||
- [Lightpanda vs Headless Chrome](#lightpanda-vs-headless-chrome)
|
||||
- [What Lightpanda supports today](#what-lightpanda-supports-today)
|
||||
- [Use Cases](#use-cases)
|
||||
- [Architecture](#architecture)
|
||||
- [Why Lightpanda?](#why-lightpanda)
|
||||
- [Build from Source](#build-from-source)
|
||||
- [Test](#test)
|
||||
- [Contributing](#contributing)
|
||||
- [Compatibility Note](#compatibility-note)
|
||||
- [FAQ](#faq)
|
||||
|
||||
---
|
||||
|
||||
## Benchmarks
|
||||
|
||||
_Puppeteer requesting 100 pages from a local website on an AWS EC2 m5.large instance. See [benchmark details](https://github.com/lightpanda-io/demo)._
|
||||
|
||||
| Metric | Lightpanda | Headless Chrome | Difference |
|
||||
| :---- | :---- | :---- | :---- |
|
||||
| Memory (peak, 100 pages) | 24 MB | 207 MB | ~9x less |
|
||||
| Execution time (100 pages) | 2.3s | 25.2s | ~11x faster |
|
||||
|
||||
---
|
||||
|
||||
## Quick start
|
||||
|
||||
@@ -80,6 +98,10 @@ docker run -d --name lightpanda -p 9222:9222 lightpanda/browser:nightly
|
||||
```console
|
||||
./lightpanda fetch --obey_robots --log_format pretty --log_level info https://demo-browser.lightpanda.io/campfire-commerce/
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Example output</summary>
|
||||
|
||||
```console
|
||||
INFO telemetry : telemetry status . . . . . . . . . . . . . [+0ms]
|
||||
disabled = false
|
||||
@@ -110,11 +132,17 @@ INFO http : request complete . . . . . . . . . . . . . . . . [+141ms]
|
||||
<!DOCTYPE html>
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Start a CDP server
|
||||
|
||||
```console
|
||||
./lightpanda serve --obey_robots --log_format pretty --log_level info --host 127.0.0.1 --port 9222
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Example output</summary>
|
||||
|
||||
```console
|
||||
INFO telemetry : telemetry status . . . . . . . . . . . . . [+0ms]
|
||||
disabled = false
|
||||
@@ -123,9 +151,14 @@ INFO app : server running . . . . . . . . . . . . . . . . . [+0ms]
|
||||
address = 127.0.0.1:9222
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
Once the CDP server started, you can run a Puppeteer script by configuring the
|
||||
`browserWSEndpoint`.
|
||||
|
||||
<details>
|
||||
<summary>Example Puppeteer script</summary>
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
@@ -156,52 +189,114 @@ await context.close();
|
||||
await browser.disconnect();
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Telemetry
|
||||
By default, Lightpanda collects and sends usage telemetry. This can be disabled by setting an environment variable `LIGHTPANDA_DISABLE_TELEMETRY=true`. You can read Lightpanda's privacy policy at: [https://lightpanda.io/privacy-policy](https://lightpanda.io/privacy-policy).
|
||||
|
||||
## Status
|
||||
## Lightpanda vs Headless Chrome
|
||||
|
||||
Lightpanda is in Beta and currently a work in progress. Stability and coverage are improving and many websites now work.
|
||||
You may still encounter errors or crashes. Please open an issue with specifics if so.
|
||||
Lightpanda is not a general-purpose browser. It is built specifically for headless workloads.
|
||||
|
||||
Here are the key features we have implemented:
|
||||
**Use Lightpanda when you need:**
|
||||
|
||||
- [x] HTTP loader ([Libcurl](https://curl.se/libcurl/))
|
||||
- [x] HTML parser ([html5ever](https://github.com/servo/html5ever))
|
||||
- [x] DOM tree
|
||||
- [x] Javascript support ([v8](https://v8.dev/))
|
||||
- [x] DOM APIs
|
||||
- [x] Ajax
|
||||
- [x] XHR API
|
||||
- [x] Fetch API
|
||||
- [x] DOM dump
|
||||
- [x] CDP/websockets server
|
||||
- [x] Click
|
||||
- [x] Input form
|
||||
- [x] Cookies
|
||||
- [x] Custom HTTP headers
|
||||
- [x] Proxy support
|
||||
- [x] Network interception
|
||||
- [x] Respect `robots.txt` with option `--obey_robots`
|
||||
- Low-memory scraping or data extraction at scale
|
||||
- AI agent browsing (via MCP or CDP)
|
||||
- Fast CI test runs against a headless browser
|
||||
- Markdown/text extraction from JS-rendered pages
|
||||
- Minimal footprint: single binary, no Chromium install
|
||||
|
||||
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
|
||||
**Use Headless Chrome when you need:**
|
||||
|
||||
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.
|
||||
- Full visual rendering, screenshots, or PDFs
|
||||
- WebGL or advanced CSS layout
|
||||
- Complete Web API coverage (Canvas, WebRTC, etc.)
|
||||
- Pixel-perfect visual testing
|
||||
|
||||
## Build from sources
|
||||
### What Lightpanda supports today
|
||||
|
||||
- HTTP loader ([Libcurl](https://curl.se/libcurl/))
|
||||
- HTML parser ([html5ever](https://github.com/servo/html5ever))
|
||||
- DOM tree + DOM APIs
|
||||
- Javascript ([v8](https://v8.dev/))
|
||||
- Ajax (XHR + Fetch)
|
||||
- CDP/WebSocket server
|
||||
- Click, input/form, cookies
|
||||
- Custom HTTP headers
|
||||
- Proxy support
|
||||
- Network interception
|
||||
- robots.txt compliance (`--obey_robots`)
|
||||
|
||||
**Note:** There are hundreds of Web APIs. Coverage increases with each release. If you hit a gap, [open an issue](https://github.com/lightpanda-io/browser/issues).
|
||||
|
||||
## Use Cases
|
||||
|
||||
### AI Agents and LLM Tools
|
||||
|
||||
Give your AI agent a real browser that is fast and cheap to run. Lightpanda Cloud exposes an MCP endpoint at `cloud.lightpanda.io/mcp/sse` with tools for search, goto, markdown, and links. Works with Claude, Cursor, Windsurf, or any CDP-based agent framework.
|
||||
|
||||
- [agent-skill repo](https://github.com/lightpanda-io/agent-skill)
|
||||
|
||||
### Web Scraping and Data Extraction
|
||||
|
||||
Lightpanda uses 9x less memory than Chrome. It works with Crawlee, Puppeteer, and Playwright.
|
||||
|
||||
```console
|
||||
lightpanda fetch --dump markdown --obey_robots https://example.com
|
||||
```
|
||||
|
||||
### Automated Testing
|
||||
|
||||
Drop-in replacement for headless Chrome in CI pipelines. If your tests use Puppeteer or Playwright, change the connection URL to `ws://127.0.0.1:9222` and run them.
|
||||
|
||||
### LLM Training Data Collection
|
||||
|
||||
Use `--dump markdown` to extract clean text from JS-rendered pages at volume.
|
||||
|
||||
---
|
||||
|
||||
## Architecture
|
||||
|
||||

|
||||
|
||||
The client connects over CDP via WebSocket. The server parses HTML into a DOM tree, applies CSS, and executes JavaScript through V8. Page content is returned to the client as HTML, markdown, or structured data depending on the request.
|
||||
|
||||
---
|
||||
|
||||
## Why Lightpanda?
|
||||
|
||||
### Javascript execution is mandatory for the modern web
|
||||
|
||||
Simple HTTP requests used to be enough for scraping. That's no longer the case. Javascript now drives most of the web:
|
||||
|
||||
- Ajax, Single Page Apps, infinite loading, instant search
|
||||
- JS frameworks: React, Vue, Angular, and others
|
||||
|
||||
### Chrome is not the right tool
|
||||
|
||||
Running a full desktop browser on a server works, but it does not scale well. Chrome at hundreds or thousands of instances is expensive:
|
||||
|
||||
- Heavy on RAM and CPU
|
||||
- Hard to package, deploy, and maintain at scale
|
||||
- Many features are irrelevant in headless usage
|
||||
|
||||
### Lightpanda is built for performance
|
||||
|
||||
Supporting Javascript with real performance meant building from scratch rather than forking Chromium:
|
||||
|
||||
- Not based on Chromium, Blink, or WebKit
|
||||
- Written in Zig, a low-level language with explicit memory control
|
||||
- No graphical rendering engine
|
||||
|
||||
---
|
||||
|
||||
## Build from Source
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Lightpanda is written with [Zig](https://ziglang.org/) `0.15.2`. You have to
|
||||
install it with the right version in order to build the project.
|
||||
Lightpanda is written with [Zig](https://ziglang.org/) `0.15.2` and depends on: [v8](https://chromium.googlesource.com/v8/v8.git), [Libcurl](https://curl.se/libcurl/), [html5ever](https://github.com/servo/html5ever).
|
||||
|
||||
Lightpanda also depends on
|
||||
[zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime/) (with v8),
|
||||
[Libcurl](https://curl.se/libcurl/) and [html5ever](https://github.com/servo/html5ever).
|
||||
|
||||
To be able to build the v8 engine for zig-js-runtime, you have to install some libs:
|
||||
|
||||
For **Debian/Ubuntu based Linux**:
|
||||
**Debian/Ubuntu:**
|
||||
|
||||
```
|
||||
sudo apt install xz-utils ca-certificates \
|
||||
@@ -210,55 +305,60 @@ sudo apt install xz-utils ca-certificates \
|
||||
```
|
||||
You also need to [install Rust](https://rust-lang.org/tools/install/).
|
||||
|
||||
For systems with [**Nix**](https://nixos.org/download/), you can use the devShell:
|
||||
**Nix:**
|
||||
|
||||
```
|
||||
nix develop
|
||||
```
|
||||
|
||||
For **MacOS**, you need cmake and [Rust](https://rust-lang.org/tools/install/).
|
||||
**macOS:**
|
||||
|
||||
```
|
||||
brew install cmake
|
||||
```
|
||||
|
||||
You also need [Rust](https://rust-lang.org/tools/install/).
|
||||
|
||||
### Build and run
|
||||
|
||||
You an build the entire browser with `make build` or `make build-dev` for debug
|
||||
env.
|
||||
Build the browser:
|
||||
|
||||
But you can directly use the zig command: `zig build run`.
|
||||
```
|
||||
make build # release
|
||||
make build-dev # debug
|
||||
```
|
||||
|
||||
Or directly: `zig build run`.
|
||||
|
||||
#### Embed v8 snapshot
|
||||
|
||||
Lighpanda uses v8 snapshot. By default, it is created on startup but you can
|
||||
embed it by using the following commands:
|
||||
Generate the snapshot:
|
||||
|
||||
Generate the snapshot.
|
||||
```
|
||||
zig build snapshot_creator -- src/snapshot.bin
|
||||
```
|
||||
|
||||
Build using the snapshot binary.
|
||||
Build using the snapshot:
|
||||
|
||||
```
|
||||
zig build -Dsnapshot_path=../../snapshot.bin
|
||||
```
|
||||
|
||||
See [#1279](https://github.com/lightpanda-io/browser/pull/1279) for more details.
|
||||
See [#1279](https://github.com/lightpanda-io/browser/pull/1279) for details.
|
||||
|
||||
---
|
||||
|
||||
## Test
|
||||
|
||||
### Unit Tests
|
||||
|
||||
You can test Lightpanda by running `make test`.
|
||||
```
|
||||
make test
|
||||
```
|
||||
|
||||
### End to end tests
|
||||
### End to End Tests
|
||||
|
||||
To run end to end tests, you need to clone the [demo
|
||||
repository](https://github.com/lightpanda-io/demo) into `../demo` dir.
|
||||
|
||||
You have to install the [demo's node
|
||||
requirements](https://github.com/lightpanda-io/demo?tab=readme-ov-file#dependencies-1)
|
||||
|
||||
You also need to install [Go](https://go.dev) > v1.24.
|
||||
Clone the [demo repository](https://github.com/lightpanda-io/demo) into `../demo`. Install the [demo's node requirements](https://github.com/lightpanda-io/demo?tab=readme-ov-file#dependencies-1) and [Go](https://go.dev) >= v1.24.
|
||||
|
||||
```
|
||||
make end2end
|
||||
@@ -266,107 +366,124 @@ make end2end
|
||||
|
||||
### Web Platform Tests
|
||||
|
||||
Lightpanda is tested against the standardized [Web Platform
|
||||
Tests](https://web-platform-tests.org/).
|
||||
Lightpanda is tested against the standardized [Web Platform Tests](https://web-platform-tests.org/) using [a fork](https://github.com/lightpanda-io/wpt/tree/fork) with a custom [`testharnessreport.js`](https://github.com/lightpanda-io/wpt/commit/01a3115c076a3ad0c84849dbbf77a6e3d199c56f).
|
||||
|
||||
We use [a fork](https://github.com/lightpanda-io/wpt/tree/fork) including a custom
|
||||
[`testharnessreport.js`](https://github.com/lightpanda-io/wpt/commit/01a3115c076a3ad0c84849dbbf77a6e3d199c56f).
|
||||
You can also run individual WPT test cases in your browser via [wpt.live](https://wpt.live).
|
||||
|
||||
For reference, you can easily execute a WPT test case with your browser via
|
||||
[wpt.live](https://wpt.live).
|
||||
**Setup WPT HTTP server:**
|
||||
|
||||
#### Configure WPT HTTP server
|
||||
|
||||
To run the test, you must clone the repository, configure the custom hosts and generate the
|
||||
`MANIFEST.json` file.
|
||||
|
||||
Clone the repository with the `fork` branch.
|
||||
```
|
||||
git clone -b fork --depth=1 git@github.com:lightpanda-io/wpt.git
|
||||
```
|
||||
|
||||
Enter into the `wpt/` dir.
|
||||
|
||||
Install custom domains in your `/etc/hosts`
|
||||
```
|
||||
cd wpt
|
||||
./wpt make-hosts-file | sudo tee -a /etc/hosts
|
||||
```
|
||||
|
||||
Generate `MANIFEST.json`
|
||||
```
|
||||
./wpt manifest
|
||||
```
|
||||
Use the [WPT's setup
|
||||
guide](https://web-platform-tests.org/running-tests/from-local-system.html) for
|
||||
details.
|
||||
|
||||
#### Run WPT test suite
|
||||
See the [WPT setup guide](https://web-platform-tests.org/running-tests/from-local-system.html) for details.
|
||||
|
||||
An external [Go](https://go.dev) runner is provided by
|
||||
[github.com/lightpanda-io/demo/](https://github.com/lightpanda-io/demo/)
|
||||
repository, located into `wptrunner/` dir.
|
||||
You need to clone the project first.
|
||||
**Run WPT tests:**
|
||||
|
||||
Start the WPT HTTP server:
|
||||
|
||||
First start the WPT's HTTP server from your `wpt/` clone dir.
|
||||
```
|
||||
./wpt serve
|
||||
```
|
||||
|
||||
Run a Lightpanda browser
|
||||
Run Lightpanda:
|
||||
|
||||
```
|
||||
zig build run -- --insecure_disable_tls_host_verification
|
||||
```
|
||||
|
||||
Then you can start the wptrunner from the Demo's clone dir:
|
||||
Run the test suite (from [demo](https://github.com/lightpanda-io/demo/) clone):
|
||||
|
||||
```
|
||||
cd wptrunner && go run .
|
||||
```
|
||||
|
||||
Or one specific test:
|
||||
Run a specific test:
|
||||
|
||||
```
|
||||
cd wptrunner && go run . Node-childNodes.html
|
||||
```
|
||||
|
||||
`wptrunner` command accepts `--summary` and `--json` options modifying output.
|
||||
Also `--concurrency` define the concurrency limit.
|
||||
Options: `--summary`, `--json`, `--concurrency`.
|
||||
|
||||
:warning: Running the whole test suite will take a long time. In this case,
|
||||
it's useful to build in `releaseFast` mode to make tests faster.
|
||||
**Note:** The full suite takes a long time. Build with `zig build -Doptimize=ReleaseFast run` for faster test execution.
|
||||
|
||||
```
|
||||
zig build -Doptimize=ReleaseFast run
|
||||
```
|
||||
---
|
||||
|
||||
## Contributing
|
||||
|
||||
Lightpanda accepts pull requests through GitHub.
|
||||
See [CONTRIBUTING.md](https://github.com/lightpanda-io/browser/blob/main/CONTRIBUTING.md) for guidelines.
|
||||
|
||||
You have to sign our [CLA](CLA.md) during the pull request process otherwise
|
||||
we're not able to accept your contributions.
|
||||
You must sign our [CLA](CLA.md) during the pull request process.
|
||||
|
||||
## Why?
|
||||
- [Good first issues](https://github.com/lightpanda-io/browser/labels/good%20first%20issue)
|
||||
- [Discord](https://discord.gg/K63XeymfB5)
|
||||
|
||||
### Javascript execution is mandatory for the modern web
|
||||
---
|
||||
|
||||
In the good old days, scraping a webpage was as easy as making an HTTP request, cURL-like. It’s not possible anymore, because Javascript is everywhere, like it or not:
|
||||
## Compatibility Note
|
||||
|
||||
- Ajax, Single Page App, infinite loading, “click to display”, instant search, etc.
|
||||
- JS web frameworks: React, Vue, Angular & others
|
||||
**Playwright compatibility note:** A Playwright script that works today may break after a Lightpanda update. Playwright selects its execution strategy based on which browser APIs are available. When Lightpanda adds a new [Web API](https://developer.mozilla.org/en-US/docs/Web/API), Playwright may switch to a code path that uses features not yet implemented. We test for compatibility, but cannot cover every scenario. If you hit a regression, [open a GitHub issue](https://github.com/lightpanda-io/browser/issues) and include the last version of the script that worked.
|
||||
|
||||
### Chrome is not the right tool
|
||||
---
|
||||
|
||||
If we need Javascript, why not use a real web browser? Take a huge desktop application, hack it, and run it on the server. Hundreds or thousands of instances of Chrome if you use it at scale. Are you sure it’s such a good idea?
|
||||
## FAQ
|
||||
|
||||
- Heavy on RAM and CPU, expensive to run
|
||||
- Hard to package, deploy and maintain at scale
|
||||
- Bloated, lots of features are not useful in headless usage
|
||||
<details>
|
||||
<summary><strong>Q: What is Lightpanda?</strong></summary>
|
||||
|
||||
### Lightpanda is built for performance
|
||||
Lightpanda is an open-source headless browser written in Zig. It targets AI agents, web scraping, and automated testing. It uses 9x less memory and runs 11x faster than headless Chrome.
|
||||
|
||||
If we want both Javascript and performance in a true headless browser, we need to start from scratch. Not another iteration of Chromium, really from a blank page. Crazy right? But that’s what we did:
|
||||
</details>
|
||||
|
||||
- Not based on Chromium, Blink or WebKit
|
||||
- Low-level system programming language (Zig) with optimisations in mind
|
||||
- Opinionated: without graphical rendering
|
||||
<details>
|
||||
<summary><strong>Q: How does Lightpanda compare to Headless Chrome?</strong></summary>
|
||||
|
||||
About 24 MB peak memory vs 207 MB for Chrome when loading 100 pages via Puppeteer. Task completion: 2.3s vs 25.2s. It supports the same CDP protocol, so most Puppeteer and Playwright scripts work without code changes. See the [Lightpanda vs Headless Chrome](#lightpanda-vs-headless-chrome) section for what Lightpanda can and cannot do.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: Is Lightpanda a Chromium fork?</strong></summary>
|
||||
|
||||
No. It is written in Zig and implements web standards independently (W3C DOM, CSS, JavaScript via V8).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: Does Lightpanda work with Playwright?</strong></summary>
|
||||
|
||||
Yes. Connect with `chromium.connectOverCDP("ws://127.0.0.1:9222")` locally, or use a cloud endpoint for managed infrastructure. See the [compatibility note](#compatibility-note) for caveats.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: Is there a cloud/hosted version?</strong></summary>
|
||||
|
||||
Yes. [console.lightpanda.io](https://console.lightpanda.io) provides managed browser infrastructure with regional endpoints (EU West, US West), MCP integration, and both Lightpanda and Chromium browser options.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: Why is Lightpanda written in Zig?</strong></summary>
|
||||
|
||||
Zig provides precise memory control and deterministic performance without a garbage collector. It compiles to a single static binary with no runtime dependencies. Learn more: [Why We Built Lightpanda in Zig](https://lightpanda.io/blog/posts/why-we-built-lightpanda-in-zig).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: What operating systems does Lightpanda support?</strong></summary>
|
||||
|
||||
Linux (Debian 12, Ubuntu 22.04/24.04), macOS 13+, and Windows 10+ via WSL2.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Q: Does Lightpanda respect robots.txt?</strong></summary>
|
||||
|
||||
Yes, when the `--obey_robots` flag is enabled.
|
||||
|
||||
</details>
|
||||
|
||||
29
SUMMARY.md
Normal file
29
SUMMARY.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Lightpanda Browser: Document Summary
|
||||
|
||||
**What it is:** A headless browser built in Zig from scratch. Not a Chromium fork. Targets AI agents, scraping, and automated testing.
|
||||
|
||||
**Performance:** 9x less memory (24 MB vs 207 MB) and 11x faster (2.3s vs 25.2s) than headless Chrome, measured over 100 pages via Puppeteer.
|
||||
|
||||
---
|
||||
|
||||
## Section Summaries
|
||||
|
||||
**Quick Start:** Install via nightly binary (Linux/macOS/Windows WSL2) or Docker. Run `fetch` to dump a URL or `serve` to start a CDP server. Connect Puppeteer/Playwright via `ws://127.0.0.1:9222`.
|
||||
|
||||
**Lightpanda vs Headless Chrome:** Choose Lightpanda for low-memory scraping, AI agent browsing, CI testing, and markdown extraction. Use Chrome for screenshots, PDFs, WebGL, or full Web API coverage. Supported: HTTP, HTML5, DOM, JS (V8), Ajax, CDP, cookies, proxy, network interception, robots.txt.
|
||||
|
||||
**Use Cases:** AI agents via MCP or CDP, web scraping at scale, headless Chrome replacement in CI, LLM training data extraction with `--dump markdown`.
|
||||
|
||||
**Architecture:** CDP/WebSocket client → HTML parsed to DOM → CSS applied → JS via V8 → response as HTML, markdown, or structured data.
|
||||
|
||||
**Why Lightpanda?:** Modern web requires JS execution; Chrome is too heavy to run at scale; Lightpanda is built in Zig with no graphical renderer for minimal footprint.
|
||||
|
||||
**Build from Source:** Requires Zig 0.15.2, v8, Libcurl, html5ever, and Rust. `make build` or `zig build run`. Optional v8 snapshot for faster startup.
|
||||
|
||||
**Test:** `make test` for unit tests; `make end2end` for end-to-end; WPT suite runs via a Go runner in the demo repo.
|
||||
|
||||
**Contributing:** PRs via GitHub; CLA required. [Good first issues](https://github.com/lightpanda-io/browser/labels/good%20first%20issue) labeled.
|
||||
|
||||
**Compatibility Note:** Playwright scripts may break after Lightpanda updates when new Web APIs shift Playwright's execution path. File an issue with the last working version.
|
||||
|
||||
**FAQ:** What Lightpanda is, Chrome comparison, Chromium fork question, Playwright/cloud usage, Zig rationale, OS support, robots.txt.
|
||||
BIN
architecture-diagram-highres.png
Normal file
BIN
architecture-diagram-highres.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 342 KiB |
@@ -27,12 +27,14 @@ pub fn build(b: *Build) !void {
|
||||
const manifest = Manifest.init(b);
|
||||
|
||||
const git_commit = b.option([]const u8, "git_commit", "Current git commit");
|
||||
const git_version = b.option([]const u8, "git_version", "Current git version (from tag)");
|
||||
const prebuilt_v8_path = b.option([]const u8, "prebuilt_v8_path", "Path to prebuilt libc_v8.a");
|
||||
const snapshot_path = b.option([]const u8, "snapshot_path", "Path to v8 snapshot");
|
||||
|
||||
var opts = b.addOptions();
|
||||
opts.addOption([]const u8, "version", manifest.version);
|
||||
opts.addOption([]const u8, "git_commit", git_commit orelse "dev");
|
||||
opts.addOption(?[]const u8, "git_version", git_version orelse null);
|
||||
opts.addOption(?[]const u8, "snapshot_path", snapshot_path);
|
||||
|
||||
const enable_tsan = b.option(bool, "tsan", "Enable Thread Sanitizer") orelse false;
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
.minimum_zig_version = "0.15.2",
|
||||
.dependencies = .{
|
||||
.v8 = .{
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.2.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH6wx-BABNgL7YIDgbnFgKZuXZ68yZNngNSrV6OjrY",
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.4.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH6_F3BAAiFvKY6R1H-gkuQlk19BkDQ0--uZuTrSup",
|
||||
},
|
||||
// .v8 = .{ .path = "../zig-v8-fork" },
|
||||
.brotli = .{
|
||||
|
||||
@@ -67,7 +67,7 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
app.app_dir_path = getAndMakeAppDir(allocator);
|
||||
|
||||
app.telemetry = try Telemetry.init(app, config.mode);
|
||||
errdefer app.telemetry.deinit();
|
||||
errdefer app.telemetry.deinit(allocator);
|
||||
|
||||
app.arena_pool = ArenaPool.init(allocator, 512, 1024 * 16);
|
||||
errdefer app.arena_pool.deinit();
|
||||
@@ -85,7 +85,7 @@ pub fn deinit(self: *App) void {
|
||||
allocator.free(app_dir_path);
|
||||
self.app_dir_path = null;
|
||||
}
|
||||
self.telemetry.deinit();
|
||||
self.telemetry.deinit(allocator);
|
||||
self.network.deinit();
|
||||
self.snapshot.deinit();
|
||||
self.platform.deinit();
|
||||
|
||||
@@ -23,6 +23,8 @@ const Allocator = std.mem.Allocator;
|
||||
const log = @import("log.zig");
|
||||
const dump = @import("browser/dump.zig");
|
||||
|
||||
const WebBotAuthConfig = @import("network/WebBotAuth.zig").Config;
|
||||
|
||||
pub const RunMode = enum {
|
||||
help,
|
||||
fetch,
|
||||
@@ -161,6 +163,17 @@ pub fn cdpTimeout(self: *const Config) usize {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn webBotAuth(self: *const Config) ?WebBotAuthConfig {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch, .mcp => |opts| WebBotAuthConfig{
|
||||
.key_file = opts.common.web_bot_auth_key_file orelse return null,
|
||||
.keyid = opts.common.web_bot_auth_keyid orelse return null,
|
||||
.domain = opts.common.web_bot_auth_domain orelse return null,
|
||||
},
|
||||
.help, .version => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn maxConnections(self: *const Config) u16 {
|
||||
return switch (self.mode) {
|
||||
.serve => |opts| opts.cdp_max_connections,
|
||||
@@ -200,6 +213,8 @@ pub const DumpFormat = enum {
|
||||
html,
|
||||
markdown,
|
||||
wpt,
|
||||
semantic_tree,
|
||||
semantic_tree_text,
|
||||
};
|
||||
|
||||
pub const Fetch = struct {
|
||||
@@ -225,6 +240,10 @@ pub const Common = struct {
|
||||
log_format: ?log.Format = null,
|
||||
log_filter_scopes: ?[]log.Scope = null,
|
||||
user_agent_suffix: ?[]const u8 = null,
|
||||
|
||||
web_bot_auth_key_file: ?[]const u8 = null,
|
||||
web_bot_auth_keyid: ?[]const u8 = null,
|
||||
web_bot_auth_domain: ?[]const u8 = null,
|
||||
};
|
||||
|
||||
/// Pre-formatted HTTP headers for reuse across Http and Client.
|
||||
@@ -332,6 +351,14 @@ pub fn printUsageAndExit(self: *const Config, success: bool) void {
|
||||
\\--user_agent_suffix
|
||||
\\ Suffix to append to the Lightpanda/X.Y User-Agent
|
||||
\\
|
||||
\\--web_bot_auth_key_file
|
||||
\\ Path to the Ed25519 private key PEM file.
|
||||
\\
|
||||
\\--web_bot_auth_keyid
|
||||
\\ The JWK thumbprint of your public key.
|
||||
\\
|
||||
\\--web_bot_auth_domain
|
||||
\\ Your domain e.g. yourdomain.com
|
||||
;
|
||||
|
||||
// MAX_HELP_LEN|
|
||||
@@ -346,7 +373,7 @@ pub fn printUsageAndExit(self: *const Config, success: bool) void {
|
||||
\\
|
||||
\\Options:
|
||||
\\--dump Dumps document to stdout.
|
||||
\\ Argument must be 'html' or 'markdown'.
|
||||
\\ Argument must be 'html', 'markdown', 'semantic_tree', or 'semantic_tree_text'.
|
||||
\\ Defaults to no dump.
|
||||
\\
|
||||
\\--strip_mode Comma separated list of tag groups to remove from dump
|
||||
@@ -853,5 +880,32 @@ fn parseCommonArg(
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--web_bot_auth_key_file", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_key_file" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.web_bot_auth_key_file = try allocator.dupe(u8, str);
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--web_bot_auth_keyid", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_keyid" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.web_bot_auth_keyid = try allocator.dupe(u8, str);
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--web_bot_auth_domain", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_domain" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.web_bot_auth_domain = try allocator.dupe(u8, str);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
532
src/SemanticTree.zig
Normal file
532
src/SemanticTree.zig
Normal file
@@ -0,0 +1,532 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. See <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("log.zig");
|
||||
const isAllWhitespace = @import("string.zig").isAllWhitespace;
|
||||
const Page = lp.Page;
|
||||
const interactive = @import("browser/interactive.zig");
|
||||
|
||||
const CData = @import("browser/webapi/CData.zig");
|
||||
const Element = @import("browser/webapi/Element.zig");
|
||||
const Node = @import("browser/webapi/Node.zig");
|
||||
const AXNode = @import("cdp/AXNode.zig");
|
||||
const CDPNode = @import("cdp/Node.zig");
|
||||
|
||||
const Self = @This();
|
||||
|
||||
dom_node: *Node,
|
||||
registry: *CDPNode.Registry,
|
||||
page: *Page,
|
||||
arena: std.mem.Allocator,
|
||||
prune: bool = true,
|
||||
interactive_only: bool = false,
|
||||
max_depth: u32 = std.math.maxInt(u32) - 1,
|
||||
|
||||
pub fn jsonStringify(self: @This(), jw: *std.json.Stringify) error{WriteFailed}!void {
|
||||
var visitor = JsonVisitor{ .jw = jw, .tree = self };
|
||||
var xpath_buffer: std.ArrayList(u8) = .{};
|
||||
const listener_targets = interactive.buildListenerTargetMap(self.page, self.arena) catch |err| {
|
||||
log.err(.app, "listener map failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
self.walk(self.dom_node, &xpath_buffer, null, &visitor, 1, listener_targets, 0) catch |err| {
|
||||
log.err(.app, "semantic tree json dump failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
}
|
||||
|
||||
/// Serializes the semantic tree as an indented plain-text outline.
/// Internal errors are logged and surfaced as `error.WriteFailed`.
pub fn textStringify(self: @This(), writer: *std.Io.Writer) error{WriteFailed}!void {
    var visitor = TextVisitor{ .writer = writer, .tree = self, .depth = 0 };
    // Reusable buffer for building each node's xpath; allocations go to the arena.
    var xpath_buffer: std.ArrayList(u8) = .empty;
    const listener_targets = interactive.buildListenerTargetMap(self.page, self.arena) catch |err| {
        log.err(.app, "listener map failed", .{ .err = err });
        return error.WriteFailed;
    };
    self.walk(self.dom_node, &xpath_buffer, null, &visitor, 1, listener_targets, 0) catch |err| {
        log.err(.app, "semantic tree text dump failed", .{ .err = err });
        return error.WriteFailed;
    };
}
|
||||
|
||||
/// One entry of a `<select>`/`<datalist>` option list.
const OptionData = struct {
    /// The option's submit value.
    value: []const u8,
    /// The option's visible text.
    text: []const u8,
    /// Whether the option is currently selected.
    selected: bool,
};
|
||||
|
||||
/// Per-node data gathered during the walk and handed to visitors.
const NodeData = struct {
    /// Stable CDP node id from the registry.
    id: CDPNode.Id,
    axn: AXNode,
    /// Computed accessibility role (e.g. "button", "StaticText").
    role: []const u8,
    /// Accessible name, if any; cleared for generic containers without
    /// explicit labels (see `walk`).
    name: ?[]const u8,
    /// Current value for form controls (input/textarea/select).
    value: ?[]const u8,
    /// Options for `<select>` or `input[list]`; rendered natively by visitors.
    options: ?[]OptionData = null,
    /// Absolute xpath of the node relative to the walk root.
    xpath: []const u8,
    is_interactive: bool,
    /// Lower-case tag name; "text" for text nodes, "root" for documents.
    node_name: []const u8,
};
|
||||
|
||||
/// Depth-first walk over `node`'s subtree, calling `visitor.visit`/`leave`
/// for each kept node.
///
/// `index` is the 1-based position of `node` among same-tag siblings (used
/// for xpath segments). `parent_name` is the accessible name of the nearest
/// ancestor that produced one (used to prune redundant StaticText nodes).
/// `xpath_buffer` accumulates the path; it is restored to its incoming
/// length before returning.
fn walk(self: @This(), node: *Node, xpath_buffer: *std.ArrayList(u8), parent_name: ?[]const u8, visitor: anytype, index: usize, listener_targets: interactive.ListenerTargetMap, current_depth: u32) !void {
    if (current_depth > self.max_depth) return;

    // 1. Skip non-content nodes
    if (node.is(Element)) |el| {
        const tag = el.getTag();
        if (tag.isMetadata() or tag == .svg) return;

        // We handle options/optgroups natively inside their parents, skip them in the general walk
        if (tag == .datalist or tag == .option or tag == .optgroup) return;

        // Check visibility using the engine's checkVisibility which handles CSS display: none
        if (!el.checkVisibility(self.page)) {
            return;
        }

        if (el.is(Element.Html)) |html_el| {
            if (html_el.getHidden()) return;
        }
    } else if (node.is(CData.Text)) |text_node| {
        const text = text_node.getWholeText();
        if (isAllWhitespace(text)) {
            return;
        }
    } else if (node._type != .document and node._type != .document_fragment) {
        return;
    }

    const cdp_node = try self.registry.register(node);
    const axn = AXNode.fromNode(node);
    const role = try axn.getRole();

    var is_interactive = false;
    var value: ?[]const u8 = null;
    var options: ?[]OptionData = null;
    var node_name: []const u8 = "text";

    if (node.is(Element)) |el| {
        node_name = el.getTagNameLower();

        if (el.is(Element.Html.Input)) |input| {
            value = input.getValue();
            // input[list] references a <datalist> by id; surface its options.
            if (el.getAttributeSafe(comptime lp.String.wrap("list"))) |list_id| {
                options = try extractDataListOptions(list_id, self.page, self.arena);
            }
        } else if (el.is(Element.Html.TextArea)) |textarea| {
            value = textarea.getValue();
        } else if (el.is(Element.Html.Select)) |select| {
            value = select.getValue(self.page);
            options = try extractSelectOptions(el.asNode(), self.page, self.arena);
        }

        if (el.is(Element.Html)) |html_el| {
            if (interactive.classifyInteractivity(el, html_el, listener_targets) != null) {
                is_interactive = true;
            }
        }
    } else if (node._type == .document or node._type == .document_fragment) {
        node_name = "root";
    }

    const initial_xpath_len = xpath_buffer.items.len;
    try appendXPathSegment(node, xpath_buffer.writer(self.arena), index);
    const xpath = xpath_buffer.items;

    var name = try axn.getName(self.page, self.arena);

    const has_explicit_label = if (node.is(Element)) |el|
        el.getAttributeSafe(.wrap("aria-label")) != null or el.getAttributeSafe(.wrap("title")) != null
    else
        false;

    const structural = isStructuralRole(role);

    // Filter out computed concatenated names for generic containers without explicit labels.
    // This prevents token bloat and ensures their StaticText children aren't incorrectly pruned.
    // We ignore interactivity because a generic wrapper with an event listener still shouldn't hoist all text.
    if (name != null and structural and !has_explicit_label) {
        name = null;
    }

    var data = NodeData{
        .id = cdp_node.id,
        .axn = axn,
        .role = role,
        .name = name,
        .value = value,
        .options = options,
        .xpath = xpath,
        .is_interactive = is_interactive,
        .node_name = node_name,
    };

    var should_visit = true;
    if (self.interactive_only) {
        // Keep only interactive nodes, named content nodes, and the root.
        var keep = false;
        if (interactive.isInteractiveRole(role)) {
            keep = true;
        } else if (interactive.isContentRole(role)) {
            if (name != null and name.?.len > 0) {
                keep = true;
            }
        } else if (std.mem.eql(u8, role, "RootWebArea")) {
            keep = true;
        } else if (is_interactive) {
            keep = true;
        }
        if (!keep) {
            should_visit = false;
        }
    } else if (self.prune) {
        // Unlabeled, non-interactive structural wrappers are unrolled.
        if (structural and !is_interactive and !has_explicit_label) {
            should_visit = false;
        }

        // StaticText whose text is already contained in the parent's
        // accessible name would be redundant.
        if (std.mem.eql(u8, role, "StaticText") and node._parent != null) {
            if (parent_name != null and name != null and std.mem.indexOf(u8, parent_name.?, name.?) != null) {
                should_visit = false;
            }
        }
    }

    var did_visit = false;
    var should_walk_children = true;
    if (should_visit) {
        should_walk_children = try visitor.visit(node, &data);
        did_visit = true; // Always true if should_visit was true, because visit() executed and opened structures
    } else {
        // If we skip the node, we must NOT tell the visitor to close it later
        did_visit = false;
    }

    if (should_walk_children) {
        // If we are printing this node normally OR skipping it and unrolling its children,
        // we walk the children iterator.
        var it = node.childrenIterator();
        // Per-tag sibling counters, so each child gets its 1-based xpath index.
        var tag_counts = std.StringArrayHashMap(usize).init(self.arena);
        while (it.next()) |child| {
            var tag: []const u8 = "text()";
            if (child.is(Element)) |el| {
                tag = el.getTagNameLower();
            }

            const gop = try tag_counts.getOrPut(tag);
            if (!gop.found_existing) {
                gop.value_ptr.* = 0;
            }
            gop.value_ptr.* += 1;

            try self.walk(child, xpath_buffer, name, visitor, gop.value_ptr.*, listener_targets, current_depth + 1);
        }
    }

    if (did_visit) {
        try visitor.leave();
    }

    // Restore the xpath buffer for our siblings.
    xpath_buffer.shrinkRetainingCapacity(initial_xpath_len);
}
|
||||
|
||||
/// Collects the `<option>` children of `node` (including options nested one
/// level inside `<optgroup>`) into an arena-allocated slice.
/// Caller's arena owns the returned memory.
fn extractSelectOptions(node: *Node, page: *Page, arena: std.mem.Allocator) ![]OptionData {
    var options = std.ArrayListUnmanaged(OptionData){};
    var it = node.childrenIterator();
    while (it.next()) |child| {
        if (child.is(Element)) |el| {
            if (el.getTag() == .option) {
                try appendOptionData(child, &options, page, arena);
            } else if (el.getTag() == .optgroup) {
                var group_it = child.childrenIterator();
                while (group_it.next()) |group_child| {
                    try appendOptionData(group_child, &options, page, arena);
                }
            }
        }
    }
    return options.toOwnedSlice(arena);
}

/// Appends an `OptionData` entry for `node` if it is an `<option>` element;
/// otherwise does nothing.
fn appendOptionData(node: *Node, options: *std.ArrayListUnmanaged(OptionData), page: *Page, arena: std.mem.Allocator) !void {
    if (node.is(Element.Html.Option)) |opt| {
        try options.append(arena, .{
            .text = opt.getText(page),
            .value = opt.getValue(page),
            .selected = opt.getSelected(),
        });
    }
}
|
||||
|
||||
/// Resolves `list_id` (the value of an input's `list` attribute) to a
/// `<datalist>` element and extracts its options.
/// Returns null when the id does not resolve to a datalist.
fn extractDataListOptions(list_id: []const u8, page: *Page, arena: std.mem.Allocator) !?[]OptionData {
    if (page.document.getElementById(list_id, page)) |referenced_el| {
        if (referenced_el.getTag() == .datalist) {
            return try extractSelectOptions(referenced_el.asNode(), page, arena);
        }
    }
    return null;
}
|
||||
|
||||
/// Appends one xpath segment for `node`: "/tag[n]" for elements,
/// "/text()[n]" for text nodes. Other node kinds produce no segment.
fn appendXPathSegment(node: *Node, writer: anytype, index: usize) !void {
    if (node.is(Element)) |el| {
        const tag = el.getTagNameLower();
        try std.fmt.format(writer, "/{s}[{d}]", .{ tag, index });
    } else if (node.is(CData.Text)) |_| {
        try std.fmt.format(writer, "/text()[{d}]", .{index});
    }
}
|
||||
|
||||
/// Visitor that emits the semantic tree as a nested JSON structure.
/// `visit` opens an object and its "children" array; `leave` closes both.
const JsonVisitor = struct {
    jw: *std.json.Stringify,
    tree: Self,

    /// Emits one node object. Returns false when children were handled
    /// natively (options) and must not be walked.
    pub fn visit(self: *JsonVisitor, node: *Node, data: *NodeData) !bool {
        try self.jw.beginObject();

        // nodeId is emitted as a string, backendDOMNodeId as a number.
        try self.jw.objectField("nodeId");
        try self.jw.write(try std.fmt.allocPrint(self.tree.arena, "{d}", .{data.id}));

        try self.jw.objectField("backendDOMNodeId");
        try self.jw.write(data.id);

        try self.jw.objectField("nodeName");
        try self.jw.write(data.node_name);

        try self.jw.objectField("xpath");
        try self.jw.write(data.xpath);

        if (node.is(Element)) |el| {
            // nodeType 1 == ELEMENT_NODE
            try self.jw.objectField("nodeType");
            try self.jw.write(1);

            try self.jw.objectField("isInteractive");
            try self.jw.write(data.is_interactive);

            try self.jw.objectField("role");
            try self.jw.write(data.role);

            if (data.name) |name| {
                if (name.len > 0) {
                    try self.jw.objectField("name");
                    try self.jw.write(name);
                }
            }

            if (data.value) |value| {
                try self.jw.objectField("value");
                try self.jw.write(value);
            }

            if (el._attributes) |attrs| {
                try self.jw.objectField("attributes");
                try self.jw.beginObject();
                var iter = attrs.iterator();
                while (iter.next()) |attr| {
                    try self.jw.objectField(attr._name.str());
                    try self.jw.write(attr._value.str());
                }
                try self.jw.endObject();
            }

            if (data.options) |options| {
                try self.jw.objectField("options");
                try self.jw.beginArray();
                for (options) |opt| {
                    try self.jw.beginObject();
                    try self.jw.objectField("value");
                    try self.jw.write(opt.value);
                    try self.jw.objectField("text");
                    try self.jw.write(opt.text);
                    try self.jw.objectField("selected");
                    try self.jw.write(opt.selected);
                    try self.jw.endObject();
                }
                try self.jw.endArray();
            }
        } else if (node.is(CData.Text)) |text_node| {
            // nodeType 3 == TEXT_NODE
            try self.jw.objectField("nodeType");
            try self.jw.write(3);
            try self.jw.objectField("nodeValue");
            try self.jw.write(text_node.getWholeText());
        } else {
            // nodeType 9 == DOCUMENT_NODE
            try self.jw.objectField("nodeType");
            try self.jw.write(9);
        }

        // The children array is always opened here and closed in leave(),
        // even when we skip walking children.
        try self.jw.objectField("children");
        try self.jw.beginArray();

        if (data.options != null) {
            // Signal to not walk children, as we handled them natively
            return false;
        }

        return true;
    }

    /// Closes the "children" array and the node object opened by visit().
    pub fn leave(self: *JsonVisitor) !void {
        try self.jw.endArray();
        try self.jw.endObject();
    }
};
|
||||
|
||||
/// Returns true for purely structural/layout accessibility roles that carry
/// no content of their own and may be pruned or have their computed names
/// suppressed (see `walk`).
fn isStructuralRole(role: []const u8) bool {
    const structural_roles = std.StaticStringMap(void).initComptime(.{
        .{ "none", {} },
        .{ "generic", {} },
        .{ "InlineTextBox", {} },
        .{ "banner", {} },
        .{ "navigation", {} },
        .{ "main", {} },
        .{ "list", {} },
        .{ "listitem", {} },
        .{ "table", {} },
        .{ "rowgroup", {} },
        .{ "row", {} },
        .{ "cell", {} },
        .{ "region", {} },
    });
    return structural_roles.has(role);
}
|
||||
|
||||
/// Visitor that emits the semantic tree as an indented plain-text outline:
/// one node per line, two spaces... actually one space per depth level,
/// "id role 'name' value='v' options=['a','b'*]".
const TextVisitor = struct {
    writer: *std.Io.Writer,
    tree: Self,
    /// Current indentation level; incremented in visit(), decremented in leave().
    depth: usize,

    /// Emits one line for the node. Returns false when children must not
    /// be walked (options handled natively, or leaf-like named nodes).
    pub fn visit(self: *TextVisitor, node: *Node, data: *NodeData) !bool {
        for (0..self.depth) |_| {
            try self.writer.writeByte(' ');
        }

        // Prefer the accessible name; for bare text nodes fall back to the
        // trimmed text content.
        var name_to_print: ?[]const u8 = null;
        if (data.name) |n| {
            if (n.len > 0) {
                name_to_print = n;
            }
        } else if (node.is(CData.Text)) |text_node| {
            const trimmed = std.mem.trim(u8, text_node.getWholeText(), " \t\r\n");
            if (trimmed.len > 0) {
                name_to_print = trimmed;
            }
        }

        // Roles that add no information are omitted from the line.
        const is_text_only = std.mem.eql(u8, data.role, "StaticText") or std.mem.eql(u8, data.role, "none") or std.mem.eql(u8, data.role, "generic");

        try self.writer.print("{d}", .{data.id});
        if (!is_text_only) {
            try self.writer.print(" {s}", .{data.role});
        }
        if (name_to_print) |n| {
            try self.writer.print(" '{s}'", .{n});
        }

        if (data.value) |v| {
            if (v.len > 0) {
                try self.writer.print(" value='{s}'", .{v});
            }
        }

        if (data.options) |options| {
            // Selected options are marked with a trailing '*'.
            try self.writer.writeAll(" options=[");
            for (options, 0..) |opt, i| {
                if (i > 0) try self.writer.writeAll(",");
                try self.writer.print("'{s}'", .{opt.value});
                if (opt.selected) {
                    try self.writer.writeAll("*");
                }
            }
            try self.writer.writeAll("]\n");
            self.depth += 1;
            return false; // Native handling complete, do not walk children
        }

        try self.writer.writeByte('\n');
        self.depth += 1;

        // If this is a leaf-like semantic node and we already have a name,
        // skip children to avoid redundant StaticText or noise.
        const is_leaf_semantic = std.mem.eql(u8, data.role, "link") or
            std.mem.eql(u8, data.role, "button") or
            std.mem.eql(u8, data.role, "heading") or
            std.mem.eql(u8, data.role, "code");
        if (is_leaf_semantic and data.name != null and data.name.?.len > 0) {
            return false;
        }

        return true;
    }

    /// Restores the indentation level incremented by visit().
    pub fn leave(self: *TextVisitor) !void {
        if (self.depth > 0) {
            self.depth -= 1;
        }
    }
};
|
||||
|
||||
const testing = @import("testing.zig");
|
||||
|
||||
test "SemanticTree backendDOMNodeId" {
    var registry: CDPNode.Registry = .init(testing.allocator);
    defer registry.deinit();

    var page = try testing.pageTest("cdp/registry1.html");
    defer testing.reset();
    defer page._session.removePage();

    const st: Self = .{
        .dom_node = page.window._document.asNode(),
        .registry = &registry,
        .page = page,
        .arena = testing.arena_allocator,
        .prune = false,
        .interactive_only = false,
        .max_depth = std.math.maxInt(u32) - 1,
    };

    const json_str = try std.json.Stringify.valueAlloc(testing.allocator, st, .{});
    defer testing.allocator.free(json_str);

    // Every emitted node object must carry a backendDOMNodeId field.
    try testing.expect(std.mem.indexOf(u8, json_str, "\"backendDOMNodeId\":") != null);
}
|
||||
|
||||
test "SemanticTree max_depth" {
    var registry: CDPNode.Registry = .init(testing.allocator);
    defer registry.deinit();

    var page = try testing.pageTest("cdp/registry1.html");
    defer testing.reset();
    defer page._session.removePage();

    const st: Self = .{
        .dom_node = page.window._document.asNode(),
        .registry = &registry,
        .page = page,
        .arena = testing.arena_allocator,
        .prune = false,
        .interactive_only = false,
        .max_depth = 1,
    };

    var aw: std.Io.Writer.Allocating = .init(testing.allocator);
    defer aw.deinit();

    try st.textStringify(&aw.writer);
    const text_str = aw.written();

    // With max_depth = 1, deeper content from the fixture must not appear.
    try testing.expect(std.mem.indexOf(u8, text_str, "other") == null);
}
|
||||
@@ -64,17 +64,17 @@ pub fn init(app: *App, address: net.Address) !*Server {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Server) void {
|
||||
// Stop all active clients
|
||||
{
|
||||
self.client_mutex.lock();
|
||||
defer self.client_mutex.unlock();
|
||||
pub fn shutdown(self: *Server) void {
|
||||
self.client_mutex.lock();
|
||||
defer self.client_mutex.unlock();
|
||||
|
||||
for (self.clients.items) |client| {
|
||||
client.stop();
|
||||
}
|
||||
for (self.clients.items) |client| {
|
||||
client.stop();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Server) void {
|
||||
self.shutdown();
|
||||
self.joinThreads();
|
||||
self.clients.deinit(self.allocator);
|
||||
self.clients_pool.deinit();
|
||||
@@ -242,7 +242,10 @@ pub const Client = struct {
|
||||
fn stop(self: *Client) void {
|
||||
switch (self.mode) {
|
||||
.http => {},
|
||||
.cdp => |*cdp| cdp.browser.env.terminate(),
|
||||
.cdp => |*cdp| {
|
||||
cdp.browser.env.terminate();
|
||||
self.ws.sendClose();
|
||||
},
|
||||
}
|
||||
self.ws.shutdown();
|
||||
}
|
||||
@@ -295,7 +298,7 @@ pub const Client = struct {
|
||||
}
|
||||
|
||||
var cdp = &self.mode.cdp;
|
||||
var last_message = timestamp(.monotonic);
|
||||
var last_message = milliTimestamp(.monotonic);
|
||||
var ms_remaining = self.ws.timeout_ms;
|
||||
|
||||
while (true) {
|
||||
@@ -304,7 +307,7 @@ pub const Client = struct {
|
||||
if (self.readSocket() == false) {
|
||||
return;
|
||||
}
|
||||
last_message = timestamp(.monotonic);
|
||||
last_message = milliTimestamp(.monotonic);
|
||||
ms_remaining = self.ws.timeout_ms;
|
||||
},
|
||||
.no_page => {
|
||||
@@ -319,16 +322,18 @@ pub const Client = struct {
|
||||
if (self.readSocket() == false) {
|
||||
return;
|
||||
}
|
||||
last_message = timestamp(.monotonic);
|
||||
last_message = milliTimestamp(.monotonic);
|
||||
ms_remaining = self.ws.timeout_ms;
|
||||
},
|
||||
.done => {
|
||||
const elapsed = timestamp(.monotonic) - last_message;
|
||||
if (elapsed > ms_remaining) {
|
||||
const now = milliTimestamp(.monotonic);
|
||||
const elapsed = now - last_message;
|
||||
if (elapsed >= ms_remaining) {
|
||||
log.info(.app, "CDP timeout", .{});
|
||||
return;
|
||||
}
|
||||
ms_remaining -= @intCast(elapsed);
|
||||
last_message = now;
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -501,6 +506,7 @@ fn buildJSONVersionResponse(
|
||||
}
|
||||
|
||||
pub const timestamp = @import("datetime.zig").timestamp;
|
||||
pub const milliTimestamp = @import("datetime.zig").milliTimestamp;
|
||||
|
||||
const testing = std.testing;
|
||||
test "server: buildJSONVersionResponse" {
|
||||
|
||||
@@ -91,25 +91,32 @@ pub fn runMicrotasks(self: *Browser) void {
|
||||
self.env.runMicrotasks();
|
||||
}
|
||||
|
||||
pub fn runMacrotasks(self: *Browser) !?u64 {
|
||||
pub fn runMacrotasks(self: *Browser) !void {
|
||||
const env = &self.env;
|
||||
|
||||
const time_to_next = try self.env.runMacrotasks();
|
||||
try self.env.runMacrotasks();
|
||||
env.pumpMessageLoop();
|
||||
|
||||
// either of the above could have queued more microtasks
|
||||
env.runMicrotasks();
|
||||
|
||||
return time_to_next;
|
||||
}
|
||||
|
||||
pub fn hasBackgroundTasks(self: *Browser) bool {
|
||||
return self.env.hasBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn waitForBackgroundTasks(self: *Browser) void {
|
||||
self.env.waitForBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn msToNextMacrotask(self: *Browser) ?u64 {
|
||||
return self.env.msToNextMacrotask();
|
||||
}
|
||||
|
||||
pub fn msTo(self: *Browser) bool {
|
||||
return self.env.hasBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn runIdleTasks(self: *const Browser) void {
|
||||
self.env.runIdleTasks();
|
||||
}
|
||||
|
||||
@@ -233,6 +233,12 @@ const DispatchDirectOptions = struct {
|
||||
pub fn dispatchDirect(self: *EventManager, target: *EventTarget, event: *Event, handler: anytype, comptime opts: DispatchDirectOptions) !void {
|
||||
const page = self.page;
|
||||
|
||||
// Set window.event to the currently dispatching event (WHATWG spec)
|
||||
const window = page.window;
|
||||
const prev_event = window._current_event;
|
||||
window._current_event = event;
|
||||
defer window._current_event = prev_event;
|
||||
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, page._session);
|
||||
|
||||
@@ -398,6 +404,13 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
|
||||
}
|
||||
|
||||
const page = self.page;
|
||||
|
||||
// Set window.event to the currently dispatching event (WHATWG spec)
|
||||
const window = page.window;
|
||||
const prev_event = window._current_event;
|
||||
window._current_event = event;
|
||||
defer window._current_event = prev_event;
|
||||
|
||||
var was_handled = false;
|
||||
|
||||
// Create a single scope for all event handlers in this dispatch.
|
||||
|
||||
@@ -265,13 +265,15 @@ pub fn blob(_: *const Factory, arena: Allocator, child: anytype) !*@TypeOf(child
|
||||
return chain.get(1);
|
||||
}
|
||||
|
||||
pub fn abstractRange(self: *Factory, child: anytype, page: *Page) !*@TypeOf(child) {
|
||||
const allocator = self._slab.allocator();
|
||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(allocator);
|
||||
pub fn abstractRange(_: *const Factory, arena: Allocator, child: anytype, page: *Page) !*@TypeOf(child) {
|
||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(arena);
|
||||
|
||||
const doc = page.document.asNode();
|
||||
const abstract_range = chain.get(0);
|
||||
abstract_range.* = AbstractRange{
|
||||
._rc = 0,
|
||||
._arena = arena,
|
||||
._page_id = page.id,
|
||||
._type = unionInit(AbstractRange.Type, chain.get(1)),
|
||||
._end_offset = 0,
|
||||
._start_offset = 0,
|
||||
|
||||
@@ -30,6 +30,7 @@ const Notification = @import("../Notification.zig");
|
||||
const CookieJar = @import("../browser/webapi/storage/Cookie.zig").Jar;
|
||||
const Robots = @import("../network/Robots.zig");
|
||||
const RobotStore = Robots.RobotStore;
|
||||
const WebBotAuth = @import("../network/WebBotAuth.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
@@ -66,9 +67,18 @@ active: usize,
|
||||
// 'networkAlmostIdle' Page.lifecycleEvent in CDP).
|
||||
intercepted: usize,
|
||||
|
||||
// Our easy handles, managed by a curl multi.
|
||||
// Our curl multi handle.
|
||||
handles: Net.Handles,
|
||||
|
||||
// Connections currently in this client's curl_multi.
|
||||
in_use: std.DoublyLinkedList = .{},
|
||||
|
||||
// Connections that failed to be removed from curl_multi during perform.
|
||||
dirty: std.DoublyLinkedList = .{},
|
||||
|
||||
// Whether we're currently inside a curl_multi_perform call.
|
||||
performing: bool = false,
|
||||
|
||||
// Use to generate the next request ID
|
||||
next_request_id: u32 = 0,
|
||||
|
||||
@@ -88,8 +98,8 @@ pending_robots_queue: std.StringHashMapUnmanaged(std.ArrayList(Request)) = .empt
|
||||
// request. These wil come and go with each request.
|
||||
transfer_pool: std.heap.MemoryPool(Transfer),
|
||||
|
||||
// only needed for CDP which can change the proxy and then restore it. When
|
||||
// restoring, this originally-configured value is what it goes to.
|
||||
// The current proxy. CDP can change it, restoreOriginalProxy restores
|
||||
// from config.
|
||||
http_proxy: ?[:0]const u8 = null,
|
||||
|
||||
// track if the client use a proxy for connections.
|
||||
@@ -97,6 +107,11 @@ http_proxy: ?[:0]const u8 = null,
|
||||
// CDP.
|
||||
use_proxy: bool,
|
||||
|
||||
// Current TLS verification state, applied per-connection in makeRequest.
|
||||
tls_verify: bool = true,
|
||||
|
||||
obey_robots: bool,
|
||||
|
||||
cdp_client: ?CDPClient = null,
|
||||
|
||||
// libcurl can monitor arbitrary sockets, this lets us use libcurl to poll
|
||||
@@ -126,13 +141,8 @@ pub fn init(allocator: Allocator, network: *Network) !*Client {
|
||||
const client = try allocator.create(Client);
|
||||
errdefer allocator.destroy(client);
|
||||
|
||||
var handles = try Net.Handles.init(allocator, network.ca_blob, network.config);
|
||||
errdefer handles.deinit(allocator);
|
||||
|
||||
// Set transfer callbacks on each connection.
|
||||
for (handles.connections) |*conn| {
|
||||
try conn.setCallbacks(Transfer.headerCallback, Transfer.dataCallback);
|
||||
}
|
||||
var handles = try Net.Handles.init(network.config);
|
||||
errdefer handles.deinit();
|
||||
|
||||
const http_proxy = network.config.httpProxy();
|
||||
|
||||
@@ -145,6 +155,8 @@ pub fn init(allocator: Allocator, network: *Network) !*Client {
|
||||
.network = network,
|
||||
.http_proxy = http_proxy,
|
||||
.use_proxy = http_proxy != null,
|
||||
.tls_verify = network.config.tlsVerifyHost(),
|
||||
.obey_robots = network.config.obeyRobots(),
|
||||
.transfer_pool = transfer_pool,
|
||||
};
|
||||
|
||||
@@ -153,7 +165,7 @@ pub fn init(allocator: Allocator, network: *Network) !*Client {
|
||||
|
||||
pub fn deinit(self: *Client) void {
|
||||
self.abort();
|
||||
self.handles.deinit(self.allocator);
|
||||
self.handles.deinit();
|
||||
|
||||
self.transfer_pool.deinit();
|
||||
|
||||
@@ -182,14 +194,14 @@ pub fn abortFrame(self: *Client, frame_id: u32) void {
|
||||
// but abort can avoid the frame_id check at comptime.
|
||||
fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
{
|
||||
var q = &self.handles.in_use;
|
||||
var q = &self.in_use;
|
||||
var n = q.first;
|
||||
while (n) |node| {
|
||||
n = node.next;
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
var transfer = Transfer.fromConnection(conn) catch |err| {
|
||||
// Let's cleanup what we can
|
||||
self.handles.remove(conn);
|
||||
self.removeConn(conn);
|
||||
log.err(.http, "get private info", .{ .err = err, .source = "abort" });
|
||||
continue;
|
||||
};
|
||||
@@ -226,8 +238,7 @@ fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG and abort_all) {
|
||||
std.debug.assert(self.handles.in_use.first == null);
|
||||
std.debug.assert(self.handles.available.len() == self.handles.connections.len);
|
||||
std.debug.assert(self.in_use.first == null);
|
||||
|
||||
const running = self.handles.perform() catch |err| {
|
||||
lp.assert(false, "multi perform in abort", .{ .err = err });
|
||||
@@ -237,49 +248,45 @@ fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
}
|
||||
|
||||
pub fn tick(self: *Client, timeout_ms: u32) !PerformStatus {
|
||||
while (true) {
|
||||
if (self.handles.hasAvailable() == false) {
|
||||
while (self.queue.popFirst()) |queue_node| {
|
||||
const conn = self.network.getConnection() orelse {
|
||||
self.queue.prepend(queue_node);
|
||||
break;
|
||||
}
|
||||
const queue_node = self.queue.popFirst() orelse break;
|
||||
};
|
||||
const transfer: *Transfer = @fieldParentPtr("_node", queue_node);
|
||||
|
||||
// we know this exists, because we checked hasAvailable() above
|
||||
const conn = self.handles.get().?;
|
||||
try self.makeRequest(conn, transfer);
|
||||
}
|
||||
return self.perform(@intCast(timeout_ms));
|
||||
}
|
||||
|
||||
pub fn request(self: *Client, req: Request) !void {
|
||||
if (self.network.config.obeyRobots()) {
|
||||
const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
|
||||
errdefer self.allocator.free(robots_url);
|
||||
|
||||
// If we have this robots cached, we can take a fast path.
|
||||
if (self.network.robot_store.get(robots_url)) |robot_entry| {
|
||||
defer self.allocator.free(robots_url);
|
||||
|
||||
switch (robot_entry) {
|
||||
// If we have a found robots entry, we check it.
|
||||
.present => |robots| {
|
||||
const path = URL.getPathname(req.url);
|
||||
if (!robots.isAllowed(path)) {
|
||||
req.error_callback(req.ctx, error.RobotsBlocked);
|
||||
return;
|
||||
}
|
||||
},
|
||||
// Otherwise, we assume we won't find it again.
|
||||
.absent => {},
|
||||
}
|
||||
|
||||
return self.processRequest(req);
|
||||
}
|
||||
|
||||
return self.fetchRobotsThenProcessRequest(robots_url, req);
|
||||
if (self.obey_robots == false) {
|
||||
return self.processRequest(req);
|
||||
}
|
||||
|
||||
return self.processRequest(req);
|
||||
const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
|
||||
errdefer self.allocator.free(robots_url);
|
||||
|
||||
// If we have this robots cached, we can take a fast path.
|
||||
if (self.network.robot_store.get(robots_url)) |robot_entry| {
|
||||
defer self.allocator.free(robots_url);
|
||||
|
||||
switch (robot_entry) {
|
||||
// If we have a found robots entry, we check it.
|
||||
.present => |robots| {
|
||||
const path = URL.getPathname(req.url);
|
||||
if (!robots.isAllowed(path)) {
|
||||
req.error_callback(req.ctx, error.RobotsBlocked);
|
||||
return;
|
||||
}
|
||||
},
|
||||
// Otherwise, we assume we won't find it again.
|
||||
.absent => {},
|
||||
}
|
||||
|
||||
return self.processRequest(req);
|
||||
}
|
||||
return self.fetchRobotsThenProcessRequest(robots_url, req);
|
||||
}
|
||||
|
||||
fn processRequest(self: *Client, req: Request) !void {
|
||||
@@ -529,8 +536,8 @@ fn waitForInterceptedResponse(self: *Client, transfer: *Transfer) !bool {
|
||||
fn process(self: *Client, transfer: *Transfer) !void {
|
||||
// libcurl doesn't allow recursive calls, if we're in a `perform()` operation
|
||||
// then we _have_ to queue this.
|
||||
if (self.handles.performing == false) {
|
||||
if (self.handles.get()) |conn| {
|
||||
if (self.performing == false) {
|
||||
if (self.network.getConnection()) |conn| {
|
||||
return self.makeRequest(conn, transfer);
|
||||
}
|
||||
}
|
||||
@@ -644,10 +651,7 @@ fn requestFailed(transfer: *Transfer, err: anyerror, comptime execute_callback:
|
||||
// can be changed at any point in the easy's lifecycle.
|
||||
pub fn changeProxy(self: *Client, proxy: [:0]const u8) !void {
|
||||
try self.ensureNoActiveConnection();
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setProxy(proxy.ptr);
|
||||
}
|
||||
self.http_proxy = proxy;
|
||||
self.use_proxy = true;
|
||||
}
|
||||
|
||||
@@ -656,31 +660,21 @@ pub fn changeProxy(self: *Client, proxy: [:0]const u8) !void {
|
||||
pub fn restoreOriginalProxy(self: *Client) !void {
|
||||
try self.ensureNoActiveConnection();
|
||||
|
||||
const proxy = if (self.http_proxy) |p| p.ptr else null;
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setProxy(proxy);
|
||||
}
|
||||
self.use_proxy = proxy != null;
|
||||
self.http_proxy = self.network.config.httpProxy();
|
||||
self.use_proxy = self.http_proxy != null;
|
||||
}
|
||||
|
||||
// Enable TLS verification on all connections.
|
||||
pub fn enableTlsVerify(self: *Client) !void {
|
||||
pub fn setTlsVerify(self: *Client, verify: bool) !void {
|
||||
// Remove inflight connections check on enable TLS b/c chromiumoxide calls
|
||||
// the command during navigate and Curl seems to accept it...
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setTlsVerify(true, self.use_proxy);
|
||||
}
|
||||
}
|
||||
|
||||
// Disable TLS verification on all connections.
|
||||
pub fn disableTlsVerify(self: *Client) !void {
|
||||
// Remove inflight connections check on disable TLS b/c chromiumoxide calls
|
||||
// the command during navigate and Curl seems to accept it...
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setTlsVerify(false, self.use_proxy);
|
||||
var it = self.in_use.first;
|
||||
while (it) |node| : (it = node.next) {
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
try conn.setTlsVerify(verify, self.use_proxy);
|
||||
}
|
||||
self.tls_verify = verify;
|
||||
}
|
||||
|
||||
fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerror!void {
|
||||
@@ -691,9 +685,14 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
errdefer {
|
||||
transfer._conn = null;
|
||||
transfer.deinit();
|
||||
self.handles.isAvailable(conn);
|
||||
self.releaseConn(conn);
|
||||
}
|
||||
|
||||
// Set callbacks and per-client settings on the pooled connection.
|
||||
try conn.setCallbacks(Transfer.headerCallback, Transfer.dataCallback);
|
||||
try conn.setProxy(self.http_proxy);
|
||||
try conn.setTlsVerify(self.tls_verify, self.use_proxy);
|
||||
|
||||
try conn.setURL(req.url);
|
||||
try conn.setMethod(req.method);
|
||||
if (req.body) |b| {
|
||||
@@ -706,6 +705,12 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
try conn.secretHeaders(&header_list, &self.network.config.http_headers); // Add headers that must be hidden from intercepts
|
||||
try conn.setHeaders(&header_list);
|
||||
|
||||
// If we have WebBotAuth, sign our request.
|
||||
if (self.network.web_bot_auth) |*wba| {
|
||||
const authority = URL.getHost(req.url);
|
||||
try wba.signRequest(transfer.arena.allocator(), &header_list, authority);
|
||||
}
|
||||
|
||||
// Add cookies.
|
||||
if (header_list.cookies) |cookies| {
|
||||
try conn.setCookies(cookies);
|
||||
@@ -728,10 +733,12 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
// fails BEFORE `curl_multi_add_handle` succeeds, the we still need to do
|
||||
// cleanup. But if things fail after `curl_multi_add_handle`, we expect
|
||||
// perfom to pickup the failure and cleanup.
|
||||
self.in_use.append(&conn.node);
|
||||
self.handles.add(conn) catch |err| {
|
||||
transfer._conn = null;
|
||||
transfer.deinit();
|
||||
self.handles.isAvailable(conn);
|
||||
self.in_use.remove(&conn.node);
|
||||
self.releaseConn(conn);
|
||||
return err;
|
||||
};
|
||||
|
||||
@@ -752,7 +759,22 @@ pub const PerformStatus = enum {
|
||||
};
|
||||
|
||||
fn perform(self: *Client, timeout_ms: c_int) !PerformStatus {
|
||||
const running = try self.handles.perform();
|
||||
const running = blk: {
|
||||
self.performing = true;
|
||||
defer self.performing = false;
|
||||
|
||||
break :blk try self.handles.perform();
|
||||
};
|
||||
|
||||
// Process dirty connections — return them to Runtime pool.
|
||||
while (self.dirty.popFirst()) |node| {
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
self.handles.remove(conn) catch |err| {
|
||||
log.fatal(.http, "multi remove handle", .{ .err = err, .src = "perform" });
|
||||
@panic("multi_remove_handle");
|
||||
};
|
||||
self.releaseConn(conn);
|
||||
}
|
||||
|
||||
// We're potentially going to block for a while until we get data. Process
|
||||
// whatever messages we have waiting ahead of time.
|
||||
@@ -871,11 +893,26 @@ fn processMessages(self: *Client) !bool {
|
||||
|
||||
fn endTransfer(self: *Client, transfer: *Transfer) void {
|
||||
const conn = transfer._conn.?;
|
||||
self.handles.remove(conn);
|
||||
self.removeConn(conn);
|
||||
transfer._conn = null;
|
||||
self.active -= 1;
|
||||
}
|
||||
|
||||
fn removeConn(self: *Client, conn: *Net.Connection) void {
|
||||
self.in_use.remove(&conn.node);
|
||||
if (self.handles.remove(conn)) {
|
||||
self.releaseConn(conn);
|
||||
} else |_| {
|
||||
// Can happen if we're in a perform() call, so we'll queue this
|
||||
// for cleanup later.
|
||||
self.dirty.append(&conn.node);
|
||||
}
|
||||
}
|
||||
|
||||
fn releaseConn(self: *Client, conn: *Net.Connection) void {
|
||||
self.network.releaseConnection(conn);
|
||||
}
|
||||
|
||||
fn ensureNoActiveConnection(self: *const Client) !void {
|
||||
if (self.active > 0) {
|
||||
return error.InflightConnection;
|
||||
@@ -898,7 +935,7 @@ pub const RequestCookie = struct {
|
||||
|
||||
if (arr.items.len > 0) {
|
||||
try arr.append(temp, 0); //null terminate
|
||||
headers.cookies = @ptrCast(arr.items.ptr);
|
||||
headers.cookies = @as([*c]const u8, @ptrCast(arr.items.ptr));
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -1023,7 +1060,7 @@ pub const Transfer = struct {
|
||||
fn deinit(self: *Transfer) void {
|
||||
self.req.headers.deinit();
|
||||
if (self._conn) |conn| {
|
||||
self.client.handles.remove(conn);
|
||||
self.client.removeConn(conn);
|
||||
}
|
||||
self.arena.deinit();
|
||||
self.client.transfer_pool.destroy(self);
|
||||
@@ -1093,7 +1130,7 @@ pub const Transfer = struct {
|
||||
requestFailed(self, err, true);
|
||||
|
||||
const client = self.client;
|
||||
if (self._performing or client.handles.performing) {
|
||||
if (self._performing or client.performing) {
|
||||
// We're currently in a curl_multi_perform. We cannot call endTransfer
|
||||
// as that calls curl_multi_remove_handle, and you can't do that
|
||||
// from a curl callback. Instead, we flag this transfer and all of
|
||||
@@ -1258,6 +1295,16 @@ pub const Transfer = struct {
|
||||
|
||||
if (buf_len < 3) {
|
||||
// could be \r\n or \n.
|
||||
// We get the last header line.
|
||||
if (transfer._redirecting) {
|
||||
// parse and set cookies for the redirection.
|
||||
redirectionCookies(transfer, &conn) catch |err| {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "redirection cookies", .{ .err = err });
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
@@ -1324,38 +1371,22 @@ pub const Transfer = struct {
|
||||
transfer.bytes_received += buf_len;
|
||||
}
|
||||
|
||||
if (buf_len > 2) {
|
||||
if (transfer._auth_challenge != null) {
|
||||
// try to parse auth challenge.
|
||||
if (std.ascii.startsWithIgnoreCase(header, "WWW-Authenticate") or
|
||||
std.ascii.startsWithIgnoreCase(header, "Proxy-Authenticate"))
|
||||
{
|
||||
const ac = AuthChallenge.parse(
|
||||
transfer._auth_challenge.?.status,
|
||||
header,
|
||||
) catch |err| {
|
||||
// We can't parse the auth challenge
|
||||
log.err(.http, "parse auth challenge", .{ .err = err, .header = header });
|
||||
// Should we cancel the request? I don't think so.
|
||||
return buf_len;
|
||||
};
|
||||
transfer._auth_challenge = ac;
|
||||
}
|
||||
if (transfer._auth_challenge != null) {
|
||||
// try to parse auth challenge.
|
||||
if (std.ascii.startsWithIgnoreCase(header, "WWW-Authenticate") or
|
||||
std.ascii.startsWithIgnoreCase(header, "Proxy-Authenticate"))
|
||||
{
|
||||
const ac = AuthChallenge.parse(
|
||||
transfer._auth_challenge.?.status,
|
||||
header,
|
||||
) catch |err| {
|
||||
// We can't parse the auth challenge
|
||||
log.err(.http, "parse auth challenge", .{ .err = err, .header = header });
|
||||
// Should we cancel the request? I don't think so.
|
||||
return buf_len;
|
||||
};
|
||||
transfer._auth_challenge = ac;
|
||||
}
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
// Starting here, we get the last header line.
|
||||
|
||||
if (transfer._redirecting) {
|
||||
// parse and set cookies for the redirection.
|
||||
redirectionCookies(transfer, &conn) catch |err| {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "redirection cookies", .{ .err = err });
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
return buf_len;
|
||||
|
||||
@@ -25,6 +25,7 @@ params: []const u8 = "",
|
||||
// We keep 41 for null-termination since HTML parser expects in this format.
|
||||
charset: [41]u8 = default_charset,
|
||||
charset_len: usize = default_charset_len,
|
||||
is_default_charset: bool = true,
|
||||
|
||||
/// String "UTF-8" continued by null characters.
|
||||
const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
|
||||
@@ -130,6 +131,7 @@ pub fn parse(input: []u8) !Mime {
|
||||
|
||||
var charset: [41]u8 = default_charset;
|
||||
var charset_len: usize = default_charset_len;
|
||||
var has_explicit_charset = false;
|
||||
|
||||
var it = std.mem.splitScalar(u8, params, ';');
|
||||
while (it.next()) |attr| {
|
||||
@@ -156,6 +158,7 @@ pub fn parse(input: []u8) !Mime {
|
||||
// Null-terminate right after attribute value.
|
||||
charset[attribute_value.len] = 0;
|
||||
charset_len = attribute_value.len;
|
||||
has_explicit_charset = true;
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -165,9 +168,137 @@ pub fn parse(input: []u8) !Mime {
|
||||
.charset = charset,
|
||||
.charset_len = charset_len,
|
||||
.content_type = content_type,
|
||||
.is_default_charset = !has_explicit_charset,
|
||||
};
|
||||
}
|
||||
|
||||
/// Prescan the first 1024 bytes of an HTML document for a charset declaration.
|
||||
/// Looks for `<meta charset="X">` and `<meta http-equiv="Content-Type" content="...;charset=X">`.
|
||||
/// Returns the charset value or null if none found.
|
||||
/// See: https://www.w3.org/International/questions/qa-html-encoding-declarations
|
||||
pub fn prescanCharset(html: []const u8) ?[]const u8 {
|
||||
const limit = @min(html.len, 1024);
|
||||
const data = html[0..limit];
|
||||
|
||||
// Scan for <meta tags
|
||||
var pos: usize = 0;
|
||||
while (pos < data.len) {
|
||||
// Find next '<'
|
||||
pos = std.mem.indexOfScalarPos(u8, data, pos, '<') orelse return null;
|
||||
pos += 1;
|
||||
if (pos >= data.len) return null;
|
||||
|
||||
// Check for "meta" (case-insensitive)
|
||||
if (pos + 4 >= data.len) return null;
|
||||
var tag_buf: [4]u8 = undefined;
|
||||
_ = std.ascii.lowerString(&tag_buf, data[pos..][0..4]);
|
||||
if (!std.mem.eql(u8, &tag_buf, "meta")) {
|
||||
continue;
|
||||
}
|
||||
pos += 4;
|
||||
|
||||
// Must be followed by whitespace or end of tag
|
||||
if (pos >= data.len) return null;
|
||||
if (data[pos] != ' ' and data[pos] != '\t' and data[pos] != '\n' and
|
||||
data[pos] != '\r' and data[pos] != '/')
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Scan attributes within this meta tag
|
||||
const tag_end = std.mem.indexOfScalarPos(u8, data, pos, '>') orelse return null;
|
||||
const attrs = data[pos..tag_end];
|
||||
|
||||
// Look for charset= attribute directly
|
||||
if (findAttrValue(attrs, "charset")) |charset| {
|
||||
if (charset.len > 0 and charset.len <= 40) return charset;
|
||||
}
|
||||
|
||||
// Look for http-equiv="content-type" with content="...;charset=X"
|
||||
if (findAttrValue(attrs, "http-equiv")) |he| {
|
||||
if (std.ascii.eqlIgnoreCase(he, "content-type")) {
|
||||
if (findAttrValue(attrs, "content")) |content| {
|
||||
if (extractCharsetFromContentType(content)) |charset| {
|
||||
return charset;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pos = tag_end + 1;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 {
|
||||
var pos: usize = 0;
|
||||
while (pos < attrs.len) {
|
||||
// Skip whitespace
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t' or
|
||||
attrs[pos] == '\n' or attrs[pos] == '\r'))
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= attrs.len) return null;
|
||||
|
||||
// Read attribute name
|
||||
const attr_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != '=' and attrs[pos] != ' ' and
|
||||
attrs[pos] != '\t' and attrs[pos] != '>' and attrs[pos] != '/')
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
const attr_name = attrs[attr_start..pos];
|
||||
|
||||
// Skip whitespace around =
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
|
||||
if (pos >= attrs.len or attrs[pos] != '=') {
|
||||
// No '=' found - skip this token. Advance at least one byte to avoid infinite loop.
|
||||
if (pos == attr_start) pos += 1;
|
||||
continue;
|
||||
}
|
||||
pos += 1; // skip '='
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
|
||||
if (pos >= attrs.len) return null;
|
||||
|
||||
// Read attribute value
|
||||
const value = blk: {
|
||||
if (attrs[pos] == '"' or attrs[pos] == '\'') {
|
||||
const quote = attrs[pos];
|
||||
pos += 1;
|
||||
const val_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != quote) pos += 1;
|
||||
const val = attrs[val_start..pos];
|
||||
if (pos < attrs.len) pos += 1; // skip closing quote
|
||||
break :blk val;
|
||||
} else {
|
||||
const val_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != ' ' and attrs[pos] != '\t' and
|
||||
attrs[pos] != '>' and attrs[pos] != '/')
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
break :blk attrs[val_start..pos];
|
||||
}
|
||||
};
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(attr_name, name)) return value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn extractCharsetFromContentType(content: []const u8) ?[]const u8 {
|
||||
var it = std.mem.splitScalar(u8, content, ';');
|
||||
while (it.next()) |part| {
|
||||
const trimmed = std.mem.trimLeft(u8, part, &.{ ' ', '\t' });
|
||||
if (trimmed.len > 8 and std.ascii.eqlIgnoreCase(trimmed[0..8], "charset=")) {
|
||||
const val = std.mem.trim(u8, trimmed[8..], &.{ ' ', '\t', '"', '\'' });
|
||||
if (val.len > 0 and val.len <= 40) return val;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn sniff(body: []const u8) ?Mime {
|
||||
// 0x0C is form feed
|
||||
const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C });
|
||||
@@ -178,15 +309,30 @@ pub fn sniff(body: []const u8) ?Mime {
|
||||
if (content[0] != '<') {
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) {
|
||||
// UTF-8 BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = default_charset,
|
||||
.charset_len = default_charset_len,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) {
|
||||
// UTF-16 big-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = .{ 'U', 'T', 'F', '-', '1', '6', 'B', 'E' } ++ .{0} ** 33,
|
||||
.charset_len = 8,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) {
|
||||
// UTF-16 little-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = .{ 'U', 'T', 'F', '-', '1', '6', 'L', 'E' } ++ .{0} ** 33,
|
||||
.charset_len = 8,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -540,6 +686,24 @@ test "Mime: sniff" {
|
||||
|
||||
try expectHTML("<!-->");
|
||||
try expectHTML(" \n\t <!-->");
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xEF, 0xBB, 0xBF }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-8", mime.charsetString());
|
||||
}
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xFE, 0xFF }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-16BE", mime.charsetString());
|
||||
}
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xFF, 0xFE }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-16LE", mime.charsetString());
|
||||
}
|
||||
}
|
||||
|
||||
const Expectation = struct {
|
||||
@@ -576,3 +740,35 @@ fn expect(expected: Expectation, input: []const u8) !void {
|
||||
try testing.expectEqual(m.charsetStringZ(), actual.charsetStringZ());
|
||||
}
|
||||
}
|
||||
|
||||
test "Mime: prescanCharset" {
|
||||
// <meta charset="X">
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<html><head><meta charset=\"utf-8\">").?);
|
||||
try testing.expectEqual("iso-8859-1", Mime.prescanCharset("<html><head><meta charset=\"iso-8859-1\">").?);
|
||||
try testing.expectEqual("shift_jis", Mime.prescanCharset("<meta charset='shift_jis'>").?);
|
||||
|
||||
// Case-insensitive tag matching
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<META charset=\"utf-8\">").?);
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<Meta charset=\"utf-8\">").?);
|
||||
|
||||
// <meta http-equiv="Content-Type" content="text/html; charset=X">
|
||||
try testing.expectEqual(
|
||||
"iso-8859-1",
|
||||
Mime.prescanCharset("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">").?,
|
||||
);
|
||||
|
||||
// No charset found
|
||||
try testing.expectEqual(null, Mime.prescanCharset("<html><head><title>Test</title>"));
|
||||
try testing.expectEqual(null, Mime.prescanCharset(""));
|
||||
try testing.expectEqual(null, Mime.prescanCharset("no html here"));
|
||||
|
||||
// Self-closing meta without charset must not loop forever
|
||||
try testing.expectEqual(null, Mime.prescanCharset("<meta foo=\"bar\"/>"));
|
||||
|
||||
// Charset after 1024 bytes should not be found
|
||||
var long_html: [1100]u8 = undefined;
|
||||
@memset(&long_html, ' ');
|
||||
const suffix = "<meta charset=\"windows-1252\">";
|
||||
@memcpy(long_html[1050 .. 1050 + suffix.len], suffix);
|
||||
try testing.expectEqual(null, Mime.prescanCharset(&long_html));
|
||||
}
|
||||
|
||||
@@ -62,6 +62,7 @@ const storage = @import("webapi/storage/storage.zig");
|
||||
const PageTransitionEvent = @import("webapi/event/PageTransitionEvent.zig");
|
||||
const NavigationKind = @import("webapi/navigation/root.zig").NavigationKind;
|
||||
const KeyboardEvent = @import("webapi/event/KeyboardEvent.zig");
|
||||
const MouseEvent = @import("webapi/event/MouseEvent.zig");
|
||||
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
const ArenaPool = App.ArenaPool;
|
||||
@@ -80,6 +81,8 @@ pub const BUF_SIZE = 1024;
|
||||
|
||||
const Page = @This();
|
||||
|
||||
id: u32,
|
||||
|
||||
// This is the "id" of the frame. It can be re-used from page-to-page, e.g.
|
||||
// when navigating.
|
||||
_frame_id: u32,
|
||||
@@ -254,6 +257,7 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
})).asDocument();
|
||||
|
||||
self.* = .{
|
||||
.id = session.nextPageId(),
|
||||
.js = undefined,
|
||||
.parent = parent,
|
||||
.arena = session.page_arena,
|
||||
@@ -304,14 +308,16 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
document._page = self;
|
||||
|
||||
if (comptime builtin.is_test == false) {
|
||||
// HTML test runner manually calls these as necessary
|
||||
try self.js.scheduler.add(session.browser, struct {
|
||||
fn runIdleTasks(ctx: *anyopaque) !?u32 {
|
||||
const b: *@import("Browser.zig") = @ptrCast(@alignCast(ctx));
|
||||
b.runIdleTasks();
|
||||
return 200;
|
||||
}
|
||||
}.runIdleTasks, 200, .{ .name = "page.runIdleTasks", .low_priority = true });
|
||||
if (parent == null) {
|
||||
// HTML test runner manually calls these as necessary
|
||||
try self.js.scheduler.add(session.browser, struct {
|
||||
fn runIdleTasks(ctx: *anyopaque) !?u32 {
|
||||
const b: *@import("Browser.zig") = @ptrCast(@alignCast(ctx));
|
||||
b.runIdleTasks();
|
||||
return 200;
|
||||
}
|
||||
}.runIdleTasks, 200, .{ .name = "page.runIdleTasks", .low_priority = true });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -404,6 +410,11 @@ pub fn isSameOrigin(self: *const Page, url: [:0]const u8) !bool {
|
||||
return std.mem.startsWith(u8, url, current_origin);
|
||||
}
|
||||
|
||||
/// Look up a blob URL in this page's registry.
|
||||
pub fn lookupBlobUrl(self: *Page, url: []const u8) ?*Blob {
|
||||
return self._blob_urls.get(url);
|
||||
}
|
||||
|
||||
pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !void {
|
||||
lp.assert(self._load_state == .waiting, "page.renavigate", .{});
|
||||
const session = self._session;
|
||||
@@ -419,12 +430,17 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
|
||||
.type = self._type,
|
||||
});
|
||||
|
||||
// if the url is about:blank, we load an empty HTML document in the
|
||||
// page and dispatch the events.
|
||||
if (std.mem.eql(u8, "about:blank", request_url)) {
|
||||
self.url = "about:blank";
|
||||
// Handle synthetic navigations: about:blank and blob: URLs
|
||||
const is_about_blank = std.mem.eql(u8, "about:blank", request_url);
|
||||
const is_blob = !is_about_blank and std.mem.startsWith(u8, request_url, "blob:");
|
||||
|
||||
if (self.parent) |parent| {
|
||||
if (is_about_blank or is_blob) {
|
||||
self.url = if (is_about_blank) "about:blank" else try self.arena.dupeZ(u8, request_url);
|
||||
|
||||
if (is_blob) {
|
||||
// strip out blob:
|
||||
self.origin = try URL.getOrigin(self.arena, request_url[5.. :0]);
|
||||
} else if (self.parent) |parent| {
|
||||
self.origin = parent.origin;
|
||||
} else {
|
||||
self.origin = null;
|
||||
@@ -435,10 +451,29 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
|
||||
// It's important to force a reset during the following navigation.
|
||||
self._parse_state = .complete;
|
||||
|
||||
self.document.injectBlank(self) catch |err| {
|
||||
log.err(.browser, "inject blank", .{ .err = err });
|
||||
return error.InjectBlankFailed;
|
||||
};
|
||||
// Content injection
|
||||
if (is_blob) {
|
||||
// For navigation, walk up the parent chain to find blob URLs
|
||||
// (e.g., parent creates blob URL and sets iframe.src to it)
|
||||
const blob = blk: {
|
||||
var current: ?*Page = self.parent;
|
||||
while (current) |page| {
|
||||
if (page._blob_urls.get(request_url)) |b| break :blk b;
|
||||
current = page.parent;
|
||||
}
|
||||
log.warn(.js, "invalid blob", .{ .url = request_url });
|
||||
return error.BlobNotFound;
|
||||
};
|
||||
const parse_arena = try self.getArena(.{ .debug = "Page.parseBlob" });
|
||||
defer self.releaseArena(parse_arena);
|
||||
var parser = Parser.init(parse_arena, self.document.asNode(), self);
|
||||
parser.parse(blob._slice);
|
||||
} else {
|
||||
self.document.injectBlank(self) catch |err| {
|
||||
log.err(.browser, "inject blank", .{ .err = err });
|
||||
return error.InjectBlankFailed;
|
||||
};
|
||||
}
|
||||
self.documentIsComplete();
|
||||
|
||||
session.notification.dispatch(.page_navigate, &.{
|
||||
@@ -452,7 +487,7 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
|
||||
// Record telemetry for navigation
|
||||
session.browser.app.telemetry.record(.{
|
||||
.navigate = .{
|
||||
.tls = false, // about:blank is not TLS
|
||||
.tls = false, // about:blank and blob: are not TLS
|
||||
.proxy = session.browser.app.config.httpProxy() != null,
|
||||
},
|
||||
});
|
||||
@@ -562,12 +597,9 @@ fn scheduleNavigationWithArena(originator: *Page, arena: Allocator, request_url:
|
||||
};
|
||||
|
||||
const target = switch (nt) {
|
||||
.form, .anchor => |p| p,
|
||||
.script => |p| p orelse originator,
|
||||
.iframe => |iframe| iframe._window.?._page, // only an frame with existing content (i.e. a window) can be navigated
|
||||
.anchor, .form => |node| blk: {
|
||||
const doc = node.ownerDocument(originator) orelse break :blk originator;
|
||||
break :blk doc._page orelse originator;
|
||||
},
|
||||
};
|
||||
|
||||
const session = target._session;
|
||||
@@ -680,11 +712,14 @@ pub fn scriptsCompletedLoading(self: *Page) void {
|
||||
}
|
||||
|
||||
pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void {
|
||||
blk: {
|
||||
var ls: JS.Local.Scope = undefined;
|
||||
self.js.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
var ls: JS.Local.Scope = undefined;
|
||||
self.js.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
const entered = self.js.enter(&ls.handle_scope);
|
||||
defer entered.exit();
|
||||
|
||||
blk: {
|
||||
const event = Event.initTrusted(comptime .wrap("load"), .{}, self) catch |err| {
|
||||
log.err(.page, "iframe event init", .{ .err = err, .url = iframe._src });
|
||||
break :blk;
|
||||
@@ -693,6 +728,7 @@ pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void {
|
||||
log.warn(.js, "iframe onload", .{ .err = err, .url = iframe._src });
|
||||
};
|
||||
}
|
||||
|
||||
self.pendingLoadCompleted();
|
||||
}
|
||||
|
||||
@@ -763,6 +799,10 @@ fn _documentIsComplete(self: *Page) !void {
|
||||
try self._event_manager.dispatchDirect(window_target, pageshow_event, self.window._on_pageshow, .{ .context = "page show" });
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.page, "load", .{ .url = self.url, .type = self._type });
|
||||
}
|
||||
|
||||
self.notifyParentLoadComplete();
|
||||
}
|
||||
|
||||
@@ -815,13 +855,25 @@ fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
if (self._parse_state == .pre) {
|
||||
// we lazily do this, because we might need the first chunk of data
|
||||
// to sniff the content type
|
||||
const mime: Mime = blk: {
|
||||
var mime: Mime = blk: {
|
||||
if (transfer.response_header.?.contentType()) |ct| {
|
||||
break :blk try Mime.parse(ct);
|
||||
}
|
||||
break :blk Mime.sniff(data);
|
||||
} orelse .unknown;
|
||||
|
||||
// If the HTTP Content-Type header didn't specify a charset and this is HTML,
|
||||
// prescan the first 1024 bytes for a <meta charset> declaration.
|
||||
if (mime.content_type == .text_html and mime.is_default_charset) {
|
||||
if (Mime.prescanCharset(data)) |charset| {
|
||||
if (charset.len <= 40) {
|
||||
@memcpy(mime.charset[0..charset.len], charset);
|
||||
mime.charset[charset.len] = 0;
|
||||
mime.charset_len = charset.len;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.page, "navigate first chunk", .{
|
||||
.content_type = mime.content_type,
|
||||
@@ -977,6 +1029,14 @@ pub fn scriptAddedCallback(self: *Page, comptime from_parser: bool, script: *Ele
|
||||
return;
|
||||
}
|
||||
|
||||
if (comptime from_parser) {
|
||||
// parser-inserted scripts have force-async set to false, but only if
|
||||
// they have src or non-empty content
|
||||
if (script._src.len > 0 or script.asNode().firstChild() != null) {
|
||||
script._force_async = false;
|
||||
}
|
||||
}
|
||||
|
||||
self._script_manager.addFromElement(from_parser, script, "parsing") catch |err| {
|
||||
log.err(.page, "page.scriptAddedCallback", .{
|
||||
.err = err,
|
||||
@@ -1050,7 +1110,6 @@ pub fn iframeAddedCallback(self: *Page, iframe: *IFrame) !void {
|
||||
log.warn(.page, "iframe navigate failure", .{ .url = url, .err = err });
|
||||
self._pending_loads -= 1;
|
||||
iframe._window = null;
|
||||
page_frame.deinit(true);
|
||||
return error.IFrameLoadError;
|
||||
};
|
||||
|
||||
@@ -1459,6 +1518,8 @@ pub fn adoptNodeTree(self: *Page, node: *Node, new_owner: *Document) !void {
|
||||
}
|
||||
|
||||
pub fn createElementNS(self: *Page, namespace: Element.Namespace, name: []const u8, attribute_iterator: anytype) !*Node {
|
||||
const from_parser = @TypeOf(attribute_iterator) == Parser.AttributeIterator;
|
||||
|
||||
switch (namespace) {
|
||||
.html => {
|
||||
switch (name.len) {
|
||||
@@ -2129,6 +2190,15 @@ pub fn createElementNS(self: *Page, namespace: Element.Namespace, name: []const
|
||||
self.js.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
if (from_parser) {
|
||||
// There are some things custom elements aren't allowed to do
|
||||
// when we're parsing.
|
||||
self.document._throw_on_dynamic_markup_insertion_counter += 1;
|
||||
}
|
||||
defer if (from_parser) {
|
||||
self.document._throw_on_dynamic_markup_insertion_counter -= 1;
|
||||
};
|
||||
|
||||
var caught: JS.TryCatch.Caught = undefined;
|
||||
_ = ls.toLocal(def.constructor).newInstance(&caught) catch |err| {
|
||||
log.warn(.js, "custom element constructor", .{ .name = name, .err = err, .caught = caught, .type = self._type, .url = self.url });
|
||||
@@ -2599,6 +2669,8 @@ pub fn _insertNodeRelative(self: *Page, comptime from_parser: bool, parent: *Nod
|
||||
}
|
||||
}
|
||||
|
||||
const parent_is_connected = parent.isConnected();
|
||||
|
||||
// Tri-state behavior for mutations:
|
||||
// 1. from_parser=true, parse_mode=document -> no mutations (initial document parse)
|
||||
// 2. from_parser=true, parse_mode=fragment -> mutations (innerHTML additions)
|
||||
@@ -2614,6 +2686,15 @@ pub fn _insertNodeRelative(self: *Page, comptime from_parser: bool, parent: *Nod
|
||||
// When the parser adds the node, nodeIsReady is only called when the
|
||||
// nodeComplete() callback is executed.
|
||||
try self.nodeIsReady(false, child);
|
||||
|
||||
// Check if text was added to a script that hasn't started yet.
|
||||
if (child._type == .cdata and parent_is_connected) {
|
||||
if (parent.is(Element.Html.Script)) |script| {
|
||||
if (!script._executed) {
|
||||
try self.nodeIsReady(false, parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Notify mutation observers about childList change
|
||||
@@ -2652,7 +2733,6 @@ pub fn _insertNodeRelative(self: *Page, comptime from_parser: bool, parent: *Nod
|
||||
}
|
||||
|
||||
const parent_in_shadow = parent.is(ShadowRoot) != null or parent.isInShadowTree();
|
||||
const parent_is_connected = parent.isConnected();
|
||||
|
||||
if (!parent_in_shadow and !parent_is_connected) {
|
||||
return;
|
||||
@@ -3106,9 +3186,9 @@ const NavigationType = enum {
|
||||
};
|
||||
|
||||
const Navigation = union(NavigationType) {
|
||||
form: *Node,
|
||||
form: *Page,
|
||||
script: ?*Page,
|
||||
anchor: *Node,
|
||||
anchor: *Page,
|
||||
iframe: *IFrame,
|
||||
};
|
||||
|
||||
@@ -3120,6 +3200,69 @@ pub const QueuedNavigation = struct {
|
||||
navigation_type: NavigationType,
|
||||
};
|
||||
|
||||
/// Resolves a target attribute value (e.g., "_self", "_parent", "_top", or frame name)
|
||||
/// to the appropriate Page to navigate.
|
||||
/// Returns null if the target is "_blank" (which would open a new window/tab).
|
||||
/// Note: Callers should handle empty target separately (for owner document resolution).
|
||||
pub fn resolveTargetPage(self: *Page, target_name: []const u8) ?*Page {
|
||||
if (std.ascii.eqlIgnoreCase(target_name, "_self")) {
|
||||
return self;
|
||||
}
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(target_name, "_blank")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(target_name, "_parent")) {
|
||||
return self.parent orelse self;
|
||||
}
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(target_name, "_top")) {
|
||||
var page = self;
|
||||
while (page.parent) |p| {
|
||||
page = p;
|
||||
}
|
||||
return page;
|
||||
}
|
||||
|
||||
// Named frame lookup: search current page's descendants first, then from root
|
||||
// This follows the HTML spec's "implementation-defined" search order.
|
||||
if (findFrameByName(self, target_name)) |frame_page| {
|
||||
return frame_page;
|
||||
}
|
||||
|
||||
// If not found in descendants, search from root (catches siblings and ancestors' descendants)
|
||||
var root = self;
|
||||
while (root.parent) |p| {
|
||||
root = p;
|
||||
}
|
||||
if (root != self) {
|
||||
if (findFrameByName(root, target_name)) |frame_page| {
|
||||
return frame_page;
|
||||
}
|
||||
}
|
||||
|
||||
// If no frame found with that name, navigate in current page
|
||||
// (this matches browser behavior - unknown targets act like _self)
|
||||
return self;
|
||||
}
|
||||
|
||||
fn findFrameByName(page: *Page, name: []const u8) ?*Page {
|
||||
for (page.frames.items) |frame| {
|
||||
if (frame.iframe) |iframe| {
|
||||
const frame_name = iframe.asElement().getAttributeSafe(comptime .wrap("name")) orelse "";
|
||||
if (std.mem.eql(u8, frame_name, name)) {
|
||||
return frame;
|
||||
}
|
||||
}
|
||||
// Recursively search child frames
|
||||
if (findFrameByName(frame, name)) |found| {
|
||||
return found;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn triggerMouseClick(self: *Page, x: f64, y: f64) !void {
|
||||
const target = (try self.window._document.elementFromPoint(x, y, self)) orelse return;
|
||||
if (comptime IS_DEBUG) {
|
||||
@@ -3131,14 +3274,14 @@ pub fn triggerMouseClick(self: *Page, x: f64, y: f64) !void {
|
||||
.type = self._type,
|
||||
});
|
||||
}
|
||||
const event = (try @import("webapi/event/MouseEvent.zig").init("click", .{
|
||||
const mouse_event: *MouseEvent = try .initTrusted(comptime .wrap("click"), .{
|
||||
.bubbles = true,
|
||||
.cancelable = true,
|
||||
.composed = true,
|
||||
.clientX = x,
|
||||
.clientY = y,
|
||||
}, self)).asEvent();
|
||||
try self._event_manager.dispatch(target.asEventTarget(), event);
|
||||
}, self);
|
||||
try self._event_manager.dispatch(target.asEventTarget(), mouse_event.asEvent());
|
||||
}
|
||||
|
||||
// callback when the "click" event reaches the pages.
|
||||
@@ -3158,29 +3301,27 @@ pub fn handleClick(self: *Page, target: *Node) !void {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check target attribute - don't navigate if opening in new window/tab
|
||||
const target_val = anchor.getTarget();
|
||||
if (target_val.len > 0 and !std.mem.eql(u8, target_val, "_self")) {
|
||||
log.warn(.not_implemented, "a.target", .{ .type = self._type, .url = self.url });
|
||||
return;
|
||||
}
|
||||
|
||||
if (try element.hasAttribute(comptime .wrap("download"), self)) {
|
||||
log.warn(.browser, "a.download", .{ .type = self._type, .url = self.url });
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: We need to support targets properly, but this is the most
|
||||
// common case: a click on an anchor navigates the page/frame that
|
||||
// anchor is in.
|
||||
const target_page = blk: {
|
||||
const target_name = anchor.getTarget();
|
||||
if (target_name.len == 0) {
|
||||
break :blk target.ownerPage(self);
|
||||
}
|
||||
break :blk self.resolveTargetPage(target_name) orelse {
|
||||
log.warn(.not_implemented, "target", .{ .type = self._type, .url = self.url, .target = target_name });
|
||||
return;
|
||||
};
|
||||
};
|
||||
|
||||
// ownerDocument only returns null when `target` is a document, which
|
||||
// it is NOT in this case. Even for a detched node, it'll return self.document
|
||||
try element.focus(self);
|
||||
try self.scheduleNavigation(href, .{
|
||||
.reason = .script,
|
||||
.kind = .{ .push = null },
|
||||
}, .{ .anchor = target });
|
||||
}, .{ .anchor = target_page });
|
||||
},
|
||||
.input => |input| {
|
||||
try element.focus(self);
|
||||
@@ -3273,6 +3414,25 @@ pub fn submitForm(self: *Page, submitter_: ?*Element, form_: ?*Element.Html.Form
|
||||
|
||||
const form_element = form.asElement();
|
||||
|
||||
const target_name_: ?[]const u8 = blk: {
|
||||
if (submitter_) |submitter| {
|
||||
if (submitter.getAttributeSafe(comptime .wrap("formtarget"))) |ft| {
|
||||
break :blk ft;
|
||||
}
|
||||
}
|
||||
break :blk form_element.getAttributeSafe(comptime .wrap("target"));
|
||||
};
|
||||
|
||||
const target_page = blk: {
|
||||
const target_name = target_name_ orelse {
|
||||
break :blk form_element.asNode().ownerPage(self);
|
||||
};
|
||||
break :blk self.resolveTargetPage(target_name) orelse {
|
||||
log.warn(.not_implemented, "target", .{ .type = self._type, .url = self.url, .target = target_name });
|
||||
return;
|
||||
};
|
||||
};
|
||||
|
||||
if (submit_opts.fire_event) {
|
||||
const submit_event = try Event.initTrusted(comptime .wrap("submit"), .{ .bubbles = true, .cancelable = true }, self);
|
||||
|
||||
@@ -3315,7 +3475,8 @@ pub fn submitForm(self: *Page, submitter_: ?*Element, form_: ?*Element.Html.Form
|
||||
} else {
|
||||
action = try URL.concatQueryString(arena, action, buf.written());
|
||||
}
|
||||
return self.scheduleNavigationWithArena(arena, action, opts, .{ .form = form_element.asNode() });
|
||||
|
||||
return self.scheduleNavigationWithArena(arena, action, opts, .{ .form = target_page });
|
||||
}
|
||||
|
||||
// insertText is a shortcut to insert text into the active element.
|
||||
@@ -3364,10 +3525,16 @@ fn asUint(comptime string: anytype) std.meta.Int(
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "WebApi: Page" {
|
||||
const filter: testing.LogFilter = .init(&.{ .http, .js });
|
||||
defer filter.deinit();
|
||||
|
||||
try testing.htmlRunner("page", .{});
|
||||
}
|
||||
|
||||
test "WebApi: Frames" {
|
||||
const filter: testing.LogFilter = .init(&.{.js});
|
||||
defer filter.deinit();
|
||||
|
||||
try testing.htmlRunner("frames", .{});
|
||||
}
|
||||
|
||||
|
||||
@@ -63,9 +63,6 @@ shutdown: bool = false,
|
||||
|
||||
client: *HttpClient,
|
||||
allocator: Allocator,
|
||||
buffer_pool: BufferPool,
|
||||
|
||||
script_pool: std.heap.MemoryPool(Script),
|
||||
|
||||
// We can download multiple sync modules in parallel, but we want to process
|
||||
// them in order. We can't use an std.DoublyLinkedList, like the other script types,
|
||||
@@ -101,18 +98,14 @@ pub fn init(allocator: Allocator, http_client: *HttpClient, page: *Page) ScriptM
|
||||
.imported_modules = .empty,
|
||||
.client = http_client,
|
||||
.static_scripts_done = false,
|
||||
.buffer_pool = BufferPool.init(allocator, 5),
|
||||
.page_notified_of_completion = false,
|
||||
.script_pool = std.heap.MemoryPool(Script).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ScriptManager) void {
|
||||
// necessary to free any buffers scripts may be referencing
|
||||
// necessary to free any arenas scripts may be referencing
|
||||
self.reset();
|
||||
|
||||
self.buffer_pool.deinit();
|
||||
self.script_pool.deinit();
|
||||
self.imported_modules.deinit(self.allocator);
|
||||
// we don't deinit self.importmap b/c we use the page's arena for its
|
||||
// allocations.
|
||||
@@ -121,7 +114,10 @@ pub fn deinit(self: *ScriptManager) void {
|
||||
pub fn reset(self: *ScriptManager) void {
|
||||
var it = self.imported_modules.valueIterator();
|
||||
while (it.next()) |value_ptr| {
|
||||
self.buffer_pool.release(value_ptr.buffer);
|
||||
switch (value_ptr.state) {
|
||||
.done => |script| script.deinit(),
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
self.imported_modules.clearRetainingCapacity();
|
||||
|
||||
@@ -138,13 +134,13 @@ pub fn reset(self: *ScriptManager) void {
|
||||
fn clearList(list: *std.DoublyLinkedList) void {
|
||||
while (list.popFirst()) |n| {
|
||||
const script: *Script = @fieldParentPtr("node", n);
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !net_http.Headers {
|
||||
fn getHeaders(self: *ScriptManager, arena: Allocator, url: [:0]const u8) !net_http.Headers {
|
||||
var headers = try self.client.newHeaders();
|
||||
try self.page.headersForRequest(self.page.arena, url, &headers);
|
||||
try self.page.headersForRequest(arena, url, &headers);
|
||||
return headers;
|
||||
}
|
||||
|
||||
@@ -159,7 +155,6 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
// <script> has already been processed.
|
||||
return;
|
||||
}
|
||||
script_element._executed = true;
|
||||
|
||||
const element = script_element.asElement();
|
||||
if (element.getAttributeSafe(comptime .wrap("nomodule")) != null) {
|
||||
@@ -192,30 +187,48 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
return;
|
||||
};
|
||||
|
||||
var handover = false;
|
||||
const page = self.page;
|
||||
|
||||
const arena = try page.getArena(.{ .debug = "addFromElement" });
|
||||
errdefer if (!handover) {
|
||||
page.releaseArena(arena);
|
||||
};
|
||||
|
||||
var source: Script.Source = undefined;
|
||||
var remote_url: ?[:0]const u8 = null;
|
||||
const base_url = page.base();
|
||||
if (element.getAttributeSafe(comptime .wrap("src"))) |src| {
|
||||
if (try parseDataURI(page.arena, src)) |data_uri| {
|
||||
if (try parseDataURI(arena, src)) |data_uri| {
|
||||
source = .{ .@"inline" = data_uri };
|
||||
} else {
|
||||
remote_url = try URL.resolve(page.arena, base_url, src, .{});
|
||||
remote_url = try URL.resolve(arena, base_url, src, .{});
|
||||
source = .{ .remote = .{} };
|
||||
}
|
||||
} else {
|
||||
const inline_source = try element.asNode().getTextContentAlloc(page.arena);
|
||||
var buf = std.Io.Writer.Allocating.init(arena);
|
||||
try element.asNode().getChildTextContent(&buf.writer);
|
||||
try buf.writer.writeByte(0);
|
||||
const data = buf.written();
|
||||
const inline_source: [:0]const u8 = data[0 .. data.len - 1 :0];
|
||||
if (inline_source.len == 0) {
|
||||
// we haven't set script_element._executed = true yet, which is good.
|
||||
// If content is appended to the script, we will execute it then.
|
||||
page.releaseArena(arena);
|
||||
return;
|
||||
}
|
||||
source = .{ .@"inline" = inline_source };
|
||||
}
|
||||
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
|
||||
// Only set _executed (already-started) when we actually have content to execute
|
||||
script_element._executed = true;
|
||||
const is_inline = source == .@"inline";
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = kind,
|
||||
.node = .{},
|
||||
.arena = arena,
|
||||
.manager = self,
|
||||
.source = source,
|
||||
.script_element = script_element,
|
||||
@@ -259,7 +272,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
if (is_blocking == false) {
|
||||
self.scriptList(script).remove(&script.node);
|
||||
}
|
||||
script.deinit(true);
|
||||
// Let the outer errdefer handle releasing the arena if client.request fails
|
||||
}
|
||||
|
||||
try self.client.request(.{
|
||||
@@ -267,7 +280,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
.ctx = script,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.blocking = is_blocking,
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
.resource_type = .script,
|
||||
@@ -278,6 +291,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
handover = true;
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
@@ -307,7 +321,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
}
|
||||
if (script.status == 0) {
|
||||
// an error (that we already logged)
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -316,7 +330,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
self.is_evaluating = true;
|
||||
defer {
|
||||
self.is_evaluating = was_evaluating;
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
return script.eval(page);
|
||||
}
|
||||
@@ -348,11 +362,14 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
}
|
||||
errdefer _ = self.imported_modules.remove(url);
|
||||
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
const page = self.page;
|
||||
const arena = try page.getArena(.{ .debug = "preloadImport" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = .module,
|
||||
.arena = arena,
|
||||
.url = url,
|
||||
.node = .{},
|
||||
.manager = self,
|
||||
@@ -362,11 +379,7 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
.mode = .import,
|
||||
};
|
||||
|
||||
gop.value_ptr.* = ImportedModule{
|
||||
.manager = self,
|
||||
};
|
||||
|
||||
const page = self.page;
|
||||
gop.value_ptr.* = ImportedModule{};
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
@@ -381,12 +394,18 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
});
|
||||
}
|
||||
|
||||
try self.client.request(.{
|
||||
// This seems wrong since we're not dealing with an async import (unlike
|
||||
// getAsyncModule below), but all we're trying to do here is pre-load the
|
||||
// script for execution at some point in the future (when waitForImport is
|
||||
// called).
|
||||
self.async_scripts.append(&script.node);
|
||||
|
||||
self.client.request(.{
|
||||
.url = url,
|
||||
.ctx = script,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
.resource_type = .script,
|
||||
.notification = page._session.notification,
|
||||
@@ -395,13 +414,10 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
.data_callback = Script.dataCallback,
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
|
||||
// This seems wrong since we're not dealing with an async import (unlike
|
||||
// getAsyncModule below), but all we're trying to do here is pre-load the
|
||||
// script for execution at some point in the future (when waitForImport is
|
||||
// called).
|
||||
self.async_scripts.append(&script.node);
|
||||
}) catch |err| {
|
||||
self.async_scripts.remove(&script.node);
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
@@ -422,12 +438,12 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
_ = try client.tick(200);
|
||||
continue;
|
||||
},
|
||||
.done => {
|
||||
.done => |script| {
|
||||
var shared = false;
|
||||
const buffer = entry.value_ptr.buffer;
|
||||
const waiters = entry.value_ptr.waiters;
|
||||
|
||||
if (waiters == 0) {
|
||||
if (waiters == 1) {
|
||||
self.imported_modules.removeByPtr(entry.key_ptr);
|
||||
} else {
|
||||
shared = true;
|
||||
@@ -436,7 +452,7 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
return .{
|
||||
.buffer = buffer,
|
||||
.shared = shared,
|
||||
.buffer_pool = &self.buffer_pool,
|
||||
.script = script,
|
||||
};
|
||||
},
|
||||
.err => return error.Failed,
|
||||
@@ -445,11 +461,14 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
}
|
||||
|
||||
pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.Callback, cb_data: *anyopaque, referrer: []const u8) !void {
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
const page = self.page;
|
||||
const arena = try page.getArena(.{ .debug = "getAsyncImport" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = .module,
|
||||
.arena = arena,
|
||||
.url = url,
|
||||
.node = .{},
|
||||
.manager = self,
|
||||
@@ -462,7 +481,6 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
} },
|
||||
};
|
||||
|
||||
const page = self.page;
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
page.js.localScope(&ls);
|
||||
@@ -485,11 +503,12 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
self.is_evaluating = true;
|
||||
defer self.is_evaluating = was_evaluating;
|
||||
|
||||
try self.client.request(.{
|
||||
self.async_scripts.append(&script.node);
|
||||
self.client.request(.{
|
||||
.url = url,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.ctx = script,
|
||||
.resource_type = .script,
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
@@ -499,9 +518,10 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
.data_callback = Script.dataCallback,
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
|
||||
self.async_scripts.append(&script.node);
|
||||
}) catch |err| {
|
||||
self.async_scripts.remove(&script.node);
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
// Called from the Page to let us know it's done parsing the HTML. Necessary that
|
||||
@@ -526,18 +546,18 @@ fn evaluate(self: *ScriptManager) void {
|
||||
var script: *Script = @fieldParentPtr("node", n);
|
||||
switch (script.mode) {
|
||||
.async => {
|
||||
defer script.deinit(true);
|
||||
defer script.deinit();
|
||||
script.eval(page);
|
||||
},
|
||||
.import_async => |ia| {
|
||||
defer script.deinit(false);
|
||||
if (script.status < 200 or script.status > 299) {
|
||||
script.deinit();
|
||||
ia.callback(ia.data, error.FailedToLoad);
|
||||
} else {
|
||||
ia.callback(ia.data, .{
|
||||
.shared = false,
|
||||
.script = script,
|
||||
.buffer = script.source.remote,
|
||||
.buffer_pool = &self.buffer_pool,
|
||||
});
|
||||
}
|
||||
},
|
||||
@@ -563,7 +583,7 @@ fn evaluate(self: *ScriptManager) void {
|
||||
}
|
||||
defer {
|
||||
_ = self.defer_scripts.popFirst();
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
script.eval(page);
|
||||
}
|
||||
@@ -614,11 +634,12 @@ fn parseImportmap(self: *ScriptManager, script: *const Script) !void {
|
||||
}
|
||||
|
||||
pub const Script = struct {
|
||||
complete: bool,
|
||||
kind: Kind,
|
||||
complete: bool,
|
||||
status: u16 = 0,
|
||||
source: Source,
|
||||
url: []const u8,
|
||||
arena: Allocator,
|
||||
mode: ExecutionMode,
|
||||
node: std.DoublyLinkedList.Node,
|
||||
script_element: ?*Element.Html.Script,
|
||||
@@ -669,11 +690,8 @@ pub const Script = struct {
|
||||
import_async: ImportAsync,
|
||||
};
|
||||
|
||||
fn deinit(self: *Script, comptime release_buffer: bool) void {
|
||||
if ((comptime release_buffer) and self.source == .remote) {
|
||||
self.manager.buffer_pool.release(self.source.remote);
|
||||
}
|
||||
self.manager.script_pool.destroy(self);
|
||||
fn deinit(self: *Script) void {
|
||||
self.manager.page.releaseArena(self.arena);
|
||||
}
|
||||
|
||||
fn startCallback(transfer: *HttpClient.Transfer) !void {
|
||||
@@ -739,9 +757,9 @@ pub const Script = struct {
|
||||
}
|
||||
|
||||
lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity });
|
||||
var buffer = self.manager.buffer_pool.get();
|
||||
var buffer: std.ArrayList(u8) = .empty;
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try buffer.ensureTotalCapacity(self.manager.allocator, cl);
|
||||
try buffer.ensureTotalCapacity(self.arena, cl);
|
||||
}
|
||||
self.source = .{ .remote = buffer };
|
||||
return true;
|
||||
@@ -755,7 +773,7 @@ pub const Script = struct {
|
||||
};
|
||||
}
|
||||
fn _dataCallback(self: *Script, _: *HttpClient.Transfer, data: []const u8) !void {
|
||||
try self.source.remote.appendSlice(self.manager.allocator, data);
|
||||
try self.source.remote.appendSlice(self.arena, data);
|
||||
}
|
||||
|
||||
fn doneCallback(ctx: *anyopaque) !void {
|
||||
@@ -772,9 +790,8 @@ pub const Script = struct {
|
||||
} else if (self.mode == .import) {
|
||||
manager.async_scripts.remove(&self.node);
|
||||
const entry = manager.imported_modules.getPtr(self.url).?;
|
||||
entry.state = .done;
|
||||
entry.state = .{ .done = self };
|
||||
entry.buffer = self.source.remote;
|
||||
self.deinit(false);
|
||||
}
|
||||
manager.evaluate();
|
||||
}
|
||||
@@ -800,7 +817,7 @@ pub const Script = struct {
|
||||
const manager = self.manager;
|
||||
manager.scriptList(self).remove(&self.node);
|
||||
if (manager.shutdown) {
|
||||
self.deinit(true);
|
||||
self.deinit();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -812,7 +829,7 @@ pub const Script = struct {
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
self.deinit(true);
|
||||
self.deinit();
|
||||
manager.evaluate();
|
||||
}
|
||||
|
||||
@@ -940,76 +957,6 @@ pub const Script = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const BufferPool = struct {
|
||||
count: usize,
|
||||
available: List = .{},
|
||||
allocator: Allocator,
|
||||
max_concurrent_transfers: u8,
|
||||
mem_pool: std.heap.MemoryPool(Container),
|
||||
|
||||
const List = std.SinglyLinkedList;
|
||||
|
||||
const Container = struct {
|
||||
node: List.Node,
|
||||
buf: std.ArrayList(u8),
|
||||
};
|
||||
|
||||
fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
|
||||
return .{
|
||||
.available = .{},
|
||||
.count = 0,
|
||||
.allocator = allocator,
|
||||
.max_concurrent_transfers = max_concurrent_transfers,
|
||||
.mem_pool = std.heap.MemoryPool(Container).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: *BufferPool) void {
|
||||
const allocator = self.allocator;
|
||||
|
||||
var node = self.available.first;
|
||||
while (node) |n| {
|
||||
const container: *Container = @fieldParentPtr("node", n);
|
||||
container.buf.deinit(allocator);
|
||||
node = n.next;
|
||||
}
|
||||
self.mem_pool.deinit();
|
||||
}
|
||||
|
||||
fn get(self: *BufferPool) std.ArrayList(u8) {
|
||||
const node = self.available.popFirst() orelse {
|
||||
// return a new buffer
|
||||
return .{};
|
||||
};
|
||||
|
||||
self.count -= 1;
|
||||
const container: *Container = @fieldParentPtr("node", node);
|
||||
defer self.mem_pool.destroy(container);
|
||||
return container.buf;
|
||||
}
|
||||
|
||||
fn release(self: *BufferPool, buffer: ArrayList(u8)) void {
|
||||
// create mutable copy
|
||||
var b = buffer;
|
||||
|
||||
if (self.count == self.max_concurrent_transfers) {
|
||||
b.deinit(self.allocator);
|
||||
return;
|
||||
}
|
||||
|
||||
const container = self.mem_pool.create() catch |err| {
|
||||
b.deinit(self.allocator);
|
||||
log.err(.http, "SM BufferPool release", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
b.clearRetainingCapacity();
|
||||
container.* = .{ .buf = b, .node = .{} };
|
||||
self.count += 1;
|
||||
self.available.prepend(&container.node);
|
||||
}
|
||||
};
|
||||
|
||||
const ImportAsync = struct {
|
||||
data: *anyopaque,
|
||||
callback: ImportAsync.Callback,
|
||||
@@ -1019,12 +966,12 @@ const ImportAsync = struct {
|
||||
|
||||
pub const ModuleSource = struct {
|
||||
shared: bool,
|
||||
buffer_pool: *BufferPool,
|
||||
script: *Script,
|
||||
buffer: std.ArrayList(u8),
|
||||
|
||||
pub fn deinit(self: *ModuleSource) void {
|
||||
if (self.shared == false) {
|
||||
self.buffer_pool.release(self.buffer);
|
||||
self.script.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1034,15 +981,14 @@ pub const ModuleSource = struct {
|
||||
};
|
||||
|
||||
const ImportedModule = struct {
|
||||
manager: *ScriptManager,
|
||||
waiters: u16 = 1,
|
||||
state: State = .loading,
|
||||
buffer: std.ArrayList(u8) = .{},
|
||||
waiters: u16 = 1,
|
||||
|
||||
const State = enum {
|
||||
const State = union(enum) {
|
||||
err,
|
||||
done,
|
||||
loading,
|
||||
done: *Script,
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@@ -84,6 +84,7 @@ queued_navigation: std.ArrayList(*Page),
|
||||
// about:blank navigations (which may add to queued_navigation).
|
||||
queued_queued_navigation: std.ArrayList(*Page),
|
||||
|
||||
page_id_gen: u32,
|
||||
frame_id_gen: u32,
|
||||
|
||||
pub fn init(self: *Session, browser: *Browser, notification: *Notification) !void {
|
||||
@@ -103,6 +104,7 @@ pub fn init(self: *Session, browser: *Browser, notification: *Notification) !voi
|
||||
.page_arena = page_arena,
|
||||
.factory = Factory.init(page_arena),
|
||||
.history = .{},
|
||||
.page_id_gen = 0,
|
||||
.frame_id_gen = 0,
|
||||
// The prototype (EventTarget) for Navigation is created when a Page is created.
|
||||
.navigation = .{ ._proto = undefined },
|
||||
@@ -297,9 +299,24 @@ pub const WaitResult = enum {
|
||||
cdp_socket,
|
||||
};
|
||||
|
||||
pub fn findPage(self: *Session, frame_id: u32) ?*Page {
|
||||
pub fn findPageByFrameId(self: *Session, frame_id: u32) ?*Page {
|
||||
const page = self.currentPage() orelse return null;
|
||||
return if (page._frame_id == frame_id) page else null;
|
||||
return findPageBy(page, "_frame_id", frame_id);
|
||||
}
|
||||
|
||||
pub fn findPageById(self: *Session, id: u32) ?*Page {
|
||||
const page = self.currentPage() orelse return null;
|
||||
return findPageBy(page, "id", id);
|
||||
}
|
||||
|
||||
fn findPageBy(page: *Page, comptime field: []const u8, id: u32) ?*Page {
|
||||
if (@field(page, field) == id) return page;
|
||||
for (page.frames.items) |f| {
|
||||
if (findPageBy(f, field, id)) |found| {
|
||||
return found;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn wait(self: *Session, wait_ms: u32) WaitResult {
|
||||
@@ -384,7 +401,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
// scheduler.run could trigger new http transfers, so do not
|
||||
// store http_client.active BEFORE this call and then use
|
||||
// it AFTER.
|
||||
const ms_to_next_task = try browser.runMacrotasks();
|
||||
try browser.runMacrotasks();
|
||||
|
||||
// Each call to this runs scheduled load events.
|
||||
try page.dispatchLoad();
|
||||
@@ -406,16 +423,16 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
std.debug.assert(http_client.intercepted == 0);
|
||||
}
|
||||
|
||||
var ms: u64 = ms_to_next_task orelse blk: {
|
||||
if (wait_ms - ms_remaining < 100) {
|
||||
if (comptime builtin.is_test) {
|
||||
return .done;
|
||||
}
|
||||
// Look, we want to exit ASAP, but we don't want
|
||||
// to exit so fast that we've run none of the
|
||||
// background jobs.
|
||||
break :blk 50;
|
||||
}
|
||||
var ms = blk: {
|
||||
// if (wait_ms - ms_remaining < 100) {
|
||||
// if (comptime builtin.is_test) {
|
||||
// return .done;
|
||||
// }
|
||||
// // Look, we want to exit ASAP, but we don't want
|
||||
// // to exit so fast that we've run none of the
|
||||
// // background jobs.
|
||||
// break :blk 50;
|
||||
// }
|
||||
|
||||
if (browser.hasBackgroundTasks()) {
|
||||
// _we_ have nothing to run, but v8 is working on
|
||||
@@ -424,9 +441,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
break :blk 20;
|
||||
}
|
||||
|
||||
// No http transfers, no cdp extra socket, no
|
||||
// scheduled tasks, we're done.
|
||||
return .done;
|
||||
break :blk browser.msToNextMacrotask() orelse return .done;
|
||||
};
|
||||
|
||||
if (ms > ms_remaining) {
|
||||
@@ -453,9 +468,9 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
// We're here because we either have active HTTP
|
||||
// connections, or exit_when_done == false (aka, there's
|
||||
// an cdp_socket registered with the http client).
|
||||
// We should continue to run lowPriority tasks, so we
|
||||
// minimize how long we'll poll for network I/O.
|
||||
var ms_to_wait = @min(200, ms_to_next_task orelse 200);
|
||||
// We should continue to run tasks, so we minimize how long
|
||||
// we'll poll for network I/O.
|
||||
var ms_to_wait = @min(200, browser.msToNextMacrotask() orelse 200);
|
||||
if (ms_to_wait > 10 and browser.hasBackgroundTasks()) {
|
||||
// if we have background tasks, we don't want to wait too
|
||||
// long for a message from the client. We want to go back
|
||||
@@ -531,7 +546,9 @@ fn processQueuedNavigation(self: *Session) !void {
|
||||
continue;
|
||||
}
|
||||
|
||||
try self.processFrameNavigation(page, qn);
|
||||
self.processFrameNavigation(page, qn) catch |err| {
|
||||
log.warn(.page, "frame navigation", .{ .url = qn.url, .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
// Clear the queue after first pass
|
||||
@@ -571,7 +588,8 @@ fn processFrameNavigation(self: *Session, page: *Page, qn: *QueuedNavigation) !v
|
||||
|
||||
errdefer iframe._window = null;
|
||||
|
||||
if (page._parent_notified) {
|
||||
const parent_notified = page._parent_notified;
|
||||
if (parent_notified) {
|
||||
// we already notified the parent that we had loaded
|
||||
parent._pending_loads += 1;
|
||||
}
|
||||
@@ -581,7 +599,19 @@ fn processFrameNavigation(self: *Session, page: *Page, qn: *QueuedNavigation) !v
|
||||
page.* = undefined;
|
||||
|
||||
try Page.init(page, frame_id, self, parent);
|
||||
errdefer page.deinit(true);
|
||||
errdefer {
|
||||
for (parent.frames.items, 0..) |frame, i| {
|
||||
if (frame == page) {
|
||||
parent.frames_sorted = false;
|
||||
_ = parent.frames.swapRemove(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (parent_notified) {
|
||||
parent._pending_loads -= 1;
|
||||
}
|
||||
page.deinit(true);
|
||||
}
|
||||
|
||||
page.iframe = iframe;
|
||||
iframe._window = page.window;
|
||||
@@ -636,3 +666,9 @@ pub fn nextFrameId(self: *Session) u32 {
|
||||
self.frame_id_gen = id;
|
||||
return id;
|
||||
}
|
||||
|
||||
pub fn nextPageId(self: *Session) u32 {
|
||||
const id = self.page_id_gen +% 1;
|
||||
self.page_id_gen = id;
|
||||
return id;
|
||||
}
|
||||
|
||||
@@ -277,6 +277,11 @@ pub fn isCompleteHTTPUrl(url: []const u8) bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
// blob: and data: URLs are complete but don't follow scheme:// pattern
|
||||
if (std.mem.startsWith(u8, url, "blob:") or std.mem.startsWith(u8, url, "data:")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if there's a scheme (protocol) ending with ://
|
||||
const colon_pos = std.mem.indexOfScalar(u8, url, ':') orelse return false;
|
||||
|
||||
@@ -1400,3 +1405,12 @@ test "URL: unescape" {
|
||||
try testing.expectEqual("hello%2", result);
|
||||
}
|
||||
}
|
||||
|
||||
test "URL: getHost" {
|
||||
try testing.expectEqualSlices(u8, "example.com:8080", getHost("https://example.com:8080/path"));
|
||||
try testing.expectEqualSlices(u8, "example.com", getHost("https://example.com/path"));
|
||||
try testing.expectEqualSlices(u8, "example.com:443", getHost("https://example.com:443/"));
|
||||
try testing.expectEqualSlices(u8, "example.com", getHost("https://user:pass@example.com/page"));
|
||||
try testing.expectEqualSlices(u8, "example.com:8080", getHost("https://user:pass@example.com:8080/page"));
|
||||
try testing.expectEqualSlices(u8, "", getHost("not-a-url"));
|
||||
}
|
||||
|
||||
104
src/browser/actions.zig
Normal file
104
src/browser/actions.zig
Normal file
@@ -0,0 +1,104 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("../lightpanda.zig");
|
||||
const DOMNode = @import("webapi/Node.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Event = @import("webapi/Event.zig");
|
||||
const MouseEvent = @import("webapi/event/MouseEvent.zig");
|
||||
const Page = @import("Page.zig");
|
||||
|
||||
pub fn click(node: *DOMNode, page: *Page) !void {
|
||||
const el = node.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
const mouse_event: *MouseEvent = try .initTrusted(comptime .wrap("click"), .{
|
||||
.bubbles = true,
|
||||
.cancelable = true,
|
||||
.composed = true,
|
||||
.clientX = 0,
|
||||
.clientY = 0,
|
||||
}, page);
|
||||
|
||||
page._event_manager.dispatch(el.asEventTarget(), mouse_event.asEvent()) catch |err| {
|
||||
lp.log.err(.app, "click failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fill(node: *DOMNode, text: []const u8, page: *Page) !void {
|
||||
const el = node.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
if (el.is(Element.Html.Input)) |input| {
|
||||
input.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill input failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else if (el.is(Element.Html.TextArea)) |textarea| {
|
||||
textarea.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill textarea failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else if (el.is(Element.Html.Select)) |select| {
|
||||
select.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill select failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else {
|
||||
return error.InvalidNodeType;
|
||||
}
|
||||
|
||||
const input_evt: *Event = try .initTrusted(comptime .wrap("input"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), input_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch input event failed", .{ .err = err });
|
||||
};
|
||||
|
||||
const change_evt: *Event = try .initTrusted(comptime .wrap("change"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), change_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch change event failed", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
pub fn scroll(node: ?*DOMNode, x: ?i32, y: ?i32, page: *Page) !void {
|
||||
if (node) |n| {
|
||||
const el = n.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
if (x) |val| {
|
||||
el.setScrollLeft(val, page) catch |err| {
|
||||
lp.log.err(.app, "setScrollLeft failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
if (y) |val| {
|
||||
el.setScrollTop(val, page) catch |err| {
|
||||
lp.log.err(.app, "setScrollTop failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
|
||||
const scroll_evt: *Event = try .initTrusted(comptime .wrap("scroll"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), scroll_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch scroll event failed", .{ .err = err });
|
||||
};
|
||||
} else {
|
||||
page.window.scrollTo(.{ .x = x orelse 0 }, y, page) catch |err| {
|
||||
lp.log.err(.app, "scroll failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -157,7 +157,7 @@ pub fn collectInteractiveElements(
|
||||
.node = node,
|
||||
.tag_name = el.getTagNameLower(),
|
||||
.role = getRole(el),
|
||||
.name = getAccessibleName(el),
|
||||
.name = try getAccessibleName(el, arena),
|
||||
.interactivity_type = itype,
|
||||
.listener_types = listener_types,
|
||||
.disabled = isDisabled(el),
|
||||
@@ -178,12 +178,12 @@ pub fn collectInteractiveElements(
|
||||
return results.items;
|
||||
}
|
||||
|
||||
const ListenerTargetMap = std.AutoHashMapUnmanaged(usize, std.ArrayList([]const u8));
|
||||
pub const ListenerTargetMap = std.AutoHashMapUnmanaged(usize, std.ArrayList([]const u8));
|
||||
|
||||
/// Pre-build a map from event_target pointer → list of event type names.
|
||||
/// This lets both classifyInteractivity (O(1) "has any?") and
|
||||
/// getListenerTypes (O(1) "which ones?") avoid re-iterating per element.
|
||||
fn buildListenerTargetMap(page: *Page, arena: Allocator) !ListenerTargetMap {
|
||||
pub fn buildListenerTargetMap(page: *Page, arena: Allocator) !ListenerTargetMap {
|
||||
var map = ListenerTargetMap{};
|
||||
|
||||
// addEventListener registrations
|
||||
@@ -209,7 +209,7 @@ fn buildListenerTargetMap(page: *Page, arena: Allocator) !ListenerTargetMap {
|
||||
return map;
|
||||
}
|
||||
|
||||
fn classifyInteractivity(
|
||||
pub fn classifyInteractivity(
|
||||
el: *Element,
|
||||
html_el: *Element.Html,
|
||||
listener_targets: ListenerTargetMap,
|
||||
@@ -253,17 +253,52 @@ fn classifyInteractivity(
|
||||
return null;
|
||||
}
|
||||
|
||||
fn isInteractiveRole(role: []const u8) bool {
|
||||
const interactive_roles = [_][]const u8{
|
||||
"button", "link", "tab", "menuitem",
|
||||
"menuitemcheckbox", "menuitemradio", "switch", "checkbox",
|
||||
"radio", "slider", "spinbutton", "searchbox",
|
||||
"combobox", "option", "treeitem",
|
||||
};
|
||||
for (interactive_roles) |r| {
|
||||
if (std.ascii.eqlIgnoreCase(role, r)) return true;
|
||||
}
|
||||
return false;
|
||||
pub fn isInteractiveRole(role: []const u8) bool {
|
||||
const MAX_LEN = "menuitemcheckbox".len;
|
||||
if (role.len > MAX_LEN) return false;
|
||||
var buf: [MAX_LEN]u8 = undefined;
|
||||
const lowered = std.ascii.lowerString(&buf, role);
|
||||
const interactive_roles = std.StaticStringMap(void).initComptime(.{
|
||||
.{ "button", {} },
|
||||
.{ "checkbox", {} },
|
||||
.{ "combobox", {} },
|
||||
.{ "iframe", {} },
|
||||
.{ "link", {} },
|
||||
.{ "listbox", {} },
|
||||
.{ "menuitem", {} },
|
||||
.{ "menuitemcheckbox", {} },
|
||||
.{ "menuitemradio", {} },
|
||||
.{ "option", {} },
|
||||
.{ "radio", {} },
|
||||
.{ "searchbox", {} },
|
||||
.{ "slider", {} },
|
||||
.{ "spinbutton", {} },
|
||||
.{ "switch", {} },
|
||||
.{ "tab", {} },
|
||||
.{ "textbox", {} },
|
||||
.{ "treeitem", {} },
|
||||
});
|
||||
return interactive_roles.has(lowered);
|
||||
}
|
||||
|
||||
pub fn isContentRole(role: []const u8) bool {
|
||||
const MAX_LEN = "columnheader".len;
|
||||
if (role.len > MAX_LEN) return false;
|
||||
var buf: [MAX_LEN]u8 = undefined;
|
||||
const lowered = std.ascii.lowerString(&buf, role);
|
||||
const content_roles = std.StaticStringMap(void).initComptime(.{
|
||||
.{ "article", {} },
|
||||
.{ "cell", {} },
|
||||
.{ "columnheader", {} },
|
||||
.{ "gridcell", {} },
|
||||
.{ "heading", {} },
|
||||
.{ "listitem", {} },
|
||||
.{ "main", {} },
|
||||
.{ "navigation", {} },
|
||||
.{ "region", {} },
|
||||
.{ "rowheader", {} },
|
||||
});
|
||||
return content_roles.has(lowered);
|
||||
}
|
||||
|
||||
fn getRole(el: *Element) ?[]const u8 {
|
||||
@@ -296,7 +331,7 @@ fn getRole(el: *Element) ?[]const u8 {
|
||||
};
|
||||
}
|
||||
|
||||
fn getAccessibleName(el: *Element) ?[]const u8 {
|
||||
fn getAccessibleName(el: *Element, arena: Allocator) !?[]const u8 {
|
||||
// aria-label
|
||||
if (el.getAttributeSafe(comptime .wrap("aria-label"))) |v| {
|
||||
if (v.len > 0) return v;
|
||||
@@ -325,11 +360,15 @@ fn getAccessibleName(el: *Element) ?[]const u8 {
|
||||
}
|
||||
|
||||
// Text content (first non-empty text node, trimmed)
|
||||
return getTextContent(el.asNode());
|
||||
return try getTextContent(el.asNode(), arena);
|
||||
}
|
||||
|
||||
fn getTextContent(node: *Node) ?[]const u8 {
|
||||
var tw = TreeWalker.FullExcludeSelf.init(node, .{});
|
||||
fn getTextContent(node: *Node, arena: Allocator) !?[]const u8 {
|
||||
var tw: TreeWalker.FullExcludeSelf = .init(node, .{});
|
||||
|
||||
var arr: std.ArrayList(u8) = .empty;
|
||||
var single_chunk: ?[]const u8 = null;
|
||||
|
||||
while (tw.next()) |child| {
|
||||
// Skip text inside script/style elements.
|
||||
if (child.is(Element)) |el| {
|
||||
@@ -344,13 +383,29 @@ fn getTextContent(node: *Node) ?[]const u8 {
|
||||
if (child.is(Node.CData)) |cdata| {
|
||||
if (cdata.is(Node.CData.Text)) |text| {
|
||||
const content = std.mem.trim(u8, text.getWholeText(), &std.ascii.whitespace);
|
||||
if (content.len > 0) return content;
|
||||
if (content.len > 0) {
|
||||
if (single_chunk == null and arr.items.len == 0) {
|
||||
single_chunk = content;
|
||||
} else {
|
||||
if (single_chunk) |sc| {
|
||||
try arr.appendSlice(arena, sc);
|
||||
try arr.append(arena, ' ');
|
||||
single_chunk = null;
|
||||
}
|
||||
try arr.appendSlice(arena, content);
|
||||
try arr.append(arena, ' ');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
if (single_chunk) |sc| return sc;
|
||||
if (arr.items.len == 0) return null;
|
||||
|
||||
// strip out trailing space
|
||||
return arr.items[0 .. arr.items.len - 1];
|
||||
}
|
||||
fn isDisabled(el: *Element) bool {
|
||||
if (el.getAttributeSafe(comptime .wrap("disabled")) != null) return true;
|
||||
return isDisabledByFieldset(el);
|
||||
|
||||
@@ -40,8 +40,8 @@ prev_context: *Context,
|
||||
|
||||
// Takes the raw v8 isolate and extracts the context from it.
|
||||
pub fn init(self: *Caller, v8_isolate: *v8.Isolate) void {
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(v8_isolate).?;
|
||||
initWithContext(self, Context.fromC(v8_context), v8_context);
|
||||
const ctx, const v8_context = Context.fromIsolate(.{ .handle = v8_isolate });
|
||||
initWithContext(self, ctx, v8_context);
|
||||
}
|
||||
|
||||
fn initWithContext(self: *Caller, ctx: *Context, v8_context: *const v8.Context) void {
|
||||
@@ -537,9 +537,7 @@ pub const Function = struct {
|
||||
|
||||
pub fn call(comptime T: type, info_handle: *const v8.FunctionCallbackInfo, func: anytype, comptime opts: Opts) void {
|
||||
const v8_isolate = v8.v8__FunctionCallbackInfo__GetIsolate(info_handle).?;
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(v8_isolate).?;
|
||||
|
||||
const ctx = Context.fromC(v8_context);
|
||||
const ctx, const v8_context = Context.fromIsolate(.{ .handle = v8_isolate });
|
||||
const info = FunctionCallbackInfo{ .handle = info_handle };
|
||||
|
||||
var hs: js.HandleScope = undefined;
|
||||
|
||||
@@ -119,12 +119,22 @@ const ModuleEntry = struct {
|
||||
resolver_promise: ?js.Promise.Global = null,
|
||||
};
|
||||
|
||||
pub fn fromC(c_context: *const v8.Context) *Context {
|
||||
pub fn fromC(c_context: *const v8.Context) ?*Context {
|
||||
return @ptrCast(@alignCast(v8.v8__Context__GetAlignedPointerFromEmbedderData(c_context, 1)));
|
||||
}
|
||||
|
||||
pub fn fromIsolate(isolate: js.Isolate) *Context {
|
||||
return fromC(v8.v8__Isolate__GetCurrentContext(isolate.handle).?);
|
||||
/// Returns the Context and v8::Context for the given isolate.
|
||||
/// If the current context is from a destroyed Context (e.g., navigated-away iframe),
|
||||
/// falls back to the incumbent context (the calling context).
|
||||
pub fn fromIsolate(isolate: js.Isolate) struct { *Context, *const v8.Context } {
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(isolate.handle).?;
|
||||
if (fromC(v8_context)) |ctx| {
|
||||
return .{ ctx, v8_context };
|
||||
}
|
||||
// The current context's Context struct has been freed (e.g., iframe navigated away).
|
||||
// Fall back to the incumbent context (the calling context).
|
||||
const v8_incumbent = v8.v8__Isolate__GetIncumbentContext(isolate.handle).?;
|
||||
return .{ fromC(v8_incumbent).?, v8_incumbent };
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Context) void {
|
||||
@@ -155,6 +165,11 @@ pub fn deinit(self: *Context) void {
|
||||
|
||||
self.session.releaseOrigin(self.origin);
|
||||
|
||||
// Clear the embedder data so that if V8 keeps this context alive
|
||||
// (because objects created in it are still referenced), we don't
|
||||
// have a dangling pointer to our freed Context struct.
|
||||
v8.v8__Context__SetAlignedPointerInEmbedderData(entered.handle, 1, null);
|
||||
|
||||
v8.v8__Global__Reset(&self.handle);
|
||||
env.isolate.notifyContextDisposed();
|
||||
// There can be other tasks associated with this context that we need to
|
||||
@@ -167,11 +182,11 @@ pub fn setOrigin(self: *Context, key: ?[]const u8) !void {
|
||||
const env = self.env;
|
||||
const isolate = env.isolate;
|
||||
|
||||
lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc });
|
||||
|
||||
const origin = try self.session.getOrCreateOrigin(key);
|
||||
errdefer self.session.releaseOrigin(origin);
|
||||
|
||||
try self.origin.transferTo(origin);
|
||||
self.origin.deinit(env.app);
|
||||
try origin.takeover(self.origin);
|
||||
|
||||
self.origin = origin;
|
||||
|
||||
@@ -196,18 +211,20 @@ pub fn trackTemp(self: *Context, global: v8.Global) !void {
|
||||
}
|
||||
|
||||
pub fn weakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, bridge.Struct(@TypeOf(obj)).JsApi.Meta.finalizer.from_v8, v8.kParameter);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, resolved.finalizer_from_v8, v8.kParameter);
|
||||
}
|
||||
|
||||
pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -215,11 +232,12 @@ pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__ClearWeak(&fc.global);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, bridge.Struct(@TypeOf(obj)).JsApi.Meta.finalizer.from_v8, v8.kParameter);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, resolved.finalizer_from_v8, v8.kParameter);
|
||||
}
|
||||
|
||||
pub fn strongRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -251,6 +269,10 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type
|
||||
return l.toLocal(global);
|
||||
}
|
||||
|
||||
pub fn getIncumbent(self: *Context) *Page {
|
||||
return fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).?.page;
|
||||
}
|
||||
|
||||
pub fn stringToPersistedFunction(
|
||||
self: *Context,
|
||||
function_body: []const u8,
|
||||
@@ -302,15 +324,15 @@ pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local
|
||||
}
|
||||
|
||||
const owned_url = try arena.dupeZ(u8, url);
|
||||
if (cacheable and !gop.found_existing) {
|
||||
gop.key_ptr.* = owned_url;
|
||||
}
|
||||
const m = try compileModule(local, src, owned_url);
|
||||
|
||||
if (cacheable) {
|
||||
// compileModule is synchronous - nothing can modify the cache during compilation
|
||||
lp.assert(gop.value_ptr.module == null, "Context.module has module", .{});
|
||||
gop.value_ptr.module = try m.persist();
|
||||
if (!gop.found_existing) {
|
||||
gop.key_ptr.* = owned_url;
|
||||
}
|
||||
}
|
||||
|
||||
break :blk .{ m, owned_url };
|
||||
@@ -472,7 +494,7 @@ fn resolveModuleCallback(
|
||||
) callconv(.c) ?*const v8.Module {
|
||||
_ = import_attributes;
|
||||
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
const local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
@@ -505,7 +527,7 @@ pub fn dynamicModuleCallback(
|
||||
_ = host_defined_options;
|
||||
_ = import_attrs;
|
||||
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
const local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
@@ -523,13 +545,13 @@ pub fn dynamicModuleCallback(
|
||||
|
||||
break :blk js.String.toSliceZ(.{ .local = &local, .handle = resource_name.? }) catch |err| {
|
||||
log.err(.app, "OOM", .{ .err = err, .src = "dynamicModuleCallback1" });
|
||||
return @constCast((local.rejectPromise("Out of memory") catch return null).handle);
|
||||
return @constCast(local.rejectPromise(.{ .generic_error = "Out of memory" }).handle);
|
||||
};
|
||||
};
|
||||
|
||||
const specifier = js.String.toSliceZ(.{ .local = &local, .handle = v8_specifier.? }) catch |err| {
|
||||
log.err(.app, "OOM", .{ .err = err, .src = "dynamicModuleCallback2" });
|
||||
return @constCast((local.rejectPromise("Out of memory") catch return null).handle);
|
||||
return @constCast(local.rejectPromise(.{ .generic_error = "Out of memory" }).handle);
|
||||
};
|
||||
|
||||
const normalized_specifier = self.script_manager.?.resolveSpecifier(
|
||||
@@ -538,21 +560,21 @@ pub fn dynamicModuleCallback(
|
||||
specifier,
|
||||
) catch |err| {
|
||||
log.err(.app, "OOM", .{ .err = err, .src = "dynamicModuleCallback3" });
|
||||
return @constCast((local.rejectPromise("Out of memory") catch return null).handle);
|
||||
return @constCast(local.rejectPromise(.{ .generic_error = "Out of memory" }).handle);
|
||||
};
|
||||
|
||||
const promise = self._dynamicModuleCallback(normalized_specifier, resource, &local) catch |err| blk: {
|
||||
log.err(.js, "dynamic module callback", .{
|
||||
.err = err,
|
||||
});
|
||||
break :blk local.rejectPromise("Failed to load module") catch return null;
|
||||
break :blk local.rejectPromise(.{ .generic_error = "Out of memory" });
|
||||
};
|
||||
return @constCast(promise.handle);
|
||||
}
|
||||
|
||||
pub fn metaObjectCallback(c_context: ?*v8.Context, c_module: ?*v8.Module, c_meta: ?*v8.Value) callconv(.c) void {
|
||||
// @HandleScope implement this without a fat context/local..
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
var local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
|
||||
@@ -326,7 +326,7 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
.script_manager = &page._script_manager,
|
||||
.scheduler = .init(context_arena),
|
||||
};
|
||||
try context.origin.identity_map.putNoClobber(context_arena, @intFromPtr(page.window), global_global);
|
||||
try context.origin.identity_map.putNoClobber(origin.arena, @intFromPtr(page.window), global_global);
|
||||
|
||||
// Store a pointer to our context inside the v8 context so that, given
|
||||
// a v8 context, we can get our context out
|
||||
@@ -382,8 +382,7 @@ pub fn runMicrotasks(self: *Env) void {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runMacrotasks(self: *Env) !?u64 {
|
||||
var ms_to_next_task: ?u64 = null;
|
||||
pub fn runMacrotasks(self: *Env) !void {
|
||||
for (self.contexts[0..self.context_count]) |ctx| {
|
||||
if (comptime builtin.is_test == false) {
|
||||
// I hate this comptime check as much as you do. But we have tests
|
||||
@@ -398,13 +397,17 @@ pub fn runMacrotasks(self: *Env) !?u64 {
|
||||
var hs: js.HandleScope = undefined;
|
||||
const entered = ctx.enter(&hs);
|
||||
defer entered.exit();
|
||||
|
||||
const ms = (try ctx.scheduler.run()) orelse continue;
|
||||
if (ms_to_next_task == null or ms < ms_to_next_task.?) {
|
||||
ms_to_next_task = ms;
|
||||
}
|
||||
try ctx.scheduler.run();
|
||||
}
|
||||
return ms_to_next_task;
|
||||
}
|
||||
|
||||
pub fn msToNextMacrotask(self: *Env) ?u64 {
|
||||
var next_task: u64 = std.math.maxInt(u64);
|
||||
for (self.contexts[0..self.context_count]) |ctx| {
|
||||
const candidate = ctx.scheduler.msToNextHigh() orelse continue;
|
||||
next_task = @min(candidate, next_task);
|
||||
}
|
||||
return if (next_task == std.math.maxInt(u64)) null else next_task;
|
||||
}
|
||||
|
||||
pub fn pumpMessageLoop(self: *const Env) void {
|
||||
@@ -492,20 +495,25 @@ pub fn terminate(self: *const Env) void {
|
||||
}
|
||||
|
||||
fn promiseRejectCallback(message_handle: v8.PromiseRejectMessage) callconv(.c) void {
|
||||
const promise_event = v8.v8__PromiseRejectMessage__GetEvent(&message_handle);
|
||||
if (promise_event != v8.kPromiseRejectWithNoHandler and promise_event != v8.kPromiseHandlerAddedAfterReject) {
|
||||
return;
|
||||
}
|
||||
|
||||
const promise_handle = v8.v8__PromiseRejectMessage__GetPromise(&message_handle).?;
|
||||
const v8_isolate = v8.v8__Object__GetIsolate(@ptrCast(promise_handle)).?;
|
||||
const js_isolate = js.Isolate{ .handle = v8_isolate };
|
||||
const ctx = Context.fromIsolate(js_isolate);
|
||||
const isolate = js.Isolate{ .handle = v8_isolate };
|
||||
const ctx, const v8_context = Context.fromIsolate(isolate);
|
||||
|
||||
const local = js.Local{
|
||||
.ctx = ctx,
|
||||
.isolate = js_isolate,
|
||||
.handle = v8.v8__Isolate__GetCurrentContext(v8_isolate).?,
|
||||
.isolate = isolate,
|
||||
.handle = v8_context,
|
||||
.call_arena = ctx.call_arena,
|
||||
};
|
||||
|
||||
const page = ctx.page;
|
||||
page.window.unhandledPromiseRejection(.{
|
||||
page.window.unhandledPromiseRejection(promise_event == v8.kPromiseRejectWithNoHandler, .{
|
||||
.local = &local,
|
||||
.handle = &message_handle,
|
||||
}, page) catch |err| {
|
||||
|
||||
@@ -78,6 +78,21 @@ pub fn createError(self: Isolate, msg: []const u8) *const v8.Value {
|
||||
return v8.v8__Exception__Error(message).?;
|
||||
}
|
||||
|
||||
pub fn createRangeError(self: Isolate, msg: []const u8) *const v8.Value {
|
||||
const message = self.initStringHandle(msg);
|
||||
return v8.v8__Exception__RangeError(message).?;
|
||||
}
|
||||
|
||||
pub fn createReferenceError(self: Isolate, msg: []const u8) *const v8.Value {
|
||||
const message = self.initStringHandle(msg);
|
||||
return v8.v8__Exception__ReferenceError(message).?;
|
||||
}
|
||||
|
||||
pub fn createSyntaxError(self: Isolate, msg: []const u8) *const v8.Value {
|
||||
const message = self.initStringHandle(msg);
|
||||
return v8.v8__Exception__SyntaxError(message).?;
|
||||
}
|
||||
|
||||
pub fn createTypeError(self: Isolate, msg: []const u8) *const v8.Value {
|
||||
const message = self.initStringHandle(msg);
|
||||
return v8.v8__Exception__TypeError(message).?;
|
||||
|
||||
@@ -202,20 +202,20 @@ pub fn compileAndRun(self: *const Local, src: []const u8, name: ?[]const u8) !js
|
||||
// we can just grab it from the identity_map)
|
||||
pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object, value: anytype) !js.Object {
|
||||
const ctx = self.ctx;
|
||||
const arena = ctx.arena;
|
||||
const origin_arena = ctx.origin.arena;
|
||||
|
||||
const T = @TypeOf(value);
|
||||
switch (@typeInfo(T)) {
|
||||
.@"struct" => {
|
||||
// Struct, has to be placed on the heap
|
||||
const heap = try arena.create(T);
|
||||
const heap = try origin_arena.create(T);
|
||||
heap.* = value;
|
||||
return self.mapZigInstanceToJs(js_obj_handle, heap);
|
||||
},
|
||||
.pointer => |ptr| {
|
||||
const resolved = resolveValue(value);
|
||||
|
||||
const gop = try ctx.origin.identity_map.getOrPut(arena, @intFromPtr(resolved.ptr));
|
||||
const gop = try ctx.origin.addIdentity(@intFromPtr(resolved.ptr));
|
||||
if (gop.found_existing) {
|
||||
// we've seen this instance before, return the same object
|
||||
return (js.Object.Global{ .handle = gop.value_ptr.* }).local(self);
|
||||
@@ -244,7 +244,7 @@ pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object,
|
||||
// The TAO contains the pointer to our Zig instance as
|
||||
// well as any meta data we'll need to use it later.
|
||||
// See the TaggedOpaque struct for more details.
|
||||
const tao = try arena.create(TaggedOpaque);
|
||||
const tao = try origin_arena.create(TaggedOpaque);
|
||||
tao.* = .{
|
||||
.value = resolved.ptr,
|
||||
.prototype_chain = resolved.prototype_chain.ptr,
|
||||
@@ -1206,9 +1206,15 @@ pub fn stackTrace(self: *const Local) !?[]const u8 {
|
||||
}
|
||||
|
||||
// == Promise Helpers ==
|
||||
pub fn rejectPromise(self: *const Local, value: anytype) !js.Promise {
|
||||
pub fn rejectPromise(self: *const Local, err: js.PromiseResolver.RejectError) js.Promise {
|
||||
var resolver = js.PromiseResolver.init(self);
|
||||
resolver.reject("Local.rejectPromise", value);
|
||||
resolver.rejectError("Local.rejectPromise", err);
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
pub fn rejectErrorPromise(self: *const Local, value: js.PromiseResolver.RejectError) !js.Promise {
|
||||
var resolver = js.PromiseResolver.init(self);
|
||||
resolver.rejectError("Local.rejectPromise", value);
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
|
||||
@@ -68,6 +68,8 @@ temps: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
// if v8 hasn't called the finalizer directly itself.
|
||||
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
|
||||
|
||||
taken_over: std.ArrayList(*Origin),
|
||||
|
||||
pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
|
||||
const arena = try app.arena_pool.acquire();
|
||||
errdefer app.arena_pool.release(arena);
|
||||
@@ -86,14 +88,19 @@ pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
|
||||
.rc = 1,
|
||||
.arena = arena,
|
||||
.key = owned_key,
|
||||
.globals = .empty,
|
||||
.temps = .empty,
|
||||
.globals = .empty,
|
||||
.taken_over = .empty,
|
||||
.security_token = token_global,
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Origin, app: *App) void {
|
||||
for (self.taken_over.items) |o| {
|
||||
o.deinit(app);
|
||||
}
|
||||
|
||||
// Call finalizers before releasing anything
|
||||
{
|
||||
var it = self.finalizer_callbacks.valueIterator();
|
||||
@@ -129,6 +136,19 @@ pub fn trackGlobal(self: *Origin, global: v8.Global) !void {
|
||||
return self.globals.append(self.arena, global);
|
||||
}
|
||||
|
||||
pub const IdentityResult = struct {
|
||||
value_ptr: *v8.Global,
|
||||
found_existing: bool,
|
||||
};
|
||||
|
||||
pub fn addIdentity(self: *Origin, ptr: usize) !IdentityResult {
|
||||
const gop = try self.identity_map.getOrPut(self.arena, ptr);
|
||||
return .{
|
||||
.value_ptr = gop.value_ptr,
|
||||
.found_existing = gop.found_existing,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn trackTemp(self: *Origin, global: v8.Global) !void {
|
||||
return self.temps.put(self.arena, global.data_ptr, global);
|
||||
}
|
||||
@@ -183,42 +203,44 @@ pub fn createFinalizerCallback(
|
||||
return fc;
|
||||
}
|
||||
|
||||
pub fn transferTo(self: *Origin, dest: *Origin) !void {
|
||||
const arena = dest.arena;
|
||||
pub fn takeover(self: *Origin, original: *Origin) !void {
|
||||
const arena = self.arena;
|
||||
|
||||
try dest.globals.ensureUnusedCapacity(arena, self.globals.items.len);
|
||||
for (self.globals.items) |obj| {
|
||||
dest.globals.appendAssumeCapacity(obj);
|
||||
try self.globals.ensureUnusedCapacity(arena, original.globals.items.len);
|
||||
for (original.globals.items) |obj| {
|
||||
self.globals.appendAssumeCapacity(obj);
|
||||
}
|
||||
self.globals.clearRetainingCapacity();
|
||||
original.globals.clearRetainingCapacity();
|
||||
|
||||
{
|
||||
try dest.temps.ensureUnusedCapacity(arena, self.temps.count());
|
||||
var it = self.temps.iterator();
|
||||
try self.temps.ensureUnusedCapacity(arena, original.temps.count());
|
||||
var it = original.temps.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try dest.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
try self.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.temps.clearRetainingCapacity();
|
||||
original.temps.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try dest.finalizer_callbacks.ensureUnusedCapacity(arena, self.finalizer_callbacks.count());
|
||||
var it = self.finalizer_callbacks.iterator();
|
||||
try self.finalizer_callbacks.ensureUnusedCapacity(arena, original.finalizer_callbacks.count());
|
||||
var it = original.finalizer_callbacks.iterator();
|
||||
while (it.next()) |kv| {
|
||||
kv.value_ptr.*.origin = dest;
|
||||
try dest.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
kv.value_ptr.*.origin = self;
|
||||
try self.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.finalizer_callbacks.clearRetainingCapacity();
|
||||
original.finalizer_callbacks.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try dest.identity_map.ensureUnusedCapacity(arena, self.identity_map.count());
|
||||
var it = self.identity_map.iterator();
|
||||
try self.identity_map.ensureUnusedCapacity(arena, original.identity_map.count());
|
||||
var it = original.identity_map.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try dest.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
try self.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.identity_map.clearRetainingCapacity();
|
||||
original.identity_map.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
try self.taken_over.append(self.arena, original);
|
||||
}
|
||||
|
||||
// A type that has a finalizer can have its finalizer called one of two ways.
|
||||
|
||||
@@ -18,8 +18,11 @@
|
||||
|
||||
const js = @import("js.zig");
|
||||
const v8 = js.v8;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const DOMException = @import("../webapi/DOMException.zig");
|
||||
|
||||
const PromiseResolver = @This();
|
||||
|
||||
local: *const js.Local,
|
||||
@@ -63,6 +66,43 @@ pub fn reject(self: PromiseResolver, comptime source: []const u8, value: anytype
|
||||
};
|
||||
}
|
||||
|
||||
pub const RejectError = union(enum) {
|
||||
/// Not to be confused with `DOMException`; this is bare `Error`.
|
||||
generic_error: []const u8,
|
||||
range_error: []const u8,
|
||||
reference_error: []const u8,
|
||||
syntax_error: []const u8,
|
||||
type_error: []const u8,
|
||||
/// DOM exceptions are unknown to V8, belongs to web standards.
|
||||
dom_exception: struct { err: anyerror },
|
||||
};
|
||||
|
||||
/// Rejects the promise w/ an error object.
|
||||
pub fn rejectError(
|
||||
self: PromiseResolver,
|
||||
comptime source: []const u8,
|
||||
err: RejectError,
|
||||
) void {
|
||||
const handle = switch (err) {
|
||||
.generic_error => |msg| self.local.isolate.createError(msg),
|
||||
.range_error => |msg| self.local.isolate.createRangeError(msg),
|
||||
.reference_error => |msg| self.local.isolate.createReferenceError(msg),
|
||||
.syntax_error => |msg| self.local.isolate.createSyntaxError(msg),
|
||||
.type_error => |msg| self.local.isolate.createTypeError(msg),
|
||||
// "Exceptional".
|
||||
.dom_exception => |exception| {
|
||||
self._reject(DOMException.fromError(exception.err) orelse unreachable) catch |reject_err| {
|
||||
log.err(.bug, "rejectDomException", .{ .source = source, .err = reject_err, .persistent = false });
|
||||
};
|
||||
return;
|
||||
},
|
||||
};
|
||||
|
||||
self._reject(js.Value{ .handle = handle, .local = self.local }) catch |reject_err| {
|
||||
log.err(.bug, "rejectError", .{ .source = source, .err = reject_err, .persistent = false });
|
||||
};
|
||||
}
|
||||
|
||||
fn _reject(self: PromiseResolver, value: anytype) !void {
|
||||
const local = self.local;
|
||||
const js_val = try local.zigValueToJs(value, .{});
|
||||
|
||||
@@ -74,9 +74,10 @@ pub fn add(self: *Scheduler, ctx: *anyopaque, cb: Callback, run_in_ms: u32, opts
|
||||
});
|
||||
}
|
||||
|
||||
pub fn run(self: *Scheduler) !?u64 {
|
||||
_ = try self.runQueue(&self.low_priority);
|
||||
return self.runQueue(&self.high_priority);
|
||||
pub fn run(self: *Scheduler) !void {
|
||||
const now = milliTimestamp(.monotonic);
|
||||
try self.runQueue(&self.low_priority, now);
|
||||
try self.runQueue(&self.high_priority, now);
|
||||
}
|
||||
|
||||
pub fn hasReadyTasks(self: *Scheduler) bool {
|
||||
@@ -84,16 +85,23 @@ pub fn hasReadyTasks(self: *Scheduler) bool {
|
||||
return queueuHasReadyTask(&self.low_priority, now) or queueuHasReadyTask(&self.high_priority, now);
|
||||
}
|
||||
|
||||
fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
|
||||
if (queue.count() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn msToNextHigh(self: *Scheduler) ?u64 {
|
||||
const task = self.high_priority.peek() orelse return null;
|
||||
const now = milliTimestamp(.monotonic);
|
||||
if (task.run_at <= now) {
|
||||
return 0;
|
||||
}
|
||||
return @intCast(task.run_at - now);
|
||||
}
|
||||
|
||||
fn runQueue(self: *Scheduler, queue: *Queue, now: u64) !void {
|
||||
if (queue.count() == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
while (queue.peek()) |*task_| {
|
||||
if (task_.run_at > now) {
|
||||
return @intCast(task_.run_at - now);
|
||||
return;
|
||||
}
|
||||
var task = queue.remove();
|
||||
if (comptime IS_DEBUG) {
|
||||
@@ -114,7 +122,7 @@ fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
|
||||
try self.low_priority.add(task);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
fn queueuHasReadyTask(queue: *Queue, now: u64) bool {
|
||||
|
||||
@@ -56,7 +56,7 @@ fn _toSlice(self: String, comptime null_terminate: bool, allocator: Allocator) !
|
||||
|
||||
pub fn toSSO(self: String, comptime global: bool) !(if (global) SSO.Global else SSO) {
|
||||
if (comptime global) {
|
||||
return .{ .str = try self.toSSOWithAlloc(self.local.ctx.arena) };
|
||||
return .{ .str = try self.toSSOWithAlloc(self.local.ctx.origin.arena) };
|
||||
}
|
||||
return self.toSSOWithAlloc(self.local.call_arena);
|
||||
}
|
||||
|
||||
@@ -245,6 +245,46 @@ pub fn toJson(self: Value, allocator: Allocator) ![]u8 {
|
||||
return js.String.toSliceWithAlloc(.{ .local = local, .handle = str_handle }, allocator);
|
||||
}
|
||||
|
||||
// Currently does not support host objects (Blob, File, etc.) or transferables
|
||||
// which require delegate callbacks to be implemented.
|
||||
pub fn structuredClone(self: Value) !Value {
|
||||
const local = self.local;
|
||||
const v8_context = local.handle;
|
||||
const v8_isolate = local.isolate.handle;
|
||||
|
||||
const size, const data = blk: {
|
||||
const serializer = v8.v8__ValueSerializer__New(v8_isolate, null) orelse return error.JsException;
|
||||
defer v8.v8__ValueSerializer__DELETE(serializer);
|
||||
|
||||
var write_result: v8.MaybeBool = undefined;
|
||||
v8.v8__ValueSerializer__WriteHeader(serializer);
|
||||
v8.v8__ValueSerializer__WriteValue(serializer, v8_context, self.handle, &write_result);
|
||||
if (!write_result.has_value or !write_result.value) {
|
||||
return error.JsException;
|
||||
}
|
||||
|
||||
var size: usize = undefined;
|
||||
const data = v8.v8__ValueSerializer__Release(serializer, &size) orelse return error.JsException;
|
||||
break :blk .{ size, data };
|
||||
};
|
||||
|
||||
defer v8.v8__ValueSerializer__FreeBuffer(data);
|
||||
|
||||
const cloned_handle = blk: {
|
||||
const deserializer = v8.v8__ValueDeserializer__New(v8_isolate, data, size, null) orelse return error.JsException;
|
||||
defer v8.v8__ValueDeserializer__DELETE(deserializer);
|
||||
|
||||
var read_header_result: v8.MaybeBool = undefined;
|
||||
v8.v8__ValueDeserializer__ReadHeader(deserializer, v8_context, &read_header_result);
|
||||
if (!read_header_result.has_value or !read_header_result.value) {
|
||||
return error.JsException;
|
||||
}
|
||||
break :blk v8.v8__ValueDeserializer__ReadValue(deserializer, v8_context) orelse return error.JsException;
|
||||
};
|
||||
|
||||
return .{ .local = local, .handle = cloned_handle };
|
||||
}
|
||||
|
||||
pub fn persist(self: Value) !Global {
|
||||
return self._persist(true);
|
||||
}
|
||||
|
||||
@@ -725,6 +725,8 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/collections.zig"),
|
||||
@import("../webapi/Console.zig"),
|
||||
@import("../webapi/Crypto.zig"),
|
||||
@import("../webapi/Permissions.zig"),
|
||||
@import("../webapi/StorageManager.zig"),
|
||||
@import("../webapi/CSS.zig"),
|
||||
@import("../webapi/css/CSSRule.zig"),
|
||||
@import("../webapi/css/CSSRuleList.zig"),
|
||||
@@ -848,6 +850,7 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/event/FocusEvent.zig"),
|
||||
@import("../webapi/event/WheelEvent.zig"),
|
||||
@import("../webapi/event/TextEvent.zig"),
|
||||
@import("../webapi/event/InputEvent.zig"),
|
||||
@import("../webapi/event/PromiseRejectionEvent.zig"),
|
||||
@import("../webapi/MessageChannel.zig"),
|
||||
@import("../webapi/MessagePort.zig"),
|
||||
@@ -885,6 +888,7 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/IdleDeadline.zig"),
|
||||
@import("../webapi/Blob.zig"),
|
||||
@import("../webapi/File.zig"),
|
||||
@import("../webapi/FileList.zig"),
|
||||
@import("../webapi/FileReader.zig"),
|
||||
@import("../webapi/Screen.zig"),
|
||||
@import("../webapi/VisualViewport.zig"),
|
||||
|
||||
@@ -24,6 +24,7 @@ const TreeWalker = @import("webapi/TreeWalker.zig");
|
||||
const CData = @import("webapi/CData.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Node = @import("webapi/Node.zig");
|
||||
const isAllWhitespace = @import("../string.zig").isAllWhitespace;
|
||||
|
||||
pub const Opts = struct {
|
||||
// Options for future customization (e.g., dialect)
|
||||
@@ -46,13 +47,6 @@ const State = struct {
|
||||
last_char_was_newline: bool = true,
|
||||
};
|
||||
|
||||
fn isBlock(tag: Element.Tag) bool {
|
||||
return switch (tag) {
|
||||
.p, .div, .section, .article, .main, .header, .footer, .nav, .aside, .h1, .h2, .h3, .h4, .h5, .h6, .ul, .ol, .blockquote, .pre, .table, .hr => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
fn shouldAddSpacing(tag: Element.Tag) bool {
|
||||
return switch (tag) {
|
||||
.p, .h1, .h2, .h3, .h4, .h5, .h6, .blockquote, .pre, .table => true,
|
||||
@@ -99,26 +93,18 @@ fn isSignificantText(node: *Node) bool {
|
||||
}
|
||||
|
||||
fn isVisibleElement(el: *Element) bool {
|
||||
return switch (el.getTag()) {
|
||||
.script, .style, .noscript, .template, .head, .meta, .link, .title, .svg => false,
|
||||
else => true,
|
||||
};
|
||||
const tag = el.getTag();
|
||||
return !tag.isMetadata() and tag != .svg;
|
||||
}
|
||||
|
||||
fn getAnchorLabel(el: *Element) ?[]const u8 {
|
||||
return el.getAttributeSafe(comptime .wrap("aria-label")) orelse el.getAttributeSafe(comptime .wrap("title"));
|
||||
}
|
||||
|
||||
fn isAllWhitespace(text: []const u8) bool {
|
||||
return for (text) |c| {
|
||||
if (!std.ascii.isWhitespace(c)) break false;
|
||||
} else true;
|
||||
}
|
||||
|
||||
fn hasBlockDescendant(root: *Node) bool {
|
||||
var tw = TreeWalker.FullExcludeSelf.Elements.init(root, .{});
|
||||
while (tw.next()) |el| {
|
||||
if (isBlock(el.getTag())) return true;
|
||||
if (el.getTag().isBlock()) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -138,352 +124,362 @@ fn hasVisibleContent(root: *Node) bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
fn ensureNewline(state: *State, writer: *std.Io.Writer) !void {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
state.last_char_was_newline = true;
|
||||
const Context = struct {
|
||||
state: State,
|
||||
writer: *std.Io.Writer,
|
||||
page: *Page,
|
||||
|
||||
fn ensureNewline(self: *Context) !void {
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte('\n');
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render(self: *Context, node: *Node) error{WriteFailed}!void {
|
||||
switch (node._type) {
|
||||
.document, .document_fragment => {
|
||||
try self.renderChildren(node);
|
||||
},
|
||||
.element => |el| {
|
||||
try self.renderElement(el);
|
||||
},
|
||||
.cdata => |cd| {
|
||||
if (node.is(Node.CData.Text)) |_| {
|
||||
var text = cd.getData().str();
|
||||
if (self.state.pre_node) |pre| {
|
||||
if (node.parentNode() == pre and node.nextSibling() == null) {
|
||||
text = std.mem.trimRight(u8, text, " \t\r\n");
|
||||
}
|
||||
}
|
||||
try self.renderText(text);
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn renderChildren(self: *Context, parent: *Node) !void {
|
||||
var it = parent.childrenIterator();
|
||||
while (it.next()) |child| {
|
||||
try self.render(child);
|
||||
}
|
||||
}
|
||||
|
||||
fn renderElement(self: *Context, el: *Element) !void {
|
||||
const tag = el.getTag();
|
||||
|
||||
if (!isVisibleElement(el)) return;
|
||||
|
||||
// --- Opening Tag Logic ---
|
||||
|
||||
// Ensure block elements start on a new line (double newline for paragraphs etc)
|
||||
if (tag.isBlock() and !self.state.in_table) {
|
||||
try self.ensureNewline();
|
||||
if (shouldAddSpacing(tag)) {
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
} else if (tag == .li or tag == .tr) {
|
||||
try self.ensureNewline();
|
||||
}
|
||||
|
||||
// Prefixes
|
||||
switch (tag) {
|
||||
.h1 => try self.writer.writeAll("# "),
|
||||
.h2 => try self.writer.writeAll("## "),
|
||||
.h3 => try self.writer.writeAll("### "),
|
||||
.h4 => try self.writer.writeAll("#### "),
|
||||
.h5 => try self.writer.writeAll("##### "),
|
||||
.h6 => try self.writer.writeAll("###### "),
|
||||
.ul => {
|
||||
if (self.state.list_depth < self.state.list_stack.len) {
|
||||
self.state.list_stack[self.state.list_depth] = .{ .type = .unordered, .index = 0 };
|
||||
self.state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.ol => {
|
||||
if (self.state.list_depth < self.state.list_stack.len) {
|
||||
self.state.list_stack[self.state.list_depth] = .{ .type = .ordered, .index = 1 };
|
||||
self.state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.li => {
|
||||
const indent = if (self.state.list_depth > 0) self.state.list_depth - 1 else 0;
|
||||
for (0..indent) |_| try self.writer.writeAll(" ");
|
||||
|
||||
if (self.state.list_depth > 0 and self.state.list_stack[self.state.list_depth - 1].type == .ordered) {
|
||||
const current_list = &self.state.list_stack[self.state.list_depth - 1];
|
||||
try self.writer.print("{d}. ", .{current_list.index});
|
||||
current_list.index += 1;
|
||||
} else {
|
||||
try self.writer.writeAll("- ");
|
||||
}
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.table => {
|
||||
self.state.in_table = true;
|
||||
self.state.table_row_index = 0;
|
||||
self.state.table_col_count = 0;
|
||||
},
|
||||
.tr => {
|
||||
self.state.table_col_count = 0;
|
||||
try self.writer.writeByte('|');
|
||||
},
|
||||
.td, .th => {
|
||||
// Note: leading pipe handled by previous cell closing or tr opening
|
||||
self.state.last_char_was_newline = false;
|
||||
try self.writer.writeByte(' ');
|
||||
},
|
||||
.blockquote => {
|
||||
try self.writer.writeAll("> ");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.pre => {
|
||||
try self.writer.writeAll("```\n");
|
||||
self.state.pre_node = el.asNode();
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (self.state.pre_node == null) {
|
||||
try self.writer.writeByte('`');
|
||||
self.state.in_code = true;
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try self.writer.writeAll("**");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try self.writer.writeAll("*");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try self.writer.writeAll("~~");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.hr => {
|
||||
try self.writer.writeAll("---\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
return;
|
||||
},
|
||||
.br => {
|
||||
if (self.state.in_table) {
|
||||
try self.writer.writeByte(' ');
|
||||
} else {
|
||||
try self.writer.writeByte('\n');
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
},
|
||||
.img => {
|
||||
try self.writer.writeAll(";
|
||||
if (el.getAttributeSafe(comptime .wrap("src"))) |src| {
|
||||
const absolute_src = URL.resolve(self.page.call_arena, self.page.base(), src, .{ .encode = true }) catch src;
|
||||
try self.writer.writeAll(absolute_src);
|
||||
}
|
||||
try self.writer.writeAll(")");
|
||||
self.state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.anchor => {
|
||||
const has_content = hasVisibleContent(el.asNode());
|
||||
const label = getAnchorLabel(el);
|
||||
const href_raw = el.getAttributeSafe(comptime .wrap("href"));
|
||||
|
||||
if (!has_content and label == null and href_raw == null) return;
|
||||
|
||||
const has_block = hasBlockDescendant(el.asNode());
|
||||
const href = if (href_raw) |h| URL.resolve(self.page.call_arena, self.page.base(), h, .{ .encode = true }) catch h else null;
|
||||
|
||||
if (has_block) {
|
||||
try self.renderChildren(el.asNode());
|
||||
if (href) |h| {
|
||||
if (!self.state.last_char_was_newline) try self.writer.writeByte('\n');
|
||||
try self.writer.writeAll("([](");
|
||||
try self.writer.writeAll(h);
|
||||
try self.writer.writeAll("))\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (isStandaloneAnchor(el)) {
|
||||
if (!self.state.last_char_was_newline) try self.writer.writeByte('\n');
|
||||
try self.writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try self.renderChildren(el.asNode());
|
||||
} else {
|
||||
try self.writer.writeAll(label orelse "");
|
||||
}
|
||||
try self.writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try self.writer.writeAll(h);
|
||||
}
|
||||
try self.writer.writeAll(")\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
return;
|
||||
}
|
||||
|
||||
try self.writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try self.renderChildren(el.asNode());
|
||||
} else {
|
||||
try self.writer.writeAll(label orelse "");
|
||||
}
|
||||
try self.writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try self.writer.writeAll(h);
|
||||
}
|
||||
try self.writer.writeByte(')');
|
||||
self.state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.input => {
|
||||
const type_attr = el.getAttributeSafe(comptime .wrap("type")) orelse return;
|
||||
if (std.ascii.eqlIgnoreCase(type_attr, "checkbox")) {
|
||||
const checked = el.getAttributeSafe(comptime .wrap("checked")) != null;
|
||||
try self.writer.writeAll(if (checked) "[x] " else "[ ] ");
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
return;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// --- Render Children ---
|
||||
try self.renderChildren(el.asNode());
|
||||
|
||||
// --- Closing Tag Logic ---
|
||||
|
||||
// Suffixes
|
||||
switch (tag) {
|
||||
.pre => {
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
try self.writer.writeAll("```\n");
|
||||
self.state.pre_node = null;
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (self.state.pre_node == null) {
|
||||
try self.writer.writeByte('`');
|
||||
self.state.in_code = false;
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try self.writer.writeAll("**");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try self.writer.writeAll("*");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try self.writer.writeAll("~~");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.blockquote => {},
|
||||
.ul, .ol => {
|
||||
if (self.state.list_depth > 0) self.state.list_depth -= 1;
|
||||
},
|
||||
.table => {
|
||||
self.state.in_table = false;
|
||||
},
|
||||
.tr => {
|
||||
try self.writer.writeByte('\n');
|
||||
if (self.state.table_row_index == 0) {
|
||||
try self.writer.writeByte('|');
|
||||
for (0..self.state.table_col_count) |_| {
|
||||
try self.writer.writeAll("---|");
|
||||
}
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
self.state.table_row_index += 1;
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.td, .th => {
|
||||
try self.writer.writeAll(" |");
|
||||
self.state.table_col_count += 1;
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// Post-block newlines
|
||||
if (tag.isBlock() and !self.state.in_table) {
|
||||
try self.ensureNewline();
|
||||
}
|
||||
}
|
||||
|
||||
fn renderText(self: *Context, text: []const u8) !void {
|
||||
if (text.len == 0) return;
|
||||
|
||||
if (self.state.pre_node) |_| {
|
||||
try self.writer.writeAll(text);
|
||||
self.state.last_char_was_newline = text[text.len - 1] == '\n';
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for pure whitespace
|
||||
if (isAllWhitespace(text)) {
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Collapse whitespace
|
||||
var it = std.mem.tokenizeAny(u8, text, " \t\n\r");
|
||||
var first = true;
|
||||
while (it.next()) |word| {
|
||||
if (!first or (!self.state.last_char_was_newline and std.ascii.isWhitespace(text[0]))) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
|
||||
try self.escape(word);
|
||||
self.state.last_char_was_newline = false;
|
||||
first = false;
|
||||
}
|
||||
|
||||
// Handle trailing whitespace from the original text
|
||||
if (!first and !self.state.last_char_was_newline and std.ascii.isWhitespace(text[text.len - 1])) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
}
|
||||
|
||||
fn escape(self: *Context, text: []const u8) !void {
|
||||
for (text) |c| {
|
||||
switch (c) {
|
||||
'\\', '`', '*', '_', '{', '}', '[', ']', '(', ')', '#', '+', '-', '!', '|' => {
|
||||
try self.writer.writeByte('\\');
|
||||
try self.writer.writeByte(c);
|
||||
},
|
||||
else => try self.writer.writeByte(c),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub fn dump(node: *Node, opts: Opts, writer: *std.Io.Writer, page: *Page) !void {
|
||||
_ = opts;
|
||||
var state = State{};
|
||||
try render(node, &state, writer, page);
|
||||
if (!state.last_char_was_newline) {
|
||||
var ctx: Context = .{
|
||||
.state = .{},
|
||||
.writer = writer,
|
||||
.page = page,
|
||||
};
|
||||
try ctx.render(node);
|
||||
if (!ctx.state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
}
|
||||
|
||||
fn render(node: *Node, state: *State, writer: *std.Io.Writer, page: *Page) error{WriteFailed}!void {
|
||||
switch (node._type) {
|
||||
.document, .document_fragment => {
|
||||
try renderChildren(node, state, writer, page);
|
||||
},
|
||||
.element => |el| {
|
||||
try renderElement(el, state, writer, page);
|
||||
},
|
||||
.cdata => |cd| {
|
||||
if (node.is(Node.CData.Text)) |_| {
|
||||
var text = cd.getData().str();
|
||||
if (state.pre_node) |pre| {
|
||||
if (node.parentNode() == pre and node.nextSibling() == null) {
|
||||
text = std.mem.trimRight(u8, text, " \t\r\n");
|
||||
}
|
||||
}
|
||||
try renderText(text, state, writer);
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn renderChildren(parent: *Node, state: *State, writer: *std.Io.Writer, page: *Page) !void {
|
||||
var it = parent.childrenIterator();
|
||||
while (it.next()) |child| {
|
||||
try render(child, state, writer, page);
|
||||
}
|
||||
}
|
||||
|
||||
fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Page) !void {
|
||||
const tag = el.getTag();
|
||||
|
||||
if (!isVisibleElement(el)) return;
|
||||
|
||||
// --- Opening Tag Logic ---
|
||||
|
||||
// Ensure block elements start on a new line (double newline for paragraphs etc)
|
||||
if (isBlock(tag) and !state.in_table) {
|
||||
try ensureNewline(state, writer);
|
||||
if (shouldAddSpacing(tag)) {
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
} else if (tag == .li or tag == .tr) {
|
||||
try ensureNewline(state, writer);
|
||||
}
|
||||
|
||||
// Prefixes
|
||||
switch (tag) {
|
||||
.h1 => try writer.writeAll("# "),
|
||||
.h2 => try writer.writeAll("## "),
|
||||
.h3 => try writer.writeAll("### "),
|
||||
.h4 => try writer.writeAll("#### "),
|
||||
.h5 => try writer.writeAll("##### "),
|
||||
.h6 => try writer.writeAll("###### "),
|
||||
.ul => {
|
||||
if (state.list_depth < state.list_stack.len) {
|
||||
state.list_stack[state.list_depth] = .{ .type = .unordered, .index = 0 };
|
||||
state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.ol => {
|
||||
if (state.list_depth < state.list_stack.len) {
|
||||
state.list_stack[state.list_depth] = .{ .type = .ordered, .index = 1 };
|
||||
state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.li => {
|
||||
const indent = if (state.list_depth > 0) state.list_depth - 1 else 0;
|
||||
for (0..indent) |_| try writer.writeAll(" ");
|
||||
|
||||
if (state.list_depth > 0 and state.list_stack[state.list_depth - 1].type == .ordered) {
|
||||
const current_list = &state.list_stack[state.list_depth - 1];
|
||||
try writer.print("{d}. ", .{current_list.index});
|
||||
current_list.index += 1;
|
||||
} else {
|
||||
try writer.writeAll("- ");
|
||||
}
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.table => {
|
||||
state.in_table = true;
|
||||
state.table_row_index = 0;
|
||||
state.table_col_count = 0;
|
||||
},
|
||||
.tr => {
|
||||
state.table_col_count = 0;
|
||||
try writer.writeByte('|');
|
||||
},
|
||||
.td, .th => {
|
||||
// Note: leading pipe handled by previous cell closing or tr opening
|
||||
state.last_char_was_newline = false;
|
||||
try writer.writeByte(' ');
|
||||
},
|
||||
.blockquote => {
|
||||
try writer.writeAll("> ");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.pre => {
|
||||
try writer.writeAll("```\n");
|
||||
state.pre_node = el.asNode();
|
||||
state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (state.pre_node == null) {
|
||||
try writer.writeByte('`');
|
||||
state.in_code = true;
|
||||
state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try writer.writeAll("**");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try writer.writeAll("*");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try writer.writeAll("~~");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.hr => {
|
||||
try writer.writeAll("---\n");
|
||||
state.last_char_was_newline = true;
|
||||
return;
|
||||
},
|
||||
.br => {
|
||||
if (state.in_table) {
|
||||
try writer.writeByte(' ');
|
||||
} else {
|
||||
try writer.writeByte('\n');
|
||||
state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
},
|
||||
.img => {
|
||||
try writer.writeAll(";
|
||||
if (el.getAttributeSafe(comptime .wrap("src"))) |src| {
|
||||
const absolute_src = URL.resolve(page.call_arena, page.base(), src, .{ .encode = true }) catch src;
|
||||
try writer.writeAll(absolute_src);
|
||||
}
|
||||
try writer.writeAll(")");
|
||||
state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.anchor => {
|
||||
const has_content = hasVisibleContent(el.asNode());
|
||||
const label = getAnchorLabel(el);
|
||||
const href_raw = el.getAttributeSafe(comptime .wrap("href"));
|
||||
|
||||
if (!has_content and label == null and href_raw == null) return;
|
||||
|
||||
const has_block = hasBlockDescendant(el.asNode());
|
||||
const href = if (href_raw) |h| URL.resolve(page.call_arena, page.base(), h, .{ .encode = true }) catch h else null;
|
||||
|
||||
if (has_block) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
if (href) |h| {
|
||||
if (!state.last_char_was_newline) try writer.writeByte('\n');
|
||||
try writer.writeAll("([](");
|
||||
try writer.writeAll(h);
|
||||
try writer.writeAll("))\n");
|
||||
state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (isStandaloneAnchor(el)) {
|
||||
if (!state.last_char_was_newline) try writer.writeByte('\n');
|
||||
try writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
} else {
|
||||
try writer.writeAll(label orelse "");
|
||||
}
|
||||
try writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try writer.writeAll(h);
|
||||
}
|
||||
try writer.writeAll(")\n");
|
||||
state.last_char_was_newline = true;
|
||||
return;
|
||||
}
|
||||
|
||||
try writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
} else {
|
||||
try writer.writeAll(label orelse "");
|
||||
}
|
||||
try writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try writer.writeAll(h);
|
||||
}
|
||||
try writer.writeByte(')');
|
||||
state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.input => {
|
||||
const type_attr = el.getAttributeSafe(comptime .wrap("type")) orelse return;
|
||||
if (std.ascii.eqlIgnoreCase(type_attr, "checkbox")) {
|
||||
const checked = el.getAttributeSafe(comptime .wrap("checked")) != null;
|
||||
try writer.writeAll(if (checked) "[x] " else "[ ] ");
|
||||
state.last_char_was_newline = false;
|
||||
}
|
||||
return;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// --- Render Children ---
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
|
||||
// --- Closing Tag Logic ---
|
||||
|
||||
// Suffixes
|
||||
switch (tag) {
|
||||
.pre => {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
try writer.writeAll("```\n");
|
||||
state.pre_node = null;
|
||||
state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (state.pre_node == null) {
|
||||
try writer.writeByte('`');
|
||||
state.in_code = false;
|
||||
state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try writer.writeAll("**");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try writer.writeAll("*");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try writer.writeAll("~~");
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
.blockquote => {},
|
||||
.ul, .ol => {
|
||||
if (state.list_depth > 0) state.list_depth -= 1;
|
||||
},
|
||||
.table => {
|
||||
state.in_table = false;
|
||||
},
|
||||
.tr => {
|
||||
try writer.writeByte('\n');
|
||||
if (state.table_row_index == 0) {
|
||||
try writer.writeByte('|');
|
||||
for (0..state.table_col_count) |_| {
|
||||
try writer.writeAll("---|");
|
||||
}
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
state.table_row_index += 1;
|
||||
state.last_char_was_newline = true;
|
||||
},
|
||||
.td, .th => {
|
||||
try writer.writeAll(" |");
|
||||
state.table_col_count += 1;
|
||||
state.last_char_was_newline = false;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// Post-block newlines
|
||||
if (isBlock(tag) and !state.in_table) {
|
||||
try ensureNewline(state, writer);
|
||||
}
|
||||
}
|
||||
|
||||
fn renderText(text: []const u8, state: *State, writer: *std.Io.Writer) !void {
|
||||
if (text.len == 0) return;
|
||||
|
||||
if (state.pre_node) |_| {
|
||||
try writer.writeAll(text);
|
||||
state.last_char_was_newline = text[text.len - 1] == '\n';
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for pure whitespace
|
||||
if (isAllWhitespace(text)) {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte(' ');
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Collapse whitespace
|
||||
var it = std.mem.tokenizeAny(u8, text, " \t\n\r");
|
||||
var first = true;
|
||||
while (it.next()) |word| {
|
||||
if (!first or (!state.last_char_was_newline and std.ascii.isWhitespace(text[0]))) {
|
||||
try writer.writeByte(' ');
|
||||
}
|
||||
|
||||
try escapeMarkdown(writer, word);
|
||||
state.last_char_was_newline = false;
|
||||
first = false;
|
||||
}
|
||||
|
||||
// Handle trailing whitespace from the original text
|
||||
if (!first and !state.last_char_was_newline and std.ascii.isWhitespace(text[text.len - 1])) {
|
||||
try writer.writeByte(' ');
|
||||
}
|
||||
}
|
||||
|
||||
fn escapeMarkdown(writer: *std.Io.Writer, text: []const u8) !void {
|
||||
for (text) |c| {
|
||||
switch (c) {
|
||||
'\\', '`', '*', '_', '{', '}', '[', ']', '(', ')', '#', '+', '-', '!', '|' => {
|
||||
try writer.writeByte('\\');
|
||||
try writer.writeByte(c);
|
||||
},
|
||||
else => try writer.writeByte(c),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn testMarkdownHTML(html: []const u8, expected: []const u8) !void {
|
||||
const testing = @import("../testing.zig");
|
||||
const page = try testing.test_session.createPage();
|
||||
|
||||
@@ -23,6 +23,9 @@ const h5e = @import("html5ever.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Node = @import("../webapi/Node.zig");
|
||||
const Element = @import("../webapi/Element.zig");
|
||||
|
||||
pub const AttributeIterator = h5e.AttributeIterator;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
|
||||
@@ -89,6 +89,41 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CanvasRenderingContext2D#getImageData">
|
||||
{
|
||||
const element = document.createElement("canvas");
|
||||
element.width = 100;
|
||||
element.height = 50;
|
||||
const ctx = element.getContext("2d");
|
||||
|
||||
const imageData = ctx.getImageData(0, 0, 10, 20);
|
||||
testing.expectEqual(true, imageData instanceof ImageData);
|
||||
testing.expectEqual(imageData.width, 10);
|
||||
testing.expectEqual(imageData.height, 20);
|
||||
testing.expectEqual(imageData.data.length, 10 * 20 * 4);
|
||||
testing.expectEqual(true, imageData.data instanceof Uint8ClampedArray);
|
||||
|
||||
// Undrawn canvas should return transparent black pixels.
|
||||
testing.expectEqual(imageData.data[0], 0);
|
||||
testing.expectEqual(imageData.data[1], 0);
|
||||
testing.expectEqual(imageData.data[2], 0);
|
||||
testing.expectEqual(imageData.data[3], 0);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CanvasRenderingContext2D#getImageData invalid">
|
||||
{
|
||||
const element = document.createElement("canvas");
|
||||
const ctx = element.getContext("2d");
|
||||
|
||||
// Zero or negative width/height should throw IndexSizeError.
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 0, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, 0));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, -5, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, -5));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
<script id="getter">
|
||||
{
|
||||
|
||||
@@ -62,3 +62,26 @@
|
||||
testing.expectEqual(offscreen.height, 96);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=OffscreenCanvasRenderingContext2D#getImageData>
|
||||
{
|
||||
const canvas = new OffscreenCanvas(100, 50);
|
||||
const ctx = canvas.getContext("2d");
|
||||
|
||||
const imageData = ctx.getImageData(0, 0, 10, 20);
|
||||
testing.expectEqual(true, imageData instanceof ImageData);
|
||||
testing.expectEqual(imageData.width, 10);
|
||||
testing.expectEqual(imageData.height, 20);
|
||||
testing.expectEqual(imageData.data.length, 10 * 20 * 4);
|
||||
|
||||
// Undrawn canvas should return transparent black pixels.
|
||||
testing.expectEqual(imageData.data[0], 0);
|
||||
testing.expectEqual(imageData.data[1], 0);
|
||||
testing.expectEqual(imageData.data[2], 0);
|
||||
testing.expectEqual(imageData.data[3], 0);
|
||||
|
||||
// Zero or negative dimensions should throw.
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 0, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, -5));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -56,3 +56,25 @@
|
||||
testing.expectEqual('FontFaceSet', document.fonts.constructor.name);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="document_fonts_addEventListener">
|
||||
{
|
||||
let loading = false;
|
||||
document.fonts.addEventListener('loading', function() {
|
||||
loading = true;
|
||||
});
|
||||
|
||||
let loadingdone = false;
|
||||
document.fonts.addEventListener('loadingdone', function() {
|
||||
loadingdone = true;
|
||||
});
|
||||
|
||||
document.fonts.load("italic bold 16px Roboto");
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(true, loading);
|
||||
testing.expectEqual(true, loadingdone);
|
||||
});
|
||||
testing.expectEqual(true, true);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
<!DOCTYPE html>
|
||||
<head>
|
||||
<script src="../testing.js"></script>
|
||||
<script>
|
||||
// Test that document.open/write/close throw InvalidStateError during custom element
|
||||
// reactions when the element is parsed from HTML
|
||||
|
||||
window.constructorOpenException = null;
|
||||
window.constructorWriteException = null;
|
||||
window.constructorCloseException = null;
|
||||
window.constructorCalled = false;
|
||||
|
||||
class ThrowTestElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
window.constructorCalled = true;
|
||||
|
||||
// Try document.open on the same document during constructor - should throw
|
||||
try {
|
||||
document.open();
|
||||
} catch (e) {
|
||||
window.constructorOpenException = e;
|
||||
}
|
||||
|
||||
// Try document.write on the same document during constructor - should throw
|
||||
try {
|
||||
document.write('<b>test</b>');
|
||||
} catch (e) {
|
||||
window.constructorWriteException = e;
|
||||
}
|
||||
|
||||
// Try document.close on the same document during constructor - should throw
|
||||
try {
|
||||
document.close();
|
||||
} catch (e) {
|
||||
window.constructorCloseException = e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
customElements.define('throw-test-element', ThrowTestElement);
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<!-- This element will be parsed from HTML, triggering the constructor -->
|
||||
<throw-test-element id="test-element"></throw-test-element>
|
||||
|
||||
<script id="verify_throws">
|
||||
{
|
||||
// Verify the constructor was called
|
||||
testing.expectEqual(true, window.constructorCalled);
|
||||
|
||||
// Verify document.open threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorOpenException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorOpenException.name);
|
||||
|
||||
// Verify document.write threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorWriteException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorWriteException.name);
|
||||
|
||||
// Verify document.close threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorCloseException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorCloseException.name);
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
@@ -24,11 +24,10 @@
|
||||
|
||||
<script id=byId name="test1">
|
||||
testing.expectEqual(1, document.querySelector.length);
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelector(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => document.querySelector(''));
|
||||
|
||||
testing.expectEqual('test1', document.querySelector('#byId').getAttribute('name'));
|
||||
|
||||
@@ -34,11 +34,10 @@
|
||||
</script>
|
||||
|
||||
<script id=script1 name="test1">
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelectorAll(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => document.querySelectorAll(''));
|
||||
</script>
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@
|
||||
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(3, err.code);
|
||||
testing.expectEqual('Hierarchy Error', err.message);
|
||||
testing.expectEqual('HierarchyRequestError', err.name);
|
||||
testing.expectEqual(true, err instanceof DOMException);
|
||||
testing.expectEqual(true, err instanceof Error);
|
||||
}, () => link.appendChild(content));
|
||||
|
||||
@@ -11,9 +11,9 @@
|
||||
}
|
||||
|
||||
{
|
||||
// Empty XML is a parse error (no root element)
|
||||
const parser = new DOMParser();
|
||||
testing.expectError('Error', () => parser.parseFromString('', 'text/xml'));
|
||||
let d = parser.parseFromString('', 'text/xml');
|
||||
testing.expectEqual('<parsererror>error</parsererror>', new XMLSerializer().serializeToString(d));
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -36,7 +36,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => el1.removeAttributeNode(script_id_node));
|
||||
|
||||
testing.expectEqual(an1, el1.removeAttributeNode(an1));
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
testing.expectEqual('', $('#a0').href);
|
||||
|
||||
testing.expectEqual(testing.BASE_URL + 'element/anchor1.html', $('#a1').href);
|
||||
testing.expectEqual(testing.ORIGIN + 'hello/world/anchor2.html', $('#a2').href);
|
||||
testing.expectEqual(testing.ORIGIN + '/hello/world/anchor2.html', $('#a2').href);
|
||||
testing.expectEqual('https://www.openmymind.net/Elixirs-With-Statement/', $('#a3').href);
|
||||
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/foo', $('#link').href);
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/hello', form.action)
|
||||
|
||||
form.action = '/hello';
|
||||
testing.expectEqual(testing.ORIGIN + 'hello', form.action)
|
||||
testing.expectEqual(testing.ORIGIN + '/hello', form.action)
|
||||
|
||||
form.action = 'https://lightpanda.io/hello';
|
||||
testing.expectEqual('https://lightpanda.io/hello', form.action)
|
||||
@@ -343,3 +343,123 @@
|
||||
testing.expectEqual('', form.elements['choice'].value)
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() fires the submit event (unlike submit()) -->
|
||||
<form id="test_form2" action="/should-not-navigate2" method="get">
|
||||
<input name="q" value="test2">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_fires_submit_event">
|
||||
{
|
||||
const form = $('#test_form2');
|
||||
let submitFired = false;
|
||||
|
||||
form.addEventListener('submit', (e) => {
|
||||
e.preventDefault();
|
||||
submitFired = true;
|
||||
});
|
||||
|
||||
form.requestSubmit();
|
||||
|
||||
testing.expectEqual(true, submitFired);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with preventDefault stops navigation -->
|
||||
<form id="test_form3" action="/should-not-navigate3" method="get">
|
||||
<input name="q" value="test3">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_respects_preventDefault">
|
||||
{
|
||||
const form = $('#test_form3');
|
||||
|
||||
form.addEventListener('submit', (e) => {
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
form.requestSubmit();
|
||||
|
||||
// Form submission was prevented, so no navigation should be scheduled
|
||||
testing.expectEqual(true, true);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with non-submit-button submitter throws TypeError -->
|
||||
<form id="test_form_rs1" action="/should-not-navigate4" method="get">
|
||||
<input id="rs1_text" type="text" name="q" value="test">
|
||||
<input id="rs1_submit" type="submit" value="Go">
|
||||
<input id="rs1_image" type="image" src="x.png">
|
||||
<button id="rs1_btn_submit" type="submit">Submit</button>
|
||||
<button id="rs1_btn_reset" type="reset">Reset</button>
|
||||
<button id="rs1_btn_button" type="button">Button</button>
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_rejects_non_submit_button">
|
||||
{
|
||||
const form = $('#test_form_rs1');
|
||||
form.addEventListener('submit', (e) => e.preventDefault());
|
||||
|
||||
// A text input is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_text'));
|
||||
});
|
||||
|
||||
// A reset button is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_btn_reset'));
|
||||
});
|
||||
|
||||
// A <button type="button"> is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_btn_button'));
|
||||
});
|
||||
|
||||
// A <div> is not a submit button — should throw TypeError
|
||||
const div = document.createElement('div');
|
||||
form.appendChild(div);
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit(div);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() accepts valid submit buttons -->
|
||||
<script id="requestSubmit_accepts_submit_buttons">
|
||||
{
|
||||
const form = $('#test_form_rs1');
|
||||
let submitCount = 0;
|
||||
form.addEventListener('submit', (e) => { e.preventDefault(); submitCount++; });
|
||||
|
||||
// <input type="submit"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_submit'));
|
||||
testing.expectEqual(1, submitCount);
|
||||
|
||||
// <input type="image"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_image'));
|
||||
testing.expectEqual(2, submitCount);
|
||||
|
||||
// <button type="submit"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_btn_submit'));
|
||||
testing.expectEqual(3, submitCount);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with submitter not owned by form throws NotFoundError -->
|
||||
<form id="test_form_rs2" action="/should-not-navigate5" method="get">
|
||||
<input type="text" name="q" value="test">
|
||||
</form>
|
||||
<form id="test_form_rs3">
|
||||
<input id="rs3_submit" type="submit" value="Other Submit">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_rejects_wrong_form_submitter">
|
||||
{
|
||||
const form = $('#test_form_rs2');
|
||||
|
||||
// Submit button belongs to a different form — should throw NotFoundError
|
||||
testing.expectError('NotFoundError', () => {
|
||||
form.requestSubmit($('#rs3_submit'));
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
testing.expectEqual('test.png', img.getAttribute('src'));
|
||||
|
||||
img.src = '/absolute/path.png';
|
||||
testing.expectEqual(testing.ORIGIN + 'absolute/path.png', img.src);
|
||||
testing.expectEqual(testing.ORIGIN + '/absolute/path.png', img.src);
|
||||
testing.expectEqual('/absolute/path.png', img.getAttribute('src'));
|
||||
|
||||
img.src = 'https://example.com/image.png';
|
||||
|
||||
@@ -191,14 +191,14 @@
|
||||
|
||||
let eventCount = 0;
|
||||
let lastEvent = null;
|
||||
|
||||
|
||||
input.addEventListener('selectionchange', (e) => {
|
||||
eventCount++;
|
||||
lastEvent = e;
|
||||
});
|
||||
|
||||
|
||||
testing.expectEqual(0, eventCount);
|
||||
|
||||
|
||||
input.setSelectionRange(0, 5);
|
||||
input.select();
|
||||
input.selectionStart = 3;
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
testing.expectEqual('https://lightpanda.io/opensource-browser/15', l2.href);
|
||||
|
||||
l2.href = '/over/9000';
|
||||
testing.expectEqual(testing.ORIGIN + 'over/9000', l2.href);
|
||||
testing.expectEqual(testing.ORIGIN + '/over/9000', l2.href);
|
||||
|
||||
l2.crossOrigin = 'nope';
|
||||
testing.expectEqual('anonymous', l2.crossOrigin);
|
||||
@@ -84,3 +84,24 @@
|
||||
testing.eventually(() => testing.expectEqual(true, result));
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="refs">
|
||||
{
|
||||
const rels = ['stylesheet', 'preload', 'modulepreload'];
|
||||
const results = rels.map(() => false);
|
||||
rels.forEach((rel, i) => {
|
||||
let link = document.createElement('link')
|
||||
link.rel = rel;
|
||||
link.href = '/nope';
|
||||
link.onload = () => results[i] = true;
|
||||
document.documentElement.appendChild(link);
|
||||
});
|
||||
|
||||
|
||||
testing.eventually(() => {
|
||||
results.forEach((r) => {
|
||||
testing.expectEqual(true, r);
|
||||
});
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
61
src/browser/tests/element/html/script/async_text.html
Normal file
61
src/browser/tests/element/html/script/async_text.html
Normal file
@@ -0,0 +1,61 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../../../testing.js"></script>
|
||||
|
||||
<script id=force_async>
|
||||
{
|
||||
// Dynamically created scripts have async=true by default
|
||||
let s = document.createElement('script');
|
||||
testing.expectEqual(true, s.async);
|
||||
|
||||
// Setting async=false clears the force async flag and removes attribute
|
||||
s.async = false;
|
||||
testing.expectEqual(false, s.async);
|
||||
testing.expectEqual(false, s.hasAttribute('async'));
|
||||
|
||||
// Setting async=true adds the attribute
|
||||
s.async = true;
|
||||
testing.expectEqual(true, s.async);
|
||||
testing.expectEqual(true, s.hasAttribute('async'));
|
||||
}
|
||||
</script>
|
||||
|
||||
<script></script>
|
||||
<script id=empty>
|
||||
{
|
||||
// Empty parser-inserted script should have async=true (force async retained)
|
||||
let scripts = document.getElementsByTagName('script');
|
||||
let emptyScript = scripts[scripts.length - 2];
|
||||
testing.expectEqual(true, emptyScript.async);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=text_content>
|
||||
{
|
||||
let s = document.createElement('script');
|
||||
s.appendChild(document.createComment('COMMENT'));
|
||||
s.appendChild(document.createTextNode(' TEXT '));
|
||||
s.appendChild(document.createProcessingInstruction('P', 'I'));
|
||||
let a = s.appendChild(document.createElement('a'));
|
||||
a.appendChild(document.createTextNode('ELEMENT'));
|
||||
|
||||
// script.text should return only direct Text node children
|
||||
testing.expectEqual(' TEXT ', s.text);
|
||||
// script.textContent should return all descendant text
|
||||
testing.expectEqual(' TEXT ELEMENT', s.textContent);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=lazy_inline>
|
||||
{
|
||||
// Empty script in DOM, then append text - should execute
|
||||
window.lazyScriptRan = false;
|
||||
let s = document.createElement('script');
|
||||
document.head.appendChild(s);
|
||||
// Script is in DOM but empty, so not yet executed
|
||||
testing.expectEqual(false, window.lazyScriptRan);
|
||||
// Append text node with code
|
||||
s.appendChild(document.createTextNode('window.lazyScriptRan = true;'));
|
||||
// Now it should have executed
|
||||
testing.expectEqual(true, window.lazyScriptRan);
|
||||
}
|
||||
</script>
|
||||
@@ -66,11 +66,10 @@
|
||||
{
|
||||
const container = $('#test-container');
|
||||
|
||||
testing.expectError("SyntaxError: Syntax Error", () => container.matches(''));
|
||||
testing.expectError("SyntaxError", () => container.matches(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => container.matches(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -12,11 +12,10 @@
|
||||
const p1 = $('#p1');
|
||||
testing.expectEqual(null, p1.querySelector('#p1'));
|
||||
|
||||
testing.expectError("SyntaxError: Syntax Error", () => p1.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => p1.querySelector(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => p1.querySelector(''));
|
||||
|
||||
testing.expectEqual($('#c2'), p1.querySelector('#c2'));
|
||||
|
||||
@@ -24,11 +24,10 @@
|
||||
<script id=errors>
|
||||
{
|
||||
const root = $('#root');
|
||||
testing.expectError("SyntaxError: Syntax Error", () => root.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => root.querySelectorAll(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => root.querySelectorAll(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -43,8 +43,8 @@
|
||||
const container = $('#container');
|
||||
|
||||
// Empty selectors
|
||||
testing.expectError("SyntaxError: Syntax Error", () => container.querySelector(''));
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => container.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelectorAll(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
38
src/browser/tests/event/report_error.html
Normal file
38
src/browser/tests/event/report_error.html
Normal file
@@ -0,0 +1,38 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=onerrorFiveArguments>
|
||||
let called = false;
|
||||
let argCount = 0;
|
||||
window.onerror = function() {
|
||||
called = true;
|
||||
argCount = arguments.length;
|
||||
return true; // suppress default
|
||||
};
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual(true, called);
|
||||
testing.expectEqual(5, argCount);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
|
||||
<script id=onerrorCalledBeforeEventListener>
|
||||
let callOrder = [];
|
||||
window.onerror = function() { callOrder.push('onerror'); return true; };
|
||||
window.addEventListener('error', function() { callOrder.push('listener'); });
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual('onerror', callOrder[0]);
|
||||
testing.expectEqual('listener', callOrder[1]);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
|
||||
<script id=onerrorReturnTrueSuppresses>
|
||||
let listenerCalled = false;
|
||||
window.onerror = function() { return true; };
|
||||
window.addEventListener('error', function(e) {
|
||||
// listener still fires even when onerror returns true
|
||||
listenerCalled = true;
|
||||
});
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual(true, listenerCalled);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
@@ -108,7 +108,7 @@
|
||||
{
|
||||
let f5 = document.createElement('iframe');
|
||||
f5.id = 'f5';
|
||||
f5.src = "support/sub 1.html";
|
||||
f5.src = "support/page.html";
|
||||
document.documentElement.appendChild(f5);
|
||||
f5.src = "about:blank";
|
||||
|
||||
|
||||
25
src/browser/tests/frames/post_message.html
Normal file
25
src/browser/tests/frames/post_message.html
Normal file
@@ -0,0 +1,25 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<iframe id="receiver"></iframe>
|
||||
|
||||
<script id="messages">
|
||||
{
|
||||
let reply = null;
|
||||
window.addEventListener('message', (e) => {
|
||||
console.warn('reply')
|
||||
reply = e.data;
|
||||
});
|
||||
|
||||
const iframe = $('#receiver');
|
||||
iframe.src = 'support/message_receiver.html';
|
||||
iframe.addEventListener('load', () => {
|
||||
iframe.contentWindow.postMessage('ping', '*');
|
||||
});
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('pong', reply.data);
|
||||
testing.expectEqual(testing.ORIGIN, reply.origin);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
9
src/browser/tests/frames/support/message_receiver.html
Normal file
9
src/browser/tests/frames/support/message_receiver.html
Normal file
@@ -0,0 +1,9 @@
|
||||
<!DOCTYPE html>
|
||||
<script>
|
||||
window.addEventListener('message', (e) => {
|
||||
console.warn('Frame Message', e.data);
|
||||
if (e.data === 'ping') {
|
||||
window.top.postMessage({data: 'pong', origin: e.origin}, '*');
|
||||
}
|
||||
});
|
||||
</script>
|
||||
2
src/browser/tests/frames/support/page.html
Normal file
2
src/browser/tests/frames/support/page.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<!DOCTYPE html>
|
||||
a-page
|
||||
42
src/browser/tests/frames/target.html
Normal file
42
src/browser/tests/frames/target.html
Normal file
@@ -0,0 +1,42 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<iframe name=f1 id=frame1></iframe>
|
||||
<a id=l1 target=f1 href=support/page.html></a>
|
||||
<script id=anchor>
|
||||
$('#l1').click();
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', $('#frame1').contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=form>
|
||||
{
|
||||
let frame2 = document.createElement('iframe');
|
||||
frame2.name = 'frame2';
|
||||
document.documentElement.appendChild(frame2);
|
||||
|
||||
let form = document.createElement('form');
|
||||
form.target = 'frame2';
|
||||
form.action = 'support/page.html';
|
||||
form.submit();
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', frame2.contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<iframe name=frame3 id=f3></iframe>
|
||||
<form target="_top" action="support/page.html">
|
||||
<input type=submit id=submit1 formtarget="frame3">
|
||||
</form>
|
||||
|
||||
<script id=formtarget>
|
||||
{
|
||||
$('#submit1').click();
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', $('#f3').contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
@@ -2,37 +2,17 @@
|
||||
<script src="testing.js"></script>
|
||||
|
||||
<script id=history>
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'manual';
|
||||
testing.expectEqual('manual', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'auto';
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
testing.expectEqual(null, history.state)
|
||||
|
||||
history.pushState({ testInProgress: true }, null, 'http://127.0.0.1:9582/src/browser/tests/history_after_nav.skip.html');
|
||||
testing.expectEqual({ testInProgress: true }, history.state);
|
||||
|
||||
history.pushState({ testInProgress: false }, null, 'http://127.0.0.1:9582/xhr/json');
|
||||
history.replaceState({ "new": "field", testComplete: true }, null);
|
||||
|
||||
let state = { "new": "field", testComplete: true };
|
||||
testing.expectEqual(state, history.state);
|
||||
|
||||
let popstateEventFired = false;
|
||||
let popstateEventState = null;
|
||||
|
||||
window.addEventListener('popstate', (event) => {
|
||||
popstateEventFired = true;
|
||||
popstateEventState = event.state;
|
||||
});
|
||||
|
||||
// This test is a bit wonky. But it's trying to test navigation, which is
|
||||
// something we can't do in the main page (we can't navigate away from this
|
||||
// page and still assertOk in the test runner).
|
||||
// If support/history.html has a failed assertion, it'll log the error and
|
||||
// stop the script. If it succeeds, it'll set support_history_completed
|
||||
// which we can use here to assume everything passed.
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(true, popstateEventFired);
|
||||
testing.expectEqual({testInProgress: true }, popstateEventState);
|
||||
})
|
||||
|
||||
history.back();
|
||||
testing.expectEqual(true, window.support_history_completed);
|
||||
testing.expectEqual(true, window.support_history_popstateEventFired);
|
||||
testing.expectEqual({testInProgress: true }, window.support_history_popstateEventState);
|
||||
});
|
||||
</script>
|
||||
|
||||
<iframe id=frame src="support/history.html"></iframe>
|
||||
|
||||
14
src/browser/tests/mcp_actions.html
Normal file
14
src/browser/tests/mcp_actions.html
Normal file
@@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<button id="btn" onclick="window.clicked = true;">Click Me</button>
|
||||
<input id="inp" oninput="window.inputVal = this.value" onchange="window.changed = true;">
|
||||
<select id="sel" onchange="window.selChanged = this.value">
|
||||
<option value="opt1">Option 1</option>
|
||||
<option value="opt2">Option 2</option>
|
||||
</select>
|
||||
<div id="scrollbox" style="width: 100px; height: 100px; overflow: scroll;" onscroll="window.scrolled = true;">
|
||||
<div style="height: 500px;">Long content</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -27,3 +27,44 @@
|
||||
testing.expectEqual(false, navigator.javaEnabled());
|
||||
testing.expectEqual(false, navigator.webdriver);
|
||||
</script>
|
||||
|
||||
<script id=permission_query>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.permissions.query({ name: 'notifications' });
|
||||
testing.expectTrue(p instanceof Promise);
|
||||
const status = await p;
|
||||
restore();
|
||||
testing.expectEqual('prompt', status.state);
|
||||
testing.expectEqual('notifications', status.name);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=storage_estimate>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.storage.estimate();
|
||||
testing.expectTrue(p instanceof Promise);
|
||||
|
||||
const estimate = await p;
|
||||
restore();
|
||||
testing.expectEqual(0, estimate.usage);
|
||||
testing.expectEqual(1024 * 1024 * 1024, estimate.quota);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=deviceMemory>
|
||||
testing.expectEqual(8, navigator.deviceMemory);
|
||||
</script>
|
||||
|
||||
<script id=getBattery>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.getBattery();
|
||||
try {
|
||||
await p;
|
||||
testing.fail('getBattery should reject');
|
||||
} catch (err) {
|
||||
restore();
|
||||
testing.expectEqual('NotSupportedError', err.name);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -203,3 +203,39 @@
|
||||
testing.expectEqual(true, response.body !== null);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=fetch_blob_url>
|
||||
testing.async(async (restore) => {
|
||||
// Create a blob and get its URL
|
||||
const blob = new Blob(['Hello from blob!'], { type: 'text/plain' });
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
|
||||
const response = await fetch(blobUrl);
|
||||
restore();
|
||||
|
||||
testing.expectEqual(200, response.status);
|
||||
testing.expectEqual(true, response.ok);
|
||||
testing.expectEqual(blobUrl, response.url);
|
||||
testing.expectEqual('text/plain', response.headers.get('Content-Type'));
|
||||
|
||||
const text = await response.text();
|
||||
testing.expectEqual('Hello from blob!', text);
|
||||
|
||||
// Clean up
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=abort>
|
||||
testing.async(async (restore) => {
|
||||
const controller = new AbortController();
|
||||
controller.abort();
|
||||
try {
|
||||
await fetch('http://127.0.0.1:9582/xhr', { signal: controller.signal });
|
||||
testain.fail('fetch should have been aborted');
|
||||
} catch (e) {
|
||||
restore();
|
||||
testing.expectEqual("AbortError", e.name);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -283,3 +283,26 @@
|
||||
testing.expectEqual(XMLHttpRequest.UNSENT, req.readyState);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=xhr_blob_url>
|
||||
testing.async(async (restore) => {
|
||||
// Create a blob and get its URL
|
||||
const blob = new Blob(['Hello from blob!'], { type: 'text/plain' });
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
|
||||
const req = new XMLHttpRequest();
|
||||
await new Promise((resolve) => {
|
||||
req.onload = resolve;
|
||||
req.open('GET', blobUrl);
|
||||
req.send();
|
||||
});
|
||||
|
||||
restore();
|
||||
testing.expectEqual(200, req.status);
|
||||
testing.expectEqual('Hello from blob!', req.responseText);
|
||||
testing.expectEqual(blobUrl, req.responseURL);
|
||||
|
||||
// Clean up
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => d1.insertBefore(document.createElement('div'), d2));
|
||||
|
||||
let c1 = document.createElement('div');
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => $('#d1').removeChild($('#p1')));
|
||||
|
||||
const p1 = $('#p1');
|
||||
|
||||
@@ -25,7 +25,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(3, err.code);
|
||||
testing.expectEqual("HierarchyRequestError", err.name);
|
||||
testing.expectEqual("Hierarchy Error", err.message);
|
||||
}, () => d1.replaceChild(c4, c3));
|
||||
|
||||
testing.expectEqual(c2, d1.replaceChild(c4, c2));
|
||||
|
||||
41
src/browser/tests/page/blob.html
Normal file
41
src/browser/tests/page/blob.html
Normal file
@@ -0,0 +1,41 @@
|
||||
<!DOCTYPE html>
|
||||
<body></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id="basic_blob_navigation">
|
||||
{
|
||||
const html = '<html><head></head><body><div id="test">Hello Blob</div></body></html>';
|
||||
const blob = new Blob([html], { type: 'text/html' });
|
||||
const blob_url = URL.createObjectURL(blob);
|
||||
|
||||
const iframe = document.createElement('iframe');
|
||||
document.body.appendChild(iframe);
|
||||
iframe.src = blob_url;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('Hello Blob', iframe.contentDocument.getElementById('test').textContent);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="multiple_blobs">
|
||||
{
|
||||
const blob1 = new Blob(['<html><body>First</body></html>'], { type: 'text/html' });
|
||||
const blob2 = new Blob(['<html><body>Second</body></html>'], { type: 'text/html' });
|
||||
const url1 = URL.createObjectURL(blob1);
|
||||
const url2 = URL.createObjectURL(blob2);
|
||||
|
||||
const iframe1 = document.createElement('iframe');
|
||||
document.body.appendChild(iframe1);
|
||||
iframe1.src = url1;
|
||||
|
||||
const iframe2 = document.createElement('iframe');
|
||||
document.body.appendChild(iframe2);
|
||||
iframe2.src = url2;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('First', iframe1.contentDocument.body.textContent);
|
||||
testing.expectEqual('Second', iframe2.contentDocument.body.textContent);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
@@ -451,12 +451,12 @@
|
||||
const p1 = $('#p1');
|
||||
|
||||
// Test setStart with offset beyond node length
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setStart(p1, 999);
|
||||
});
|
||||
|
||||
// Test with negative offset (wraps to large u32)
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setStart(p1.firstChild, -1);
|
||||
});
|
||||
}
|
||||
@@ -468,12 +468,12 @@
|
||||
const p1 = $('#p1');
|
||||
|
||||
// Test setEnd with offset beyond node length
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setEnd(p1, 999);
|
||||
});
|
||||
|
||||
// Test with text node
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setEnd(p1.firstChild, 9999);
|
||||
});
|
||||
}
|
||||
@@ -525,11 +525,11 @@
|
||||
range.setEnd(p1, 1);
|
||||
|
||||
// Test comparePoint with invalid offset
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.comparePoint(p1, 20);
|
||||
});
|
||||
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.comparePoint(p1.firstChild, -1);
|
||||
});
|
||||
}
|
||||
@@ -650,11 +650,11 @@
|
||||
range.setEnd(p1, 1);
|
||||
|
||||
// Invalid offset should throw IndexSizeError
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.isPointInRange(p1, 999);
|
||||
});
|
||||
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.isPointInRange(p1.firstChild, 9999);
|
||||
});
|
||||
}
|
||||
@@ -854,11 +854,11 @@
|
||||
range2.setStart(p, 0);
|
||||
|
||||
// Invalid how parameter should throw NotSupportedError
|
||||
testing.expectError('NotSupportedError: Not Supported', () => {
|
||||
testing.expectError('NotSupportedError:', () => {
|
||||
range1.compareBoundaryPoints(4, range2);
|
||||
});
|
||||
|
||||
testing.expectError('NotSupportedError: Not Supported', () => {
|
||||
testing.expectError('NotSupportedError:', () => {
|
||||
range1.compareBoundaryPoints(99, range2);
|
||||
});
|
||||
}
|
||||
@@ -883,7 +883,7 @@
|
||||
range2.setEnd(foreignP, 1);
|
||||
|
||||
// Comparing ranges in different documents should throw WrongDocumentError
|
||||
testing.expectError('WrongDocumentError: wrong_document_error', () => {
|
||||
testing.expectError('WrongDocumentError:', () => {
|
||||
range1.compareBoundaryPoints(Range.START_TO_START, range2);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<div id="host2"></div>
|
||||
<div id="host3"></div>
|
||||
|
||||
<!-- <script id="attachShadow_open">
|
||||
<script id="attachShadow_open">
|
||||
{
|
||||
const host = $('#host1');
|
||||
const shadow = host.attachShadow({ mode: 'open' });
|
||||
@@ -140,7 +140,7 @@
|
||||
shadow.replaceChildren('New content');
|
||||
testing.expectEqual('New content', shadow.innerHTML);
|
||||
}
|
||||
</script> -->
|
||||
</script>
|
||||
|
||||
<script id="getElementById">
|
||||
{
|
||||
@@ -154,3 +154,16 @@
|
||||
testing.expectEqual(null, shadow.getElementById('nonexistent'));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
<script id=adoptedStyleSheets>
|
||||
{
|
||||
const host = document.createElement('div');
|
||||
const shadow = host.attachShadow({ mode: 'open' });
|
||||
|
||||
const acss = shadow.adoptedStyleSheets;
|
||||
testing.expectEqual(0, acss.length);
|
||||
acss.push(new CSSStyleSheet());
|
||||
testing.expectEqual(1, acss.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
33
src/browser/tests/support/history.html
Normal file
33
src/browser/tests/support/history.html
Normal file
@@ -0,0 +1,33 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
<script id=history>
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'manual';
|
||||
testing.expectEqual('manual', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'auto';
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
testing.expectEqual(null, history.state)
|
||||
|
||||
history.pushState({ testInProgress: true }, null, testing.BASE_URL + 'history_after_nav.skip.html');
|
||||
testing.expectEqual({ testInProgress: true }, history.state);
|
||||
|
||||
history.pushState({ testInProgress: false }, null, testing.ORIGIN + '/xhr/json');
|
||||
history.replaceState({ "new": "field", testComplete: true }, null);
|
||||
|
||||
let state = { "new": "field", testComplete: true };
|
||||
testing.expectEqual(state, history.state);
|
||||
|
||||
let popstateEventFired = false;
|
||||
let popstateEventState = null;
|
||||
|
||||
window.top.support_history_completed = true;
|
||||
window.addEventListener('popstate', (event) => {
|
||||
window.top.window.support_history_popstateEventFired = true;
|
||||
window.top.window.support_history_popstateEventState = event.state;
|
||||
});
|
||||
|
||||
history.back();
|
||||
</script>
|
||||
|
||||
@@ -99,8 +99,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
// our test runner sets this to true
|
||||
const IS_TEST_RUNNER = window._lightpanda_skip_auto_assert === true;
|
||||
const IS_TEST_RUNNER = window.navigator.userAgent.startsWith("Lightpanda/");
|
||||
|
||||
window.testing = {
|
||||
fail: fail,
|
||||
@@ -114,17 +113,17 @@
|
||||
eventually: eventually,
|
||||
IS_TEST_RUNNER: IS_TEST_RUNNER,
|
||||
HOST: '127.0.0.1',
|
||||
ORIGIN: 'http://127.0.0.1:9582/',
|
||||
ORIGIN: 'http://127.0.0.1:9582',
|
||||
BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/',
|
||||
};
|
||||
|
||||
if (window.navigator.userAgent.startsWith("Lightpanda/") == false) {
|
||||
if (IS_TEST_RUNNER === false) {
|
||||
// The page is running in a different browser. Probably a developer making sure
|
||||
// a test is correct. There are a few tweaks we need to do to make this a
|
||||
// seemless, namely around adapting paths/urls.
|
||||
console.warn(`The page is not being executed in the test runner, certain behavior has been adjusted`);
|
||||
window.testing.HOST = location.hostname;
|
||||
window.testing.ORIGIN = location.origin + '/';
|
||||
window.testing.ORIGIN = location.origin;
|
||||
window.testing.BASE_URL = location.origin + '/src/browser/tests/';
|
||||
window.addEventListener('load', testing.assertOk);
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload="loaded()"></body>
|
||||
<body onload="loadEvent = event"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad2>
|
||||
let called = 0;
|
||||
function loaded(e) {
|
||||
called += 1;
|
||||
}
|
||||
// Per spec, the handler is compiled as: function(event) { loadEvent = event }
|
||||
// Verify: handler fires, "event" parameter is a proper Event, and handler is a function.
|
||||
let loadEvent = null;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(1, called);
|
||||
testing.expectEqual("function", typeof document.body.onload);
|
||||
testing.expectTrue(loadEvent instanceof Event);
|
||||
testing.expectEqual("load", loadEvent.type);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
28
src/browser/tests/window/body_onload3.html
Normal file
28
src/browser/tests/window/body_onload3.html
Normal file
@@ -0,0 +1,28 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload="called++"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad3>
|
||||
// Per spec, the handler is compiled as: function(event) { called++ }
|
||||
// Verify: handler fires exactly once, and body.onload reflects to window.onload.
|
||||
let called = 0;
|
||||
|
||||
testing.eventually(() => {
|
||||
// The attribute handler should have fired exactly once.
|
||||
testing.expectEqual(1, called);
|
||||
|
||||
// body.onload is a Window-reflecting handler per spec.
|
||||
testing.expectEqual("function", typeof document.body.onload);
|
||||
testing.expectEqual(document.body.onload, window.onload);
|
||||
|
||||
// Setting body.onload via property replaces the attribute handler.
|
||||
let propertyCalled = false;
|
||||
document.body.onload = function() { propertyCalled = true; };
|
||||
testing.expectEqual(document.body.onload, window.onload);
|
||||
|
||||
// Setting onload to null removes the handler.
|
||||
document.body.onload = null;
|
||||
testing.expectEqual(null, document.body.onload);
|
||||
testing.expectEqual(null, window.onload);
|
||||
});
|
||||
</script>
|
||||
@@ -82,7 +82,7 @@
|
||||
testing.expectEqual('ceil', atob('Y2VpbA')); // 6 chars, len%4==2, needs '=='
|
||||
|
||||
// length % 4 == 1 must still throw
|
||||
testing.expectError('InvalidCharacterError: Invalid Character', () => {
|
||||
testing.expectError('InvalidCharacterError', () => {
|
||||
atob('Y');
|
||||
});
|
||||
</script>
|
||||
@@ -125,6 +125,143 @@
|
||||
testing.expectEqual(screen, window.screen);
|
||||
</script>
|
||||
|
||||
<script id=structuredClone>
|
||||
// Basic types
|
||||
testing.expectEqual(42, structuredClone(42));
|
||||
testing.expectEqual('hello', structuredClone('hello'));
|
||||
testing.expectEqual(true, structuredClone(true));
|
||||
testing.expectEqual(null, structuredClone(null));
|
||||
testing.expectEqual(undefined, structuredClone(undefined));
|
||||
|
||||
// Objects and arrays (these work with JSON too, but verify they're cloned)
|
||||
const obj = { a: 1, b: { c: 2 } };
|
||||
const clonedObj = structuredClone(obj);
|
||||
testing.expectEqual(1, clonedObj.a);
|
||||
testing.expectEqual(2, clonedObj.b.c);
|
||||
clonedObj.b.c = 999;
|
||||
testing.expectEqual(2, obj.b.c); // original unchanged
|
||||
|
||||
const arr = [1, [2, 3]];
|
||||
const clonedArr = structuredClone(arr);
|
||||
testing.expectEqual(1, clonedArr[0]);
|
||||
testing.expectEqual(2, clonedArr[1][0]);
|
||||
clonedArr[1][0] = 999;
|
||||
testing.expectEqual(2, arr[1][0]); // original unchanged
|
||||
|
||||
// Date - JSON would stringify to ISO string
|
||||
const date = new Date('2024-01-15T12:30:00Z');
|
||||
const clonedDate = structuredClone(date);
|
||||
testing.expectEqual(true, clonedDate instanceof Date);
|
||||
testing.expectEqual(date.getTime(), clonedDate.getTime());
|
||||
testing.expectEqual(date.toISOString(), clonedDate.toISOString());
|
||||
|
||||
// RegExp - JSON would stringify to {}
|
||||
const regex = /test\d+/gi;
|
||||
const clonedRegex = structuredClone(regex);
|
||||
testing.expectEqual(true, clonedRegex instanceof RegExp);
|
||||
testing.expectEqual(regex.source, clonedRegex.source);
|
||||
testing.expectEqual(regex.flags, clonedRegex.flags);
|
||||
testing.expectEqual(true, clonedRegex.test('test123'));
|
||||
|
||||
// Map - JSON can't handle
|
||||
const map = new Map([['a', 1], ['b', 2]]);
|
||||
const clonedMap = structuredClone(map);
|
||||
testing.expectEqual(true, clonedMap instanceof Map);
|
||||
testing.expectEqual(2, clonedMap.size);
|
||||
testing.expectEqual(1, clonedMap.get('a'));
|
||||
testing.expectEqual(2, clonedMap.get('b'));
|
||||
|
||||
// Set - JSON can't handle
|
||||
const set = new Set([1, 2, 3]);
|
||||
const clonedSet = structuredClone(set);
|
||||
testing.expectEqual(true, clonedSet instanceof Set);
|
||||
testing.expectEqual(3, clonedSet.size);
|
||||
testing.expectEqual(true, clonedSet.has(1));
|
||||
testing.expectEqual(true, clonedSet.has(2));
|
||||
testing.expectEqual(true, clonedSet.has(3));
|
||||
|
||||
// ArrayBuffer
|
||||
const buffer = new ArrayBuffer(8);
|
||||
const view = new Uint8Array(buffer);
|
||||
view[0] = 42;
|
||||
view[7] = 99;
|
||||
const clonedBuffer = structuredClone(buffer);
|
||||
testing.expectEqual(true, clonedBuffer instanceof ArrayBuffer);
|
||||
testing.expectEqual(8, clonedBuffer.byteLength);
|
||||
const clonedView = new Uint8Array(clonedBuffer);
|
||||
testing.expectEqual(42, clonedView[0]);
|
||||
testing.expectEqual(99, clonedView[7]);
|
||||
|
||||
// TypedArray
|
||||
const typedArr = new Uint32Array([100, 200, 300]);
|
||||
const clonedTypedArr = structuredClone(typedArr);
|
||||
testing.expectEqual(true, clonedTypedArr instanceof Uint32Array);
|
||||
testing.expectEqual(3, clonedTypedArr.length);
|
||||
testing.expectEqual(100, clonedTypedArr[0]);
|
||||
testing.expectEqual(200, clonedTypedArr[1]);
|
||||
testing.expectEqual(300, clonedTypedArr[2]);
|
||||
|
||||
// Special number values - JSON can't preserve these
|
||||
testing.expectEqual(true, Number.isNaN(structuredClone(NaN)));
|
||||
testing.expectEqual(Infinity, structuredClone(Infinity));
|
||||
testing.expectEqual(-Infinity, structuredClone(-Infinity));
|
||||
|
||||
// Object with undefined value - JSON would omit it
|
||||
const objWithUndef = { a: 1, b: undefined, c: 3 };
|
||||
const clonedObjWithUndef = structuredClone(objWithUndef);
|
||||
testing.expectEqual(1, clonedObjWithUndef.a);
|
||||
testing.expectEqual(undefined, clonedObjWithUndef.b);
|
||||
testing.expectEqual(true, 'b' in clonedObjWithUndef);
|
||||
testing.expectEqual(3, clonedObjWithUndef.c);
|
||||
|
||||
// Error objects
|
||||
const error = new Error('test error');
|
||||
const clonedError = structuredClone(error);
|
||||
testing.expectEqual(true, clonedError instanceof Error);
|
||||
testing.expectEqual('test error', clonedError.message);
|
||||
|
||||
// TypeError
|
||||
const typeError = new TypeError('type error');
|
||||
const clonedTypeError = structuredClone(typeError);
|
||||
testing.expectEqual(true, clonedTypeError instanceof TypeError);
|
||||
testing.expectEqual('type error', clonedTypeError.message);
|
||||
|
||||
// BigInt
|
||||
const bigInt = BigInt('9007199254740993');
|
||||
const clonedBigInt = structuredClone(bigInt);
|
||||
testing.expectEqual(bigInt, clonedBigInt);
|
||||
|
||||
// Circular references ARE supported by structuredClone (unlike JSON)
|
||||
const circular = { a: 1 };
|
||||
circular.self = circular;
|
||||
const clonedCircular = structuredClone(circular);
|
||||
testing.expectEqual(1, clonedCircular.a);
|
||||
testing.expectEqual(clonedCircular, clonedCircular.self); // circular ref preserved
|
||||
|
||||
// Functions cannot be cloned - should throw
|
||||
{
|
||||
let threw = false;
|
||||
try {
|
||||
structuredClone(() => {});
|
||||
} catch (err) {
|
||||
threw = true;
|
||||
// Just verify an error was thrown - V8's message format may vary
|
||||
}
|
||||
testing.expectEqual(true, threw);
|
||||
}
|
||||
|
||||
// Symbols cannot be cloned - should throw
|
||||
{
|
||||
let threw = false;
|
||||
try {
|
||||
structuredClone(Symbol('test'));
|
||||
} catch (err) {
|
||||
threw = true;
|
||||
}
|
||||
testing.expectEqual(true, threw);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=unhandled_rejection>
|
||||
{
|
||||
let unhandledCalled = 0;
|
||||
|
||||
34
src/browser/tests/window/window_event.html
Normal file
34
src/browser/tests/window/window_event.html
Normal file
@@ -0,0 +1,34 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=windowEventUndefinedOutsideHandler>
|
||||
testing.expectEqual(undefined, window.event);
|
||||
</script>
|
||||
|
||||
<script id=windowEventSetDuringWindowHandler>
|
||||
var capturedEvent = null;
|
||||
|
||||
window.addEventListener('test-event', function(e) {
|
||||
capturedEvent = window.event;
|
||||
});
|
||||
|
||||
var ev = new Event('test-event');
|
||||
window.dispatchEvent(ev);
|
||||
|
||||
testing.expectEqual(ev, capturedEvent);
|
||||
testing.expectEqual(undefined, window.event);
|
||||
</script>
|
||||
|
||||
<script id=windowEventRestoredAfterHandler>
|
||||
var captured2 = null;
|
||||
|
||||
window.addEventListener('test-event-2', function(e) {
|
||||
captured2 = window.event;
|
||||
});
|
||||
|
||||
var ev2 = new Event('test-event-2');
|
||||
window.dispatchEvent(ev2);
|
||||
|
||||
testing.expectEqual(ev2, captured2);
|
||||
testing.expectEqual(undefined, window.event);
|
||||
</script>
|
||||
@@ -19,15 +19,22 @@
|
||||
const std = @import("std");
|
||||
const js = @import("../js/js.zig");
|
||||
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Node = @import("Node.zig");
|
||||
const Range = @import("Range.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
const AbstractRange = @This();
|
||||
|
||||
pub const _prototype_root = true;
|
||||
|
||||
_rc: u8,
|
||||
_type: Type,
|
||||
|
||||
_page_id: u32,
|
||||
_arena: Allocator,
|
||||
_end_offset: u32,
|
||||
_start_offset: u32,
|
||||
_end_container: *Node,
|
||||
@@ -36,6 +43,27 @@ _start_container: *Node,
|
||||
// Intrusive linked list node for tracking live ranges on the Page.
|
||||
_range_link: std.DoublyLinkedList.Node = .{},
|
||||
|
||||
pub fn acquireRef(self: *AbstractRange) void {
|
||||
self._rc += 1;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *AbstractRange, shutdown: bool, session: *Session) void {
|
||||
_ = shutdown;
|
||||
const rc = self._rc;
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(rc != 0);
|
||||
}
|
||||
|
||||
if (rc == 1) {
|
||||
if (session.findPageById(self._page_id)) |page| {
|
||||
page._live_ranges.remove(&self._range_link);
|
||||
}
|
||||
session.releaseArena(self._arena);
|
||||
return;
|
||||
}
|
||||
self._rc = rc - 1;
|
||||
}
|
||||
|
||||
pub const Type = union(enum) {
|
||||
range: *Range,
|
||||
// TODO: static_range: *StaticRange,
|
||||
@@ -310,6 +338,8 @@ pub const JsApi = struct {
|
||||
pub const name = "AbstractRange";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(AbstractRange.deinit);
|
||||
};
|
||||
|
||||
pub const startContainer = bridge.accessor(AbstractRange.getStartContainer, null, .{});
|
||||
|
||||
@@ -151,8 +151,13 @@ pub fn asNode(self: *CData) *Node {
|
||||
|
||||
pub fn is(self: *CData, comptime T: type) ?*T {
|
||||
inline for (@typeInfo(Type).@"union".fields) |f| {
|
||||
if (f.type == T and @field(Type, f.name) == self._type) {
|
||||
return &@field(self._type, f.name);
|
||||
if (@field(Type, f.name) == self._type) {
|
||||
if (f.type == T) {
|
||||
return &@field(self._type, f.name);
|
||||
}
|
||||
if (f.type == *T) {
|
||||
return @field(self._type, f.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
|
||||
@@ -125,8 +125,8 @@ pub fn whenDefined(self: *CustomElementRegistry, name: []const u8, page: *Page)
|
||||
return local.resolvePromise(definition.constructor);
|
||||
}
|
||||
|
||||
validateName(name) catch |err| {
|
||||
return local.rejectPromise(DOMException.fromError(err) orelse unreachable);
|
||||
validateName(name) catch |err| switch (err) {
|
||||
error.SyntaxError => return local.rejectPromise(.{ .dom_exception = .{ .err = error.SyntaxError } }),
|
||||
};
|
||||
|
||||
const gop = try self._when_defined.getOrPut(page.arena, name);
|
||||
|
||||
@@ -104,13 +104,27 @@ pub fn getMessage(self: *const DOMException) []const u8 {
|
||||
}
|
||||
return switch (self._code) {
|
||||
.none => "",
|
||||
.invalid_character_error => "Invalid Character",
|
||||
.index_size_error => "Index or size is negative or greater than the allowed amount",
|
||||
.syntax_error => "Syntax Error",
|
||||
.not_supported => "Not Supported",
|
||||
.not_found => "Not Found",
|
||||
.hierarchy_error => "Hierarchy Error",
|
||||
else => @tagName(self._code),
|
||||
.hierarchy_error => "The operation would yield an incorrect node tree",
|
||||
.wrong_document_error => "The object is in the wrong document",
|
||||
.invalid_character_error => "The string contains invalid characters",
|
||||
.no_modification_allowed_error => "The object can not be modified",
|
||||
.not_found => "The object can not be found here",
|
||||
.not_supported => "The operation is not supported",
|
||||
.inuse_attribute_error => "The attribute already in use",
|
||||
.invalid_state_error => "The object is in an invalid state",
|
||||
.syntax_error => "The string did not match the expected pattern",
|
||||
.invalid_modification_error => "The object can not be modified in this way",
|
||||
.namespace_error => "The operation is not allowed by Namespaces in XML",
|
||||
.invalid_access_error => "The object does not support the operation or argument",
|
||||
.security_error => "The operation is insecure",
|
||||
.network_error => "A network error occurred",
|
||||
.abort_error => "The operation was aborted",
|
||||
.url_mismatch_error => "The given URL does not match another URL",
|
||||
.quota_exceeded_error => "The quota has been exceeded",
|
||||
.timeout_error => "The operation timed out",
|
||||
.invalid_node_type_error => "The supplied node is incorrect or has an incorrect ancestor for this operation",
|
||||
.data_clone_error => "The object can not be cloned",
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -86,15 +86,15 @@ pub fn parseFromString(
|
||||
var parser = Parser.init(arena, doc_node, page);
|
||||
parser.parseXML(html);
|
||||
|
||||
if (parser.err) |pe| {
|
||||
return pe.err;
|
||||
if (parser.err != null or doc_node.firstChild() == null) {
|
||||
// Return a document with a <parsererror> element per spec.
|
||||
const err_doc = try page._factory.document(XMLDocument{ ._proto = undefined });
|
||||
var err_parser = Parser.init(arena, err_doc.asNode(), page);
|
||||
err_parser.parseXML("<parsererror xmlns=\"http://www.mozilla.org/newlayout/xml/parsererror.xml\">error</parsererror>");
|
||||
return err_doc.asDocument();
|
||||
}
|
||||
|
||||
const first_child = doc_node.firstChild() orelse {
|
||||
// Empty XML or no root element - this is a parse error.
|
||||
// TODO: Return a document with a <parsererror> element per spec.
|
||||
return error.JsException;
|
||||
};
|
||||
const first_child = doc_node.firstChild().?;
|
||||
|
||||
// If first node is a `ProcessingInstruction`, skip it.
|
||||
if (first_child.getNodeType() == 7) {
|
||||
|
||||
@@ -63,6 +63,11 @@ _script_created_parser: ?Parser.Streaming = null,
|
||||
_adopted_style_sheets: ?js.Object.Global = null,
|
||||
_selection: Selection = .init,
|
||||
|
||||
// https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#throw-on-dynamic-markup-insertion-counter
|
||||
// Incremented during custom element reactions when parsing. When > 0,
|
||||
// document.open/close/write/writeln must throw InvalidStateError.
|
||||
_throw_on_dynamic_markup_insertion_counter: u32 = 0,
|
||||
|
||||
_on_selectionchange: ?js.Function.Global = null,
|
||||
|
||||
pub fn getOnSelectionChange(self: *Document) ?js.Function.Global {
|
||||
@@ -360,6 +365,11 @@ pub fn createEvent(_: *const Document, event_type: []const u8, page: *Page) !*@i
|
||||
return (try KeyboardEvent.init("", null, page)).asEvent();
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, normalized, "inputevent")) {
|
||||
const InputEvent = @import("event/InputEvent.zig");
|
||||
return (try InputEvent.init("", null, page)).asEvent();
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, normalized, "mouseevent") or std.mem.eql(u8, normalized, "mouseevents")) {
|
||||
const MouseEvent = @import("event/MouseEvent.zig");
|
||||
return (try MouseEvent.init("", null, page)).asEvent();
|
||||
@@ -641,6 +651,10 @@ pub fn write(self: *Document, text: []const []const u8, page: *Page) !void {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
if (self._throw_on_dynamic_markup_insertion_counter > 0) {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
const html = blk: {
|
||||
var joined: std.ArrayList(u8) = .empty;
|
||||
for (text) |str| {
|
||||
@@ -723,6 +737,10 @@ pub fn open(self: *Document, page: *Page) !*Document {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
if (self._throw_on_dynamic_markup_insertion_counter > 0) {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
if (page._load_state == .parsing) {
|
||||
return self;
|
||||
}
|
||||
@@ -761,6 +779,10 @@ pub fn close(self: *Document, page: *Page) !void {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
if (self._throw_on_dynamic_markup_insertion_counter > 0) {
|
||||
return error.InvalidStateError;
|
||||
}
|
||||
|
||||
if (self._script_created_parser == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1580,6 +1580,36 @@ pub const Tag = enum {
|
||||
else => tag,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isBlock(self: Tag) bool {
|
||||
// zig fmt: off
|
||||
return switch (self) {
|
||||
// Semantic Layout
|
||||
.article, .aside, .footer, .header, .main, .nav, .section,
|
||||
// Grouping / Containers
|
||||
.address, .div, .fieldset, .figure, .p,
|
||||
// Headings
|
||||
.h1, .h2, .h3, .h4, .h5, .h6,
|
||||
// Lists
|
||||
.dl, .ol, .ul,
|
||||
// Preformatted / Quotes
|
||||
.blockquote, .pre,
|
||||
// Tables
|
||||
.table,
|
||||
// Other
|
||||
.hr,
|
||||
=> true,
|
||||
else => false,
|
||||
};
|
||||
// zig fmt: on
|
||||
}
|
||||
|
||||
pub fn isMetadata(self: Tag) bool {
|
||||
return switch (self) {
|
||||
.base, .head, .link, .meta, .noscript, .script, .style, .template, .title => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const JsApi = struct {
|
||||
|
||||
@@ -44,6 +44,7 @@ pub const Type = union(enum) {
|
||||
screen_orientation: *@import("Screen.zig").Orientation,
|
||||
visual_viewport: *@import("VisualViewport.zig"),
|
||||
file_reader: *@import("FileReader.zig"),
|
||||
font_face_set: *@import("css/FontFaceSet.zig"),
|
||||
};
|
||||
|
||||
pub fn init(page: *Page) !*EventTarget {
|
||||
@@ -139,6 +140,7 @@ pub fn format(self: *EventTarget, writer: *std.Io.Writer) !void {
|
||||
.screen_orientation => writer.writeAll("<ScreenOrientation>"),
|
||||
.visual_viewport => writer.writeAll("<VisualViewport>"),
|
||||
.file_reader => writer.writeAll("<FileReader>"),
|
||||
.font_face_set => writer.writeAll("<FontFaceSet>"),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -157,6 +159,7 @@ pub fn toString(self: *EventTarget) []const u8 {
|
||||
.screen_orientation => return "[object ScreenOrientation]",
|
||||
.visual_viewport => return "[object VisualViewport]",
|
||||
.file_reader => return "[object FileReader]",
|
||||
.font_face_set => return "[object FontFaceSet]",
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
28
src/browser/webapi/FileList.zig
Normal file
28
src/browser/webapi/FileList.zig
Normal file
@@ -0,0 +1,28 @@
|
||||
const js = @import("../js/js.zig");
|
||||
|
||||
const FileList = @This();
|
||||
|
||||
/// Padding to avoid zero-size struct, which causes identity_map pointer collisions.
|
||||
_pad: bool = false,
|
||||
|
||||
pub fn getLength(_: *const FileList) u32 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
pub fn item(_: *const FileList, _: u32) ?*@import("File.zig") {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
pub const bridge = js.Bridge(FileList);
|
||||
|
||||
pub const Meta = struct {
|
||||
pub const name = "FileList";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const empty_with_no_proto = true;
|
||||
};
|
||||
|
||||
pub const length = bridge.accessor(FileList.getLength, null, .{});
|
||||
pub const item = bridge.function(FileList.item, .{});
|
||||
};
|
||||
@@ -52,7 +52,7 @@ pub const ConstructorSettings = struct {
|
||||
/// ```
|
||||
///
|
||||
/// We currently support only the first 2.
|
||||
pub fn constructor(
|
||||
pub fn init(
|
||||
width: u32,
|
||||
height: u32,
|
||||
maybe_settings: ?ConstructorSettings,
|
||||
@@ -106,7 +106,7 @@ pub const JsApi = struct {
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
};
|
||||
|
||||
pub const constructor = bridge.constructor(ImageData.constructor, .{ .dom_exception = true });
|
||||
pub const constructor = bridge.constructor(ImageData.init, .{ .dom_exception = true });
|
||||
|
||||
pub const colorSpace = bridge.property("srgb", .{ .template = false, .readonly = true });
|
||||
pub const pixelFormat = bridge.property("rgba-unorm8", .{ .template = false, .readonly = true });
|
||||
|
||||
@@ -93,12 +93,12 @@ pub fn init(callback: js.Function.Temp, options: ?ObserverInit, page: *Page) !*I
|
||||
}
|
||||
|
||||
pub fn deinit(self: *IntersectionObserver, shutdown: bool, session: *Session) void {
|
||||
self._callback.release();
|
||||
if ((comptime IS_DEBUG) and !shutdown) {
|
||||
std.debug.assert(self._observing.items.len == 0);
|
||||
if (shutdown) {
|
||||
self._callback.release();
|
||||
session.releaseArena(self._arena);
|
||||
} else if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void {
|
||||
@@ -111,7 +111,6 @@ pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void
|
||||
|
||||
// Register with page if this is our first observation
|
||||
if (self._observing.items.len == 0) {
|
||||
page.js.strongRef(self);
|
||||
try page.registerIntersectionObserver(self);
|
||||
}
|
||||
|
||||
@@ -146,22 +145,18 @@ pub fn unobserve(self: *IntersectionObserver, target: *Element, page: *Page) voi
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (self._observing.items.len == 0) {
|
||||
page.js.safeWeakRef(self);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disconnect(self: *IntersectionObserver, page: *Page) void {
|
||||
page.unregisterIntersectionObserver(self);
|
||||
self._observing.clearRetainingCapacity();
|
||||
self._previous_states.clearRetainingCapacity();
|
||||
|
||||
for (self._pending_entries.items) |entry| {
|
||||
entry.deinit(false, page._session);
|
||||
}
|
||||
self._pending_entries.clearRetainingCapacity();
|
||||
page.js.safeWeakRef(self);
|
||||
|
||||
self._observing.clearRetainingCapacity();
|
||||
page.unregisterIntersectionObserver(self);
|
||||
}
|
||||
|
||||
pub fn takeRecords(self: *IntersectionObserver, page: *Page) ![]*IntersectionObserverEntry {
|
||||
@@ -363,7 +358,6 @@ pub const JsApi = struct {
|
||||
pub const name = "IntersectionObserver";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(IntersectionObserver.deinit);
|
||||
};
|
||||
|
||||
|
||||
@@ -86,12 +86,12 @@ pub fn init(callback: js.Function.Temp, page: *Page) !*MutationObserver {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *MutationObserver, shutdown: bool, session: *Session) void {
|
||||
self._callback.release();
|
||||
if ((comptime IS_DEBUG) and !shutdown) {
|
||||
std.debug.assert(self._observing.items.len == 0);
|
||||
if (shutdown) {
|
||||
self._callback.release();
|
||||
session.releaseArena(self._arena);
|
||||
} else if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, page: *Page) !void {
|
||||
@@ -158,7 +158,6 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions,
|
||||
|
||||
// Register with page if this is our first observation
|
||||
if (self._observing.items.len == 0) {
|
||||
page.js.strongRef(self);
|
||||
try page.registerMutationObserver(self);
|
||||
}
|
||||
|
||||
@@ -169,13 +168,13 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions,
|
||||
}
|
||||
|
||||
pub fn disconnect(self: *MutationObserver, page: *Page) void {
|
||||
page.unregisterMutationObserver(self);
|
||||
self._observing.clearRetainingCapacity();
|
||||
for (self._pending_records.items) |record| {
|
||||
record.deinit(false, page._session);
|
||||
}
|
||||
self._pending_records.clearRetainingCapacity();
|
||||
page.js.safeWeakRef(self);
|
||||
|
||||
self._observing.clearRetainingCapacity();
|
||||
page.unregisterMutationObserver(self);
|
||||
}
|
||||
|
||||
pub fn takeRecords(self: *MutationObserver, page: *Page) ![]*MutationRecord {
|
||||
@@ -441,7 +440,6 @@ pub const JsApi = struct {
|
||||
pub const name = "MutationObserver";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(MutationObserver.deinit);
|
||||
};
|
||||
|
||||
|
||||
@@ -18,13 +18,21 @@
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
|
||||
const PluginArray = @import("PluginArray.zig");
|
||||
const Permissions = @import("Permissions.zig");
|
||||
const StorageManager = @import("StorageManager.zig");
|
||||
|
||||
const Navigator = @This();
|
||||
_pad: bool = false,
|
||||
_plugins: PluginArray = .{},
|
||||
_permissions: Permissions = .{},
|
||||
_storage: StorageManager = .{},
|
||||
|
||||
pub const init: Navigator = .{};
|
||||
|
||||
@@ -55,6 +63,19 @@ pub fn getPlugins(self: *Navigator) *PluginArray {
|
||||
return &self._plugins;
|
||||
}
|
||||
|
||||
pub fn getPermissions(self: *Navigator) *Permissions {
|
||||
return &self._permissions;
|
||||
}
|
||||
|
||||
pub fn getStorage(self: *Navigator) *StorageManager {
|
||||
return &self._storage;
|
||||
}
|
||||
|
||||
pub fn getBattery(_: *const Navigator, page: *Page) !js.Promise {
|
||||
log.info(.not_implemented, "navigator.getBattery", .{});
|
||||
return page.js.local.?.rejectErrorPromise(.{ .dom_exception = .{ .err = error.NotSupported } });
|
||||
}
|
||||
|
||||
pub fn registerProtocolHandler(_: *const Navigator, scheme: []const u8, url: [:0]const u8, page: *const Page) !void {
|
||||
try validateProtocolHandlerScheme(scheme);
|
||||
try validateProtocolHandlerURL(url, page);
|
||||
@@ -144,6 +165,7 @@ pub const JsApi = struct {
|
||||
pub const onLine = bridge.property(true, .{ .template = false });
|
||||
pub const cookieEnabled = bridge.property(true, .{ .template = false });
|
||||
pub const hardwareConcurrency = bridge.property(4, .{ .template = false });
|
||||
pub const deviceMemory = bridge.property(@as(f64, 8.0), .{ .template = false });
|
||||
pub const maxTouchPoints = bridge.property(0, .{ .template = false });
|
||||
pub const vendor = bridge.property("", .{ .template = false });
|
||||
pub const product = bridge.property("Gecko", .{ .template = false });
|
||||
@@ -156,4 +178,12 @@ pub const JsApi = struct {
|
||||
|
||||
// Methods
|
||||
pub const javaEnabled = bridge.function(Navigator.javaEnabled, .{});
|
||||
pub const getBattery = bridge.function(Navigator.getBattery, .{});
|
||||
pub const permissions = bridge.accessor(Navigator.getPermissions, null, .{});
|
||||
pub const storage = bridge.accessor(Navigator.getStorage, null, .{});
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "WebApi: Navigator" {
|
||||
try testing.htmlRunner("navigator", .{});
|
||||
}
|
||||
|
||||
@@ -285,6 +285,19 @@ pub fn getTextContentAlloc(self: *Node, allocator: Allocator) error{WriteFailed}
|
||||
return data[0 .. data.len - 1 :0];
|
||||
}
|
||||
|
||||
/// Returns the "child text content" which is the concatenation of the data
|
||||
/// of all the Text node children of the node, in tree order.
|
||||
/// This differs from textContent which includes all descendant text.
|
||||
/// See: https://dom.spec.whatwg.org/#concept-child-text-content
|
||||
pub fn getChildTextContent(self: *Node, writer: *std.Io.Writer) error{WriteFailed}!void {
|
||||
var it = self.childrenIterator();
|
||||
while (it.next()) |child| {
|
||||
if (child.is(CData.Text)) |text| {
|
||||
try writer.writeAll(text._proto._data.str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setTextContent(self: *Node, data: []const u8, page: *Page) !void {
|
||||
switch (self._type) {
|
||||
.element => |el| {
|
||||
@@ -493,6 +506,11 @@ pub fn ownerDocument(self: *const Node, page: *const Page) ?*Document {
|
||||
return page.document;
|
||||
}
|
||||
|
||||
pub fn ownerPage(self: *const Node, default: *Page) *Page {
|
||||
const doc = self.ownerDocument(default) orelse return default;
|
||||
return doc._page orelse default;
|
||||
}
|
||||
|
||||
pub fn isSameDocumentAs(self: *const Node, other: *const Node, page: *const Page) bool {
|
||||
// Get the root document for each node
|
||||
const self_doc = if (self._type == .document) self._type.document else self.ownerDocument(page);
|
||||
|
||||
94
src/browser/webapi/Permissions.zig
Normal file
94
src/browser/webapi/Permissions.zig
Normal file
@@ -0,0 +1,94 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
pub fn registerTypes() []const type {
|
||||
return &.{ Permissions, PermissionStatus };
|
||||
}
|
||||
|
||||
const Permissions = @This();
|
||||
|
||||
// Padding to avoid zero-size struct pointer collisions
|
||||
_pad: bool = false,
|
||||
|
||||
const QueryDescriptor = struct {
|
||||
name: []const u8,
|
||||
};
|
||||
// We always report 'prompt' (the default safe value — neither granted nor denied).
|
||||
pub fn query(_: *const Permissions, qd: QueryDescriptor, page: *Page) !js.Promise {
|
||||
const arena = try page.getArena(.{ .debug = "PermissionStatus" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const status = try arena.create(PermissionStatus);
|
||||
status.* = .{
|
||||
._arena = arena,
|
||||
._state = "prompt",
|
||||
._name = try arena.dupe(u8, qd.name),
|
||||
};
|
||||
return page.js.local.?.resolvePromise(status);
|
||||
}
|
||||
|
||||
const PermissionStatus = struct {
|
||||
_arena: Allocator,
|
||||
_name: []const u8,
|
||||
_state: []const u8,
|
||||
|
||||
pub fn deinit(self: *PermissionStatus, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
fn getName(self: *const PermissionStatus) []const u8 {
|
||||
return self._name;
|
||||
}
|
||||
|
||||
fn getState(self: *const PermissionStatus) []const u8 {
|
||||
return self._state;
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
pub const bridge = js.Bridge(PermissionStatus);
|
||||
pub const Meta = struct {
|
||||
pub const name = "PermissionStatus";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(PermissionStatus.deinit);
|
||||
};
|
||||
pub const name = bridge.accessor(getName, null, .{});
|
||||
pub const state = bridge.accessor(getState, null, .{});
|
||||
};
|
||||
};
|
||||
|
||||
pub const JsApi = struct {
|
||||
pub const bridge = js.Bridge(Permissions);
|
||||
|
||||
pub const Meta = struct {
|
||||
pub const name = "Permissions";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const empty_with_no_proto = true;
|
||||
};
|
||||
|
||||
pub const query = bridge.function(Permissions.query, .{ .dom_exception = true });
|
||||
};
|
||||
@@ -21,22 +21,31 @@ const String = @import("../../string.zig").String;
|
||||
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Node = @import("Node.zig");
|
||||
const DocumentFragment = @import("DocumentFragment.zig");
|
||||
const AbstractRange = @import("AbstractRange.zig");
|
||||
const DOMRect = @import("DOMRect.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Range = @This();
|
||||
|
||||
_proto: *AbstractRange,
|
||||
|
||||
pub fn asAbstractRange(self: *Range) *AbstractRange {
|
||||
return self._proto;
|
||||
pub fn init(page: *Page) !*Range {
|
||||
const arena = try page.getArena(.{ .debug = "Range" });
|
||||
errdefer page.releaseArena(arena);
|
||||
return page._factory.abstractRange(arena, Range{ ._proto = undefined }, page);
|
||||
}
|
||||
|
||||
pub fn init(page: *Page) !*Range {
|
||||
return page._factory.abstractRange(Range{ ._proto = undefined }, page);
|
||||
pub fn deinit(self: *Range, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asAbstractRange(self: *Range) *AbstractRange {
|
||||
return self._proto;
|
||||
}
|
||||
|
||||
pub fn setStart(self: *Range, node: *Node, offset: u32) !void {
|
||||
@@ -309,7 +318,10 @@ pub fn intersectsNode(self: *const Range, node: *Node) bool {
|
||||
}
|
||||
|
||||
pub fn cloneRange(self: *const Range, page: *Page) !*Range {
|
||||
const clone = try page._factory.abstractRange(Range{ ._proto = undefined }, page);
|
||||
const arena = try page.getArena(.{ .debug = "Range.clone" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const clone = try page._factory.abstractRange(arena, Range{ ._proto = undefined }, page);
|
||||
clone._proto._end_offset = self._proto._end_offset;
|
||||
clone._proto._start_offset = self._proto._start_offset;
|
||||
clone._proto._end_container = self._proto._end_container;
|
||||
@@ -687,6 +699,8 @@ pub const JsApi = struct {
|
||||
pub const name = "Range";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(Range.deinit);
|
||||
};
|
||||
|
||||
// Constants for compareBoundaryPoints
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user