mirror of
https://github.com/lightpanda-io/browser.git
synced 2026-03-28 15:40:04 +00:00
Compare commits
420 Commits
ci-web-bot
...
http-cache
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c9b8632b50 | ||
|
|
41f154b042 | ||
|
|
666b1d1016 | ||
|
|
1559346a67 | ||
|
|
0c97ff2cab | ||
|
|
2a085dc0c2 | ||
|
|
26ba3b52ca | ||
|
|
076c1942a4 | ||
|
|
def6e564d2 | ||
|
|
368ac00573 | ||
|
|
756373e0ba | ||
|
|
e596e0c310 | ||
|
|
4ccf150ed0 | ||
|
|
dcb08b97ad | ||
|
|
c2afb2fb17 | ||
|
|
8a9007b939 | ||
|
|
cd31e68212 | ||
|
|
46d25652e0 | ||
|
|
6972a3e130 | ||
|
|
8f13ace8a2 | ||
|
|
31921e4890 | ||
|
|
3abf026f77 | ||
|
|
3f0b17c755 | ||
|
|
a97ce89362 | ||
|
|
9da1b96a14 | ||
|
|
6bd5f1d013 | ||
|
|
3f2a4ada53 | ||
|
|
edd2c661f4 | ||
|
|
9e902d176e | ||
|
|
cf46f0097a | ||
|
|
d94fd2a43b | ||
|
|
8c5e737669 | ||
|
|
fb29a1c5bf | ||
|
|
a2e59af44c | ||
|
|
00c962bdd8 | ||
|
|
1fa87442b8 | ||
|
|
ac5400696a | ||
|
|
5062273b7a | ||
|
|
9c2393351d | ||
|
|
f0cfe3ffc8 | ||
|
|
615fcffb99 | ||
|
|
13b746f9e4 | ||
|
|
e90fce4c55 | ||
|
|
59175437b5 | ||
|
|
e950384b9b | ||
|
|
78440350dc | ||
|
|
f435297949 | ||
|
|
54d1563cf3 | ||
|
|
f36499b806 | ||
|
|
fa1dd5237d | ||
|
|
964fa0a8aa | ||
|
|
db01158d2d | ||
|
|
e997f8317e | ||
|
|
a88c21cdb5 | ||
|
|
7a7c4b9f49 | ||
|
|
edd0c5c83f | ||
|
|
c6861829c3 | ||
|
|
e14c8b3025 | ||
|
|
5bc00c595c | ||
|
|
db5fb40de0 | ||
|
|
4e6a357e6e | ||
|
|
6cf515151d | ||
|
|
bf6e4cf3a6 | ||
|
|
60936baa96 | ||
|
|
c29f72a7e8 | ||
|
|
d4427e4370 | ||
|
|
b85ec04175 | ||
|
|
da05ba0eb7 | ||
|
|
414a68abeb | ||
|
|
52455b732b | ||
|
|
ba71268eb3 | ||
|
|
694aac5ce8 | ||
|
|
cbab0b712a | ||
|
|
1aee3db521 | ||
|
|
f634c9843d | ||
|
|
e1e45d1c5d | ||
|
|
ff288c8aa2 | ||
|
|
e1b14a6833 | ||
|
|
015edc3848 | ||
|
|
bd2406f803 | ||
|
|
3c29e7dbd4 | ||
|
|
586413357e | ||
|
|
9a055a61a6 | ||
|
|
5fb561dc9c | ||
|
|
b14ae02548 | ||
|
|
51fb08e6aa | ||
|
|
a6d699ad5d | ||
|
|
8372b45cc5 | ||
|
|
1739ae6b9a | ||
|
|
ba62150f7a | ||
|
|
8143a61955 | ||
|
|
e16c479781 | ||
|
|
c0c4e26d63 | ||
|
|
b252aa71d0 | ||
|
|
9ef8d9c189 | ||
|
|
9f27416603 | ||
|
|
0729f4a03a | ||
|
|
21f7b95db9 | ||
|
|
4125a5aa1e | ||
|
|
6d0dc6cb1e | ||
|
|
0675c23217 | ||
|
|
d0e6a1f5bb | ||
|
|
91afe08235 | ||
|
|
041d9d41fb | ||
|
|
7009fb5899 | ||
|
|
d2003c7c9a | ||
|
|
ce002b999c | ||
|
|
5b1056862a | ||
|
|
cc4ac99b4a | ||
|
|
46df341506 | ||
|
|
b698e2d078 | ||
|
|
5cc5e513dd | ||
|
|
e048b0372f | ||
|
|
d7aaa1c870 | ||
|
|
463aac9b59 | ||
|
|
d9cdd78138 | ||
|
|
44a83c0e1c | ||
|
|
96f24a2662 | ||
|
|
5d2801c652 | ||
|
|
deb08b7880 | ||
|
|
96e5054ffc | ||
|
|
c9753a690d | ||
|
|
27aaf46630 | ||
|
|
84190e1e06 | ||
|
|
b0b1f755ea | ||
|
|
fcf1d30c77 | ||
|
|
3c532e5aef | ||
|
|
3efcb2705d | ||
|
|
c25f389e91 | ||
|
|
533f4075a3 | ||
|
|
f508d37426 | ||
|
|
548c6eeb7a | ||
|
|
c8265f4807 | ||
|
|
a74e46debf | ||
|
|
1ceaabe69f | ||
|
|
91a2441ed8 | ||
|
|
2ecbc833a9 | ||
|
|
dac456d98c | ||
|
|
422320d9ac | ||
|
|
18b635936c | ||
|
|
7b2895ef08 | ||
|
|
b09e9f7398 | ||
|
|
ac651328c3 | ||
|
|
0380df1cb4 | ||
|
|
21421d5b53 | ||
|
|
80c309aa69 | ||
|
|
f5bc7310b1 | ||
|
|
21e9967a8a | ||
|
|
32f450f803 | ||
|
|
1972142703 | ||
|
|
b10d866e4b | ||
|
|
b373fb4a42 | ||
|
|
ddd34dc57b | ||
|
|
265c5aba2e | ||
|
|
21fc6d1cf6 | ||
|
|
1a7fe6129c | ||
|
|
37462a16c5 | ||
|
|
323ec0046c | ||
|
|
dc7c6984fb | ||
|
|
92f7248a16 | ||
|
|
1ec3e156fb | ||
|
|
1121bed49b | ||
|
|
0eb43fb530 | ||
|
|
1f50dc38c3 | ||
|
|
a9d044ec10 | ||
|
|
1bdf464ef2 | ||
|
|
a70da0d176 | ||
|
|
8c52b8357c | ||
|
|
0243c6b450 | ||
|
|
f7071447cb | ||
|
|
c038bfafa1 | ||
|
|
4d60f56e66 | ||
|
|
56d3cf51e8 | ||
|
|
3013e3a9e6 | ||
|
|
fe9b2e672b | ||
|
|
3e9fa4ca47 | ||
|
|
a2e66f85a1 | ||
|
|
a9b9cf14c3 | ||
|
|
d4b941cf30 | ||
|
|
4b6bf29b83 | ||
|
|
a8b147dfc0 | ||
|
|
65627c1296 | ||
|
|
3dcdaa0a9b | ||
|
|
5bc00045c7 | ||
|
|
93ea95af24 | ||
|
|
f754773bf6 | ||
|
|
42bb2f3c58 | ||
|
|
68337a6989 | ||
|
|
bf6dbedbe4 | ||
|
|
a204f40968 | ||
|
|
fe3faa0a5a | ||
|
|
39d5a25258 | ||
|
|
f4044230fd | ||
|
|
4d6d8d9a83 | ||
|
|
c4176a282f | ||
|
|
1352839472 | ||
|
|
535128da71 | ||
|
|
099550dddc | ||
|
|
7fe26bc966 | ||
|
|
cc6587d6e5 | ||
|
|
8b310ce993 | ||
|
|
be8ba53263 | ||
|
|
043d48d1c7 | ||
|
|
e8fe80189b | ||
|
|
0e48f317cb | ||
|
|
867745c71d | ||
|
|
a1a7919f74 | ||
|
|
c3de47de90 | ||
|
|
dd35bdfeb4 | ||
|
|
07c3aec34f | ||
|
|
bce3e8f7c6 | ||
|
|
ba9777e754 | ||
|
|
7040801dfa | ||
|
|
4f8a6b62b8 | ||
|
|
d3dad772cf | ||
|
|
944b672fea | ||
|
|
b1c54aa92d | ||
|
|
4ca6f43aeb | ||
|
|
f09e66e1cc | ||
|
|
8b7a4ceaaa | ||
|
|
51e90f5971 | ||
|
|
8db64772b7 | ||
|
|
bf0be60b89 | ||
|
|
172481dd72 | ||
|
|
c6c0492c33 | ||
|
|
fca29a8be2 | ||
|
|
d365240f91 | ||
|
|
1ed61d4783 | ||
|
|
a1fb11ae33 | ||
|
|
9971816711 | ||
|
|
c38d9a3098 | ||
|
|
02198de455 | ||
|
|
6cd8202310 | ||
|
|
4d7b7d1d42 | ||
|
|
e4e21f52b5 | ||
|
|
84e1cd08b6 | ||
|
|
7796753e7a | ||
|
|
880205e874 | ||
|
|
1b96087b08 | ||
|
|
aa246c9e9f | ||
|
|
f1d311d232 | ||
|
|
e4f7fca10d | ||
|
|
3d6d669a50 | ||
|
|
c4097e2b7e | ||
|
|
619d27c773 | ||
|
|
1522c90294 | ||
|
|
794e15ce21 | ||
|
|
34771b835e | ||
|
|
8df51b232a | ||
|
|
13b8ce18b2 | ||
|
|
448386e52b | ||
|
|
bf07659dd5 | ||
|
|
16dfad0895 | ||
|
|
f61449c31c | ||
|
|
60699229ca | ||
|
|
e1dd26b307 | ||
|
|
7d835ef99d | ||
|
|
0971df4dfc | ||
|
|
9fb57fbac0 | ||
|
|
48ead90850 | ||
|
|
cc88bb7feb | ||
|
|
a725e2aa6a | ||
|
|
ee637c3662 | ||
|
|
65d7a39554 | ||
|
|
37735b1caa | ||
|
|
c8f8d79f45 | ||
|
|
1866e7141e | ||
|
|
feccc9f5ce | ||
|
|
af803da5c8 | ||
|
|
25c89c9940 | ||
|
|
697a2834c2 | ||
|
|
056b8bb536 | ||
|
|
625d424199 | ||
|
|
5329d05005 | ||
|
|
d2c55da6c9 | ||
|
|
2e6dd3edfe | ||
|
|
a95b4ea7b9 | ||
|
|
c891eff664 | ||
|
|
68564ca714 | ||
|
|
ca931a11be | ||
|
|
6c7272061c | ||
|
|
4f262e5bed | ||
|
|
ff26b0d5a4 | ||
|
|
a6ccc72d15 | ||
|
|
487ee18358 | ||
|
|
dc3d2e9790 | ||
|
|
f6d0e484b0 | ||
|
|
4cea9aba3c | ||
|
|
7348a68c84 | ||
|
|
7d90c3f582 | ||
|
|
2a103fc94a | ||
|
|
753391b7e2 | ||
|
|
94ce5edd20 | ||
|
|
3626f70d3e | ||
|
|
24cc24ed50 | ||
|
|
dd29ba4664 | ||
|
|
7927ad8fcf | ||
|
|
d23453ce45 | ||
|
|
a22040efa9 | ||
|
|
ba3da32ce6 | ||
|
|
9d2ba52160 | ||
|
|
e610506df4 | ||
|
|
dd91d28bfa | ||
|
|
1ebf7460fe | ||
|
|
8c930e5c33 | ||
|
|
4fb2f7474c | ||
|
|
5301f79989 | ||
|
|
6a7f7fdf15 | ||
|
|
11fb5f990e | ||
|
|
d1ee0442ea | ||
|
|
62f31ea24a | ||
|
|
f4ca5313e6 | ||
|
|
064e7b404b | ||
|
|
dfd90bd564 | ||
|
|
55508eb418 | ||
|
|
2a4fa4ed6f | ||
|
|
cf7c9f6372 | ||
|
|
ec68c3207d | ||
|
|
ecf140f3d6 | ||
|
|
13f73b7b87 | ||
|
|
12c5bcd24f | ||
|
|
56f47ee574 | ||
|
|
74f0436ac7 | ||
|
|
22d31b1527 | ||
|
|
9f3bca771a | ||
|
|
4e16d90a81 | ||
|
|
d669d5c153 | ||
|
|
343d985e96 | ||
|
|
dc3958356d | ||
|
|
c4e85c3277 | ||
|
|
89e46376dc | ||
|
|
8eeb34dba8 | ||
|
|
7171305972 | ||
|
|
2b0c223425 | ||
|
|
8f960ab0f7 | ||
|
|
60350efa10 | ||
|
|
687f577562 | ||
|
|
8e59ce9e9f | ||
|
|
33d75354a2 | ||
|
|
a318c6263d | ||
|
|
0e4a65efb7 | ||
|
|
b88134cf04 | ||
|
|
2aaa212dbc | ||
|
|
1e37990938 | ||
|
|
a417c73bf7 | ||
|
|
37c34351ee | ||
|
|
8672232ee2 | ||
|
|
83ba974f94 | ||
|
|
85ebbe8759 | ||
|
|
61cba3f6eb | ||
|
|
3ad10ff8d0 | ||
|
|
183643547b | ||
|
|
5568340b9a | ||
|
|
1399bd3065 | ||
|
|
9172e16e80 | ||
|
|
3e5f602396 | ||
|
|
3c97332fd8 | ||
|
|
379a3f27b8 | ||
|
|
ecec932a47 | ||
|
|
e239f69f69 | ||
|
|
c77cb317c4 | ||
|
|
034b089433 | ||
|
|
c0db96482c | ||
|
|
ffa8fa0a6f | ||
|
|
7e1d459a2d | ||
|
|
71c4fce87f | ||
|
|
e91da78ebb | ||
|
|
8adad6fa61 | ||
|
|
b47004bb7c | ||
|
|
08a7fb4de0 | ||
|
|
c17a9b11cc | ||
|
|
245a92a644 | ||
|
|
6b313946fe | ||
|
|
4586fb1d13 | ||
|
|
aa051434cb | ||
|
|
c3a53752e7 | ||
|
|
f3e1204fa1 | ||
|
|
0a5eb93565 | ||
|
|
b8a3135835 | ||
|
|
330dfccb89 | ||
|
|
d80e926015 | ||
|
|
2a2b067633 | ||
|
|
be73c14395 | ||
|
|
9cd5afe5b6 | ||
|
|
1cb5d26344 | ||
|
|
ec9a2d8155 | ||
|
|
4ba40f2295 | ||
|
|
b674c2e448 | ||
|
|
0227afffc8 | ||
|
|
b8139a6e83 | ||
|
|
bde5fc9264 | ||
|
|
6a421a1d96 | ||
|
|
4f55a0f1d0 | ||
|
|
3de55899fa | ||
|
|
ae4ad713ec | ||
|
|
21313adf9c | ||
|
|
9c1293ca45 | ||
|
|
1cb1e6b680 | ||
|
|
ed6ddeaa4c | ||
|
|
de08a89e6b | ||
|
|
dd42ef1920 | ||
|
|
dd192be689 | ||
|
|
52250ed10e | ||
|
|
4a26cd8d68 | ||
|
|
2ca972c3c8 | ||
|
|
74c0d55a6c | ||
|
|
3271e1464e | ||
|
|
cabd62b48f | ||
|
|
58c2355c8b | ||
|
|
bfe2065b9f | ||
|
|
9332b1355e | ||
|
|
45705a3e29 | ||
|
|
e0f0b9f210 | ||
|
|
f2832447d4 | ||
|
|
471ba5baf6 | ||
|
|
248851701f | ||
|
|
0f46277b1f | ||
|
|
679e703754 | ||
|
|
768c3a533b | ||
|
|
9c7ecf221e |
4
.github/actions/install/action.yml
vendored
4
.github/actions/install/action.yml
vendored
@@ -13,7 +13,7 @@ inputs:
|
||||
zig-v8:
|
||||
description: 'zig v8 version to install'
|
||||
required: false
|
||||
default: 'v0.3.1'
|
||||
default: 'v0.3.4'
|
||||
v8:
|
||||
description: 'v8 version to install'
|
||||
required: false
|
||||
@@ -46,7 +46,7 @@ runs:
|
||||
|
||||
- name: Cache v8
|
||||
id: cache-v8
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v5
|
||||
env:
|
||||
cache-name: cache-v8
|
||||
with:
|
||||
|
||||
10
.github/workflows/e2e-integration-test.yml
vendored
10
.github/workflows/e2e-integration-test.yml
vendored
@@ -20,11 +20,9 @@ jobs:
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -32,7 +30,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -47,7 +45,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -55,7 +53,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
|
||||
80
.github/workflows/e2e-test.yml
vendored
80
.github/workflows/e2e-test.yml
vendored
@@ -9,15 +9,13 @@ env:
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
branches: [main]
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/zig-js-runtime"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "build.zig.zon"
|
||||
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
@@ -29,12 +27,10 @@ on:
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "build.zig.zon"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
@@ -52,8 +48,6 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -61,7 +55,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -76,7 +70,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -84,7 +78,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
@@ -126,7 +120,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -134,7 +128,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
@@ -182,32 +176,41 @@ jobs:
|
||||
name: wba-test
|
||||
needs: zig-build-release
|
||||
|
||||
env:
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
timeout-minutes: 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem
|
||||
# force a wakup of the auth server before requesting it w/ the test itself
|
||||
- run: curl https://${{ vars.WBA_DOMAIN }}
|
||||
|
||||
- run: |
|
||||
./lightpanda fetch https://crawltest.com/cdn-cgi/web-bot-auth \
|
||||
--log_level error \
|
||||
--web_bot_auth_key_file private_key.pem \
|
||||
- name: run wba test
|
||||
shell: bash
|
||||
run: |
|
||||
node webbotauth/validator.js &
|
||||
VALIDATOR_PID=$!
|
||||
sleep 5
|
||||
|
||||
exec 3<<< "${{ secrets.WBA_PRIVATE_KEY_PEM }}"
|
||||
|
||||
./lightpanda fetch --dump http://127.0.0.1:8989/ \
|
||||
--web_bot_auth_key_file /proc/self/fd/3 \
|
||||
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
|
||||
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
|
||||
--dump markdown \
|
||||
| tee output.log
|
||||
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }}
|
||||
|
||||
- run: cat output.log | grep -q "unknown public key or unknown verified bot ID for keyid"
|
||||
wait $VALIDATOR_PID
|
||||
exec 3>&-
|
||||
|
||||
cdp-and-hyperfine-bench:
|
||||
name: cdp-and-hyperfine-bench
|
||||
@@ -217,7 +220,6 @@ jobs:
|
||||
MAX_VmHWM: 28000 # 28MB (KB)
|
||||
MAX_CG_PEAK: 8000 # 8MB (KB)
|
||||
MAX_AVG_DURATION: 17
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
# How to give cgroups access to the user actions-runner on the host:
|
||||
# $ sudo apt install cgroup-tools
|
||||
@@ -232,7 +234,7 @@ jobs:
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
@@ -240,7 +242,7 @@ jobs:
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
@@ -326,7 +328,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
@@ -354,7 +356,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
@@ -372,7 +374,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
|
||||
@@ -5,7 +5,9 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
||||
|
||||
RELEASE: ${{ github.ref_type == 'tag' && github.ref_name || 'nightly' }}
|
||||
GIT_VERSION_FLAG: ${{ github.ref_type == 'tag' && format('-Dgit_version={0}', github.ref_name) || '' }}
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -33,8 +35,6 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -72,11 +72,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -87,7 +85,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -116,11 +114,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -131,7 +127,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
@@ -158,11 +154,9 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -173,7 +167,7 @@ jobs:
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) ${{ env.GIT_VERSION_FLAG }}
|
||||
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
43
.github/workflows/wpt.yml
vendored
43
.github/workflows/wpt.yml
vendored
@@ -5,11 +5,12 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
AWS_CF_DISTRIBUTION: ${{ vars.AWS_CF_DISTRIBUTION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "23 2 * * *"
|
||||
- cron: "21 2 * * *"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
@@ -18,23 +19,31 @@ jobs:
|
||||
wpt-build-release:
|
||||
name: zig build release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
ARCH: aarch64
|
||||
OS: linux
|
||||
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
os: ${{env.OS}}
|
||||
arch: ${{env.ARCH}}
|
||||
|
||||
- name: v8 snapshot
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast snapshot_creator -- src/snapshot.bin
|
||||
|
||||
- name: zig build release
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Dsnapshot_path=../../snapshot.bin -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=generic -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
path: |
|
||||
@@ -44,7 +53,7 @@ jobs:
|
||||
wpt-build-runner:
|
||||
name: build wpt runner
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
@@ -58,7 +67,7 @@ jobs:
|
||||
CGO_ENABLED=0 go build
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: wptrunner
|
||||
path: |
|
||||
@@ -72,8 +81,8 @@ jobs:
|
||||
- wpt-build-runner
|
||||
|
||||
# use a self host runner.
|
||||
runs-on: lpd-bench-hetzner
|
||||
timeout-minutes: 120
|
||||
runs-on: lpd-wpt-aws
|
||||
timeout-minutes: 600
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
@@ -90,14 +99,14 @@ jobs:
|
||||
run: ./wpt manifest
|
||||
|
||||
- name: download lightpanda release
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: download wptrunner
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: wptrunner
|
||||
|
||||
@@ -106,8 +115,8 @@ jobs:
|
||||
- name: run test with json output
|
||||
run: |
|
||||
./wpt serve 2> /dev/null & echo $! > WPT.pid
|
||||
sleep 10s
|
||||
./wptrunner -lpd-path ./lightpanda -json -concurrency 3 > wpt.json
|
||||
sleep 20s
|
||||
./wptrunner -lpd-path ./lightpanda -json -concurrency 5 -pool 5 --mem-limit 400 > wpt.json
|
||||
kill `cat WPT.pid`
|
||||
|
||||
- name: write commit
|
||||
@@ -115,7 +124,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: wpt-results
|
||||
path: |
|
||||
@@ -138,7 +147,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: wpt-results
|
||||
|
||||
|
||||
60
.github/workflows/zig-fmt.yml
vendored
60
.github/workflows/zig-fmt.yml
vendored
@@ -1,60 +0,0 @@
|
||||
name: zig-fmt
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
# running when the PR is marked ready_for_review w/o other change.
|
||||
# see https://github.com/orgs/community/discussions/25722#discussioncomment-3248917
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
||||
- uses: mlugg/setup-zig@v2
|
||||
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check ./*.zig ./**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
94
.github/workflows/zig-test.yml
vendored
94
.github/workflows/zig-test.yml
vendored
@@ -5,19 +5,18 @@ env:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
branches: [main]
|
||||
paths:
|
||||
- "build.zig"
|
||||
- "src/**"
|
||||
- "vendor/zig-js-runtime"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
pull_request:
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "build.zig.zon"
|
||||
|
||||
pull_request:
|
||||
# By default GH trigger on types opened, synchronize and reopened.
|
||||
# see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
|
||||
# Since we skip the job when the PR is in draft state, we want to force CI
|
||||
@@ -27,28 +26,63 @@ on:
|
||||
|
||||
paths:
|
||||
- ".github/**"
|
||||
- "src/**"
|
||||
- "build.zig"
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
- "vendor/**"
|
||||
- ".github/**"
|
||||
- "vendor/**"
|
||||
- "build.zig.zon"
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-test-debug:
|
||||
name: zig test using v8 in debug mode
|
||||
timeout-minutes: 15
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Zig version used from the `minimum_zig_version` field in build.zig.zon
|
||||
- uses: mlugg/setup-zig@v2
|
||||
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check ./*.zig ./**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
|
||||
zig-test-debug:
|
||||
name: zig test using v8 in debug mode
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
with:
|
||||
@@ -57,21 +91,18 @@ jobs:
|
||||
- name: zig build test
|
||||
run: zig build -Dprebuilt_v8_path=v8/libc_v8_debug.a -Dtsan=true test
|
||||
|
||||
zig-test:
|
||||
zig-test-release:
|
||||
name: zig test
|
||||
timeout-minutes: 15
|
||||
|
||||
# Don't run the CI with draft PR.
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event.pull_request.draft == false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# fetch submodules recusively, to get zig-js-runtime submodules also.
|
||||
submodules: recursive
|
||||
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
@@ -83,7 +114,7 @@ jobs:
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
@@ -93,14 +124,13 @@ jobs:
|
||||
|
||||
bench-fmt:
|
||||
name: perf-fmt
|
||||
needs: zig-test
|
||||
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
needs: zig-test-release
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
@@ -109,7 +139,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v8
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ FROM debian:stable-slim
|
||||
ARG MINISIG=0.12
|
||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||
ARG V8=14.0.365.4
|
||||
ARG ZIG_V8=v0.3.1
|
||||
ARG ZIG_V8=v0.3.4
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN apt-get update -yq && \
|
||||
|
||||
8
Makefile
8
Makefile
@@ -47,7 +47,7 @@ help:
|
||||
|
||||
# $(ZIG) commands
|
||||
# ------------
|
||||
.PHONY: build build-v8-snapshot build-dev run run-release shell test bench data end2end
|
||||
.PHONY: build build-v8-snapshot build-dev run run-release test bench data end2end
|
||||
|
||||
## Build v8 snapshot
|
||||
build-v8-snapshot:
|
||||
@@ -77,11 +77,6 @@ run-debug: build-dev
|
||||
@printf "\033[36mRunning...\033[0m\n"
|
||||
@./zig-out/bin/lightpanda || (printf "\033[33mRun ERROR\033[0m\n"; exit 1;)
|
||||
|
||||
## Run a JS shell in debug mode
|
||||
shell:
|
||||
@printf "\033[36mBuilding shell...\033[0m\n"
|
||||
@$(ZIG) build shell || (printf "\033[33mBuild ERROR\033[0m\n"; exit 1;)
|
||||
|
||||
## Test - `grep` is used to filter out the huge compile command on build
|
||||
ifeq ($(OS), macos)
|
||||
test:
|
||||
@@ -106,4 +101,3 @@ install: build
|
||||
|
||||
data:
|
||||
cd src/data && go run public_suffix_list_gen.go > public_suffix_list.zig
|
||||
|
||||
|
||||
36
README.md
36
README.md
@@ -1,18 +1,32 @@
|
||||
<p align="center">
|
||||
<a href="https://lightpanda.io"><img src="https://cdn.lightpanda.io/assets/images/logo/lpd-logo.png" alt="Logo" height=170></a>
|
||||
</p>
|
||||
|
||||
<h1 align="center">Lightpanda Browser</h1>
|
||||
<p align="center">
|
||||
<strong>The headless browser built from scratch for AI agents and automation.</strong><br>
|
||||
Not a Chromium fork. Not a WebKit patch. A new browser, written in Zig.
|
||||
</p>
|
||||
|
||||
<p align="center"><a href="https://lightpanda.io/">lightpanda.io</a></p>
|
||||
|
||||
</div>
|
||||
<div align="center">
|
||||
|
||||
[](https://github.com/lightpanda-io/browser/blob/main/LICENSE)
|
||||
[](https://twitter.com/lightpanda_io)
|
||||
[](https://github.com/lightpanda-io/browser)
|
||||
[](https://discord.gg/K63XeymfB5)
|
||||
|
||||
</div>
|
||||
<div align="center">
|
||||
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/execution-time.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
 
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/memory-frame.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
</div>
|
||||
|
||||
_Puppeteer requesting 100 pages from a local website on a AWS EC2 m5.large instance.
|
||||
See [benchmark details](https://github.com/lightpanda-io/demo)._
|
||||
|
||||
Lightpanda is the open-source browser made for headless usage:
|
||||
|
||||
@@ -26,16 +40,6 @@ Fast web automation for AI agents, LLM training, scraping and testing:
|
||||
- Exceptionally fast execution (11x faster than Chrome)
|
||||
- Instant startup
|
||||
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/execution-time.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
 
|
||||
[<img width="350px" src="https://cdn.lightpanda.io/assets/images/github/memory-frame.svg">
|
||||
](https://github.com/lightpanda-io/demo)
|
||||
</div>
|
||||
|
||||
_Puppeteer requesting 100 pages from a local website on a AWS EC2 m5.large instance.
|
||||
See [benchmark details](https://github.com/lightpanda-io/demo)._
|
||||
|
||||
[^1]: **Playwright support disclaimer:**
|
||||
Due to the nature of Playwright, a script that works with the current version of the browser may not function correctly with a future version. Playwright uses an intermediate JavaScript layer that selects an execution strategy based on the browser's available features. If Lightpanda adds a new [Web API](https://developer.mozilla.org/en-US/docs/Web/API), Playwright may choose to execute different code for the same script. This new code path could attempt to use features that are not yet implemented. Lightpanda makes an effort to add compatibility tests, but we can't cover all scenarios. If you encounter an issue, please create a [GitHub issue](https://github.com/lightpanda-io/browser/issues) and include the last known working version of the script.
|
||||
|
||||
@@ -186,8 +190,6 @@ Here are the key features we have implemented:
|
||||
|
||||
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
|
||||
|
||||
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.
|
||||
|
||||
## Build from sources
|
||||
|
||||
### Prerequisites
|
||||
@@ -196,10 +198,10 @@ Lightpanda is written with [Zig](https://ziglang.org/) `0.15.2`. You have to
|
||||
install it with the right version in order to build the project.
|
||||
|
||||
Lightpanda also depends on
|
||||
[zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime/) (with v8),
|
||||
[v8](https://chromium.googlesource.com/v8/v8.git),
|
||||
[Libcurl](https://curl.se/libcurl/) and [html5ever](https://github.com/servo/html5ever).
|
||||
|
||||
To be able to build the v8 engine for zig-js-runtime, you have to install some libs:
|
||||
To be able to build the v8 engine, you have to install some libs:
|
||||
|
||||
For **Debian/Ubuntu based Linux**:
|
||||
|
||||
|
||||
36
build.zig
36
build.zig
@@ -27,12 +27,14 @@ pub fn build(b: *Build) !void {
|
||||
const manifest = Manifest.init(b);
|
||||
|
||||
const git_commit = b.option([]const u8, "git_commit", "Current git commit");
|
||||
const git_version = b.option([]const u8, "git_version", "Current git version (from tag)");
|
||||
const prebuilt_v8_path = b.option([]const u8, "prebuilt_v8_path", "Path to prebuilt libc_v8.a");
|
||||
const snapshot_path = b.option([]const u8, "snapshot_path", "Path to v8 snapshot");
|
||||
|
||||
var opts = b.addOptions();
|
||||
opts.addOption([]const u8, "version", manifest.version);
|
||||
opts.addOption([]const u8, "git_commit", git_commit orelse "dev");
|
||||
opts.addOption(?[]const u8, "git_version", git_version orelse null);
|
||||
opts.addOption(?[]const u8, "snapshot_path", snapshot_path);
|
||||
|
||||
const enable_tsan = b.option(bool, "tsan", "Enable Thread Sanitizer") orelse false;
|
||||
@@ -52,8 +54,19 @@ pub fn build(b: *Build) !void {
|
||||
mod.addImport("lightpanda", mod); // allow circular "lightpanda" import
|
||||
mod.addImport("build_config", opts.createModule());
|
||||
|
||||
// Format check
|
||||
const fmt_step = b.step("fmt", "Check code formatting");
|
||||
const fmt = b.addFmt(.{
|
||||
.paths = &.{ "src", "build.zig", "build.zig.zon" },
|
||||
.check = true,
|
||||
});
|
||||
fmt_step.dependOn(&fmt.step);
|
||||
|
||||
// Set default behavior
|
||||
b.default_step.dependOn(fmt_step);
|
||||
|
||||
try linkV8(b, mod, enable_asan, enable_tsan, prebuilt_v8_path);
|
||||
try linkCurl(b, mod);
|
||||
try linkCurl(b, mod, enable_tsan);
|
||||
try linkHtml5Ever(b, mod);
|
||||
|
||||
break :blk mod;
|
||||
@@ -189,19 +202,19 @@ fn linkHtml5Ever(b: *Build, mod: *Build.Module) !void {
|
||||
mod.addObjectFile(obj);
|
||||
}
|
||||
|
||||
fn linkCurl(b: *Build, mod: *Build.Module) !void {
|
||||
fn linkCurl(b: *Build, mod: *Build.Module, is_tsan: bool) !void {
|
||||
const target = mod.resolved_target.?;
|
||||
|
||||
const curl = buildCurl(b, target, mod.optimize.?);
|
||||
const curl = buildCurl(b, target, mod.optimize.?, is_tsan);
|
||||
mod.linkLibrary(curl);
|
||||
|
||||
const zlib = buildZlib(b, target, mod.optimize.?);
|
||||
const zlib = buildZlib(b, target, mod.optimize.?, is_tsan);
|
||||
curl.root_module.linkLibrary(zlib);
|
||||
|
||||
const brotli = buildBrotli(b, target, mod.optimize.?);
|
||||
const brotli = buildBrotli(b, target, mod.optimize.?, is_tsan);
|
||||
for (brotli) |lib| curl.root_module.linkLibrary(lib);
|
||||
|
||||
const nghttp2 = buildNghttp2(b, target, mod.optimize.?);
|
||||
const nghttp2 = buildNghttp2(b, target, mod.optimize.?, is_tsan);
|
||||
curl.root_module.linkLibrary(nghttp2);
|
||||
|
||||
const boringssl = buildBoringSsl(b, target, mod.optimize.?);
|
||||
@@ -218,13 +231,14 @@ fn linkCurl(b: *Build, mod: *Build.Module) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Build.Step.Compile {
|
||||
fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) *Build.Step.Compile {
|
||||
const dep = b.dependency("zlib", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
|
||||
const lib = b.addLibrary(.{ .name = "z", .root_module = mod });
|
||||
@@ -249,13 +263,14 @@ fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.Opti
|
||||
return lib;
|
||||
}
|
||||
|
||||
fn buildBrotli(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) [3]*Build.Step.Compile {
|
||||
fn buildBrotli(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) [3]*Build.Step.Compile {
|
||||
const dep = b.dependency("brotli", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("c/include"));
|
||||
|
||||
@@ -311,13 +326,14 @@ fn buildBoringSsl(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin
|
||||
return .{ ssl, crypto };
|
||||
}
|
||||
|
||||
fn buildNghttp2(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Build.Step.Compile {
|
||||
fn buildNghttp2(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) *Build.Step.Compile {
|
||||
const dep = b.dependency("nghttp2", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("lib/includes"));
|
||||
|
||||
@@ -362,6 +378,7 @@ fn buildCurl(
|
||||
b: *Build,
|
||||
target: Build.ResolvedTarget,
|
||||
optimize: std.builtin.OptimizeMode,
|
||||
is_tsan: bool,
|
||||
) *Build.Step.Compile {
|
||||
const dep = b.dependency("curl", .{});
|
||||
|
||||
@@ -369,6 +386,7 @@ fn buildCurl(
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("lib"));
|
||||
mod.addIncludePath(dep.path("include"));
|
||||
|
||||
@@ -5,11 +5,10 @@
|
||||
.minimum_zig_version = "0.15.2",
|
||||
.dependencies = .{
|
||||
.v8 = .{
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.1.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH64J7BAC81mkf6G9RbEJxS-W3TIRl5iFnShwbqCqy",
|
||||
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.4.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH6_F3BAAiFvKY6R1H-gkuQlk19BkDQ0--uZuTrSup",
|
||||
},
|
||||
//.v8 = .{ .path = "../zig-v8-fork" },
|
||||
// .v8 = .{ .path = "../zig-v8-fork" },
|
||||
.brotli = .{
|
||||
// v1.2.0
|
||||
.url = "https://github.com/google/brotli/archive/028fb5a23661f123017c060daa546b55cf4bde29.tar.gz",
|
||||
|
||||
52
src/App.zig
52
src/App.zig
@@ -25,44 +25,38 @@ const Config = @import("Config.zig");
|
||||
const Snapshot = @import("browser/js/Snapshot.zig");
|
||||
const Platform = @import("browser/js/Platform.zig");
|
||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
||||
const RobotStore = @import("browser/Robots.zig").RobotStore;
|
||||
const WebBotAuth = @import("browser/WebBotAuth.zig");
|
||||
|
||||
pub const Http = @import("http/Http.zig");
|
||||
const Network = @import("network/Runtime.zig");
|
||||
pub const ArenaPool = @import("ArenaPool.zig");
|
||||
|
||||
const App = @This();
|
||||
|
||||
http: Http,
|
||||
network: Network,
|
||||
config: *const Config,
|
||||
platform: Platform,
|
||||
snapshot: Snapshot,
|
||||
telemetry: Telemetry,
|
||||
allocator: Allocator,
|
||||
arena_pool: ArenaPool,
|
||||
robots: RobotStore,
|
||||
web_bot_auth: ?WebBotAuth,
|
||||
app_dir_path: ?[]const u8,
|
||||
shutdown: bool = false,
|
||||
|
||||
pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
const app = try allocator.create(App);
|
||||
errdefer allocator.destroy(app);
|
||||
|
||||
app.config = config;
|
||||
app.allocator = allocator;
|
||||
app.* = .{
|
||||
.config = config,
|
||||
.allocator = allocator,
|
||||
.network = undefined,
|
||||
.platform = undefined,
|
||||
.snapshot = undefined,
|
||||
.app_dir_path = undefined,
|
||||
.telemetry = undefined,
|
||||
.arena_pool = undefined,
|
||||
};
|
||||
|
||||
app.robots = RobotStore.init(allocator);
|
||||
|
||||
if (config.webBotAuth()) |wba_cfg| {
|
||||
app.web_bot_auth = try WebBotAuth.fromConfig(allocator, &wba_cfg);
|
||||
} else {
|
||||
app.web_bot_auth = null;
|
||||
}
|
||||
errdefer if (app.web_bot_auth) |wba| wba.deinit(allocator);
|
||||
|
||||
app.http = try Http.init(allocator, &app.robots, &app.web_bot_auth, config);
|
||||
errdefer app.http.deinit();
|
||||
app.network = try Network.init(allocator, app, config);
|
||||
errdefer app.network.deinit();
|
||||
|
||||
app.platform = try Platform.init();
|
||||
errdefer app.platform.deinit();
|
||||
@@ -73,7 +67,7 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
app.app_dir_path = getAndMakeAppDir(allocator);
|
||||
|
||||
app.telemetry = try Telemetry.init(app, config.mode);
|
||||
errdefer app.telemetry.deinit();
|
||||
errdefer app.telemetry.deinit(allocator);
|
||||
|
||||
app.arena_pool = ArenaPool.init(allocator, 512, 1024 * 16);
|
||||
errdefer app.arena_pool.deinit();
|
||||
@@ -81,22 +75,18 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
return app;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *App) void {
|
||||
if (@atomicRmw(bool, &self.shutdown, .Xchg, true, .monotonic)) {
|
||||
return;
|
||||
}
|
||||
pub fn shutdown(self: *const App) bool {
|
||||
return self.network.shutdown.load(.acquire);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *App) void {
|
||||
const allocator = self.allocator;
|
||||
if (self.app_dir_path) |app_dir_path| {
|
||||
allocator.free(app_dir_path);
|
||||
self.app_dir_path = null;
|
||||
}
|
||||
self.telemetry.deinit();
|
||||
self.robots.deinit();
|
||||
if (self.web_bot_auth) |wba| {
|
||||
wba.deinit(allocator);
|
||||
}
|
||||
self.http.deinit();
|
||||
self.telemetry.deinit(allocator);
|
||||
self.network.deinit();
|
||||
self.snapshot.deinit();
|
||||
self.platform.deinit();
|
||||
self.arena_pool.deinit();
|
||||
|
||||
@@ -23,7 +23,7 @@ const Allocator = std.mem.Allocator;
|
||||
const log = @import("log.zig");
|
||||
const dump = @import("browser/dump.zig");
|
||||
|
||||
const WebBotAuthConfig = @import("browser/WebBotAuth.zig").Config;
|
||||
const WebBotAuthConfig = @import("network/WebBotAuth.zig").Config;
|
||||
|
||||
pub const RunMode = enum {
|
||||
help,
|
||||
@@ -33,6 +33,7 @@ pub const RunMode = enum {
|
||||
mcp,
|
||||
};
|
||||
|
||||
pub const MAX_LISTENERS = 16;
|
||||
pub const CDP_MAX_HTTP_REQUEST_SIZE = 4096;
|
||||
|
||||
// max message size
|
||||
@@ -155,6 +156,20 @@ pub fn userAgentSuffix(self: *const Config) ?[]const u8 {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn cacheDir(self: *const Config) ?[]const u8 {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch, .mcp => |opts| opts.common.cache_dir,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn cdpTimeout(self: *const Config) usize {
|
||||
return switch (self.mode) {
|
||||
.serve => |opts| if (opts.timeout > 604_800) 604_800_000 else @as(usize, opts.timeout) * 1000,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn webBotAuth(self: *const Config) ?WebBotAuthConfig {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch, .mcp => |opts| WebBotAuthConfig{
|
||||
@@ -205,6 +220,8 @@ pub const DumpFormat = enum {
|
||||
html,
|
||||
markdown,
|
||||
wpt,
|
||||
semantic_tree,
|
||||
semantic_tree_text,
|
||||
};
|
||||
|
||||
pub const Fetch = struct {
|
||||
@@ -230,6 +247,7 @@ pub const Common = struct {
|
||||
log_format: ?log.Format = null,
|
||||
log_filter_scopes: ?[]log.Scope = null,
|
||||
user_agent_suffix: ?[]const u8 = null,
|
||||
cache_dir: ?[]const u8 = null,
|
||||
|
||||
web_bot_auth_key_file: ?[]const u8 = null,
|
||||
web_bot_auth_keyid: ?[]const u8 = null,
|
||||
@@ -363,7 +381,7 @@ pub fn printUsageAndExit(self: *const Config, success: bool) void {
|
||||
\\
|
||||
\\Options:
|
||||
\\--dump Dumps document to stdout.
|
||||
\\ Argument must be 'html' or 'markdown'.
|
||||
\\ Argument must be 'html', 'markdown', 'semantic_tree', or 'semantic_tree_text'.
|
||||
\\ Defaults to no dump.
|
||||
\\
|
||||
\\--strip_mode Comma separated list of tag groups to remove from dump
|
||||
@@ -897,5 +915,14 @@ fn parseCommonArg(
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--cache_dir", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--cache_dir" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.cache_dir = try allocator.dupe(u8, str);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ const lp = @import("lightpanda");
|
||||
|
||||
const log = @import("log.zig");
|
||||
const Page = @import("browser/Page.zig");
|
||||
const Transfer = @import("http/Client.zig").Transfer;
|
||||
const Transfer = @import("browser/HttpClient.zig").Transfer;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
|
||||
532
src/SemanticTree.zig
Normal file
532
src/SemanticTree.zig
Normal file
@@ -0,0 +1,532 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. See <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("log.zig");
|
||||
const isAllWhitespace = @import("string.zig").isAllWhitespace;
|
||||
const Page = lp.Page;
|
||||
const interactive = @import("browser/interactive.zig");
|
||||
|
||||
const CData = @import("browser/webapi/CData.zig");
|
||||
const Element = @import("browser/webapi/Element.zig");
|
||||
const Node = @import("browser/webapi/Node.zig");
|
||||
const AXNode = @import("cdp/AXNode.zig");
|
||||
const CDPNode = @import("cdp/Node.zig");
|
||||
|
||||
const Self = @This();
|
||||
|
||||
dom_node: *Node,
|
||||
registry: *CDPNode.Registry,
|
||||
page: *Page,
|
||||
arena: std.mem.Allocator,
|
||||
prune: bool = true,
|
||||
interactive_only: bool = false,
|
||||
max_depth: u32 = std.math.maxInt(u32) - 1,
|
||||
|
||||
pub fn jsonStringify(self: @This(), jw: *std.json.Stringify) error{WriteFailed}!void {
|
||||
var visitor = JsonVisitor{ .jw = jw, .tree = self };
|
||||
var xpath_buffer: std.ArrayList(u8) = .{};
|
||||
const listener_targets = interactive.buildListenerTargetMap(self.page, self.arena) catch |err| {
|
||||
log.err(.app, "listener map failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
self.walk(self.dom_node, &xpath_buffer, null, &visitor, 1, listener_targets, 0) catch |err| {
|
||||
log.err(.app, "semantic tree json dump failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn textStringify(self: @This(), writer: *std.Io.Writer) error{WriteFailed}!void {
|
||||
var visitor = TextVisitor{ .writer = writer, .tree = self, .depth = 0 };
|
||||
var xpath_buffer: std.ArrayList(u8) = .empty;
|
||||
const listener_targets = interactive.buildListenerTargetMap(self.page, self.arena) catch |err| {
|
||||
log.err(.app, "listener map failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
self.walk(self.dom_node, &xpath_buffer, null, &visitor, 1, listener_targets, 0) catch |err| {
|
||||
log.err(.app, "semantic tree text dump failed", .{ .err = err });
|
||||
return error.WriteFailed;
|
||||
};
|
||||
}
|
||||
|
||||
const OptionData = struct {
|
||||
value: []const u8,
|
||||
text: []const u8,
|
||||
selected: bool,
|
||||
};
|
||||
|
||||
const NodeData = struct {
|
||||
id: CDPNode.Id,
|
||||
axn: AXNode,
|
||||
role: []const u8,
|
||||
name: ?[]const u8,
|
||||
value: ?[]const u8,
|
||||
options: ?[]OptionData = null,
|
||||
xpath: []const u8,
|
||||
is_interactive: bool,
|
||||
node_name: []const u8,
|
||||
};
|
||||
|
||||
fn walk(self: @This(), node: *Node, xpath_buffer: *std.ArrayList(u8), parent_name: ?[]const u8, visitor: anytype, index: usize, listener_targets: interactive.ListenerTargetMap, current_depth: u32) !void {
|
||||
if (current_depth > self.max_depth) return;
|
||||
|
||||
// 1. Skip non-content nodes
|
||||
if (node.is(Element)) |el| {
|
||||
const tag = el.getTag();
|
||||
if (tag.isMetadata() or tag == .svg) return;
|
||||
|
||||
// We handle options/optgroups natively inside their parents, skip them in the general walk
|
||||
if (tag == .datalist or tag == .option or tag == .optgroup) return;
|
||||
|
||||
// Check visibility using the engine's checkVisibility which handles CSS display: none
|
||||
if (!el.checkVisibility(self.page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (el.is(Element.Html)) |html_el| {
|
||||
if (html_el.getHidden()) return;
|
||||
}
|
||||
} else if (node.is(CData.Text)) |text_node| {
|
||||
const text = text_node.getWholeText();
|
||||
if (isAllWhitespace(text)) {
|
||||
return;
|
||||
}
|
||||
} else if (node._type != .document and node._type != .document_fragment) {
|
||||
return;
|
||||
}
|
||||
|
||||
const cdp_node = try self.registry.register(node);
|
||||
const axn = AXNode.fromNode(node);
|
||||
const role = try axn.getRole();
|
||||
|
||||
var is_interactive = false;
|
||||
var value: ?[]const u8 = null;
|
||||
var options: ?[]OptionData = null;
|
||||
var node_name: []const u8 = "text";
|
||||
|
||||
if (node.is(Element)) |el| {
|
||||
node_name = el.getTagNameLower();
|
||||
|
||||
if (el.is(Element.Html.Input)) |input| {
|
||||
value = input.getValue();
|
||||
if (el.getAttributeSafe(comptime lp.String.wrap("list"))) |list_id| {
|
||||
options = try extractDataListOptions(list_id, self.page, self.arena);
|
||||
}
|
||||
} else if (el.is(Element.Html.TextArea)) |textarea| {
|
||||
value = textarea.getValue();
|
||||
} else if (el.is(Element.Html.Select)) |select| {
|
||||
value = select.getValue(self.page);
|
||||
options = try extractSelectOptions(el.asNode(), self.page, self.arena);
|
||||
}
|
||||
|
||||
if (el.is(Element.Html)) |html_el| {
|
||||
if (interactive.classifyInteractivity(el, html_el, listener_targets) != null) {
|
||||
is_interactive = true;
|
||||
}
|
||||
}
|
||||
} else if (node._type == .document or node._type == .document_fragment) {
|
||||
node_name = "root";
|
||||
}
|
||||
|
||||
const initial_xpath_len = xpath_buffer.items.len;
|
||||
try appendXPathSegment(node, xpath_buffer.writer(self.arena), index);
|
||||
const xpath = xpath_buffer.items;
|
||||
|
||||
var name = try axn.getName(self.page, self.arena);
|
||||
|
||||
const has_explicit_label = if (node.is(Element)) |el|
|
||||
el.getAttributeSafe(.wrap("aria-label")) != null or el.getAttributeSafe(.wrap("title")) != null
|
||||
else
|
||||
false;
|
||||
|
||||
const structural = isStructuralRole(role);
|
||||
|
||||
// Filter out computed concatenated names for generic containers without explicit labels.
|
||||
// This prevents token bloat and ensures their StaticText children aren't incorrectly pruned.
|
||||
// We ignore interactivity because a generic wrapper with an event listener still shouldn't hoist all text.
|
||||
if (name != null and structural and !has_explicit_label) {
|
||||
name = null;
|
||||
}
|
||||
|
||||
var data = NodeData{
|
||||
.id = cdp_node.id,
|
||||
.axn = axn,
|
||||
.role = role,
|
||||
.name = name,
|
||||
.value = value,
|
||||
.options = options,
|
||||
.xpath = xpath,
|
||||
.is_interactive = is_interactive,
|
||||
.node_name = node_name,
|
||||
};
|
||||
|
||||
var should_visit = true;
|
||||
if (self.interactive_only) {
|
||||
var keep = false;
|
||||
if (interactive.isInteractiveRole(role)) {
|
||||
keep = true;
|
||||
} else if (interactive.isContentRole(role)) {
|
||||
if (name != null and name.?.len > 0) {
|
||||
keep = true;
|
||||
}
|
||||
} else if (std.mem.eql(u8, role, "RootWebArea")) {
|
||||
keep = true;
|
||||
} else if (is_interactive) {
|
||||
keep = true;
|
||||
}
|
||||
if (!keep) {
|
||||
should_visit = false;
|
||||
}
|
||||
} else if (self.prune) {
|
||||
if (structural and !is_interactive and !has_explicit_label) {
|
||||
should_visit = false;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, role, "StaticText") and node._parent != null) {
|
||||
if (parent_name != null and name != null and std.mem.indexOf(u8, parent_name.?, name.?) != null) {
|
||||
should_visit = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var did_visit = false;
|
||||
var should_walk_children = true;
|
||||
if (should_visit) {
|
||||
should_walk_children = try visitor.visit(node, &data);
|
||||
did_visit = true; // Always true if should_visit was true, because visit() executed and opened structures
|
||||
} else {
|
||||
// If we skip the node, we must NOT tell the visitor to close it later
|
||||
did_visit = false;
|
||||
}
|
||||
|
||||
if (should_walk_children) {
|
||||
// If we are printing this node normally OR skipping it and unrolling its children,
|
||||
// we walk the children iterator.
|
||||
var it = node.childrenIterator();
|
||||
var tag_counts = std.StringArrayHashMap(usize).init(self.arena);
|
||||
while (it.next()) |child| {
|
||||
var tag: []const u8 = "text()";
|
||||
if (child.is(Element)) |el| {
|
||||
tag = el.getTagNameLower();
|
||||
}
|
||||
|
||||
const gop = try tag_counts.getOrPut(tag);
|
||||
if (!gop.found_existing) {
|
||||
gop.value_ptr.* = 0;
|
||||
}
|
||||
gop.value_ptr.* += 1;
|
||||
|
||||
try self.walk(child, xpath_buffer, name, visitor, gop.value_ptr.*, listener_targets, current_depth + 1);
|
||||
}
|
||||
}
|
||||
|
||||
if (did_visit) {
|
||||
try visitor.leave();
|
||||
}
|
||||
|
||||
xpath_buffer.shrinkRetainingCapacity(initial_xpath_len);
|
||||
}
|
||||
|
||||
fn extractSelectOptions(node: *Node, page: *Page, arena: std.mem.Allocator) ![]OptionData {
|
||||
var options = std.ArrayListUnmanaged(OptionData){};
|
||||
var it = node.childrenIterator();
|
||||
while (it.next()) |child| {
|
||||
if (child.is(Element)) |el| {
|
||||
if (el.getTag() == .option) {
|
||||
if (el.is(Element.Html.Option)) |opt| {
|
||||
const text = opt.getText(page);
|
||||
const value = opt.getValue(page);
|
||||
const selected = opt.getSelected();
|
||||
try options.append(arena, .{ .text = text, .value = value, .selected = selected });
|
||||
}
|
||||
} else if (el.getTag() == .optgroup) {
|
||||
var group_it = child.childrenIterator();
|
||||
while (group_it.next()) |group_child| {
|
||||
if (group_child.is(Element.Html.Option)) |opt| {
|
||||
const text = opt.getText(page);
|
||||
const value = opt.getValue(page);
|
||||
const selected = opt.getSelected();
|
||||
try options.append(arena, .{ .text = text, .value = value, .selected = selected });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return options.toOwnedSlice(arena);
|
||||
}
|
||||
|
||||
fn extractDataListOptions(list_id: []const u8, page: *Page, arena: std.mem.Allocator) !?[]OptionData {
|
||||
if (page.document.getElementById(list_id, page)) |referenced_el| {
|
||||
if (referenced_el.getTag() == .datalist) {
|
||||
return try extractSelectOptions(referenced_el.asNode(), page, arena);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn appendXPathSegment(node: *Node, writer: anytype, index: usize) !void {
|
||||
if (node.is(Element)) |el| {
|
||||
const tag = el.getTagNameLower();
|
||||
try std.fmt.format(writer, "/{s}[{d}]", .{ tag, index });
|
||||
} else if (node.is(CData.Text)) |_| {
|
||||
try std.fmt.format(writer, "/text()[{d}]", .{index});
|
||||
}
|
||||
}
|
||||
|
||||
const JsonVisitor = struct {
|
||||
jw: *std.json.Stringify,
|
||||
tree: Self,
|
||||
|
||||
pub fn visit(self: *JsonVisitor, node: *Node, data: *NodeData) !bool {
|
||||
try self.jw.beginObject();
|
||||
|
||||
try self.jw.objectField("nodeId");
|
||||
try self.jw.write(try std.fmt.allocPrint(self.tree.arena, "{d}", .{data.id}));
|
||||
|
||||
try self.jw.objectField("backendDOMNodeId");
|
||||
try self.jw.write(data.id);
|
||||
|
||||
try self.jw.objectField("nodeName");
|
||||
try self.jw.write(data.node_name);
|
||||
|
||||
try self.jw.objectField("xpath");
|
||||
try self.jw.write(data.xpath);
|
||||
|
||||
if (node.is(Element)) |el| {
|
||||
try self.jw.objectField("nodeType");
|
||||
try self.jw.write(1);
|
||||
|
||||
try self.jw.objectField("isInteractive");
|
||||
try self.jw.write(data.is_interactive);
|
||||
|
||||
try self.jw.objectField("role");
|
||||
try self.jw.write(data.role);
|
||||
|
||||
if (data.name) |name| {
|
||||
if (name.len > 0) {
|
||||
try self.jw.objectField("name");
|
||||
try self.jw.write(name);
|
||||
}
|
||||
}
|
||||
|
||||
if (data.value) |value| {
|
||||
try self.jw.objectField("value");
|
||||
try self.jw.write(value);
|
||||
}
|
||||
|
||||
if (el._attributes) |attrs| {
|
||||
try self.jw.objectField("attributes");
|
||||
try self.jw.beginObject();
|
||||
var iter = attrs.iterator();
|
||||
while (iter.next()) |attr| {
|
||||
try self.jw.objectField(attr._name.str());
|
||||
try self.jw.write(attr._value.str());
|
||||
}
|
||||
try self.jw.endObject();
|
||||
}
|
||||
|
||||
if (data.options) |options| {
|
||||
try self.jw.objectField("options");
|
||||
try self.jw.beginArray();
|
||||
for (options) |opt| {
|
||||
try self.jw.beginObject();
|
||||
try self.jw.objectField("value");
|
||||
try self.jw.write(opt.value);
|
||||
try self.jw.objectField("text");
|
||||
try self.jw.write(opt.text);
|
||||
try self.jw.objectField("selected");
|
||||
try self.jw.write(opt.selected);
|
||||
try self.jw.endObject();
|
||||
}
|
||||
try self.jw.endArray();
|
||||
}
|
||||
} else if (node.is(CData.Text)) |text_node| {
|
||||
try self.jw.objectField("nodeType");
|
||||
try self.jw.write(3);
|
||||
try self.jw.objectField("nodeValue");
|
||||
try self.jw.write(text_node.getWholeText());
|
||||
} else {
|
||||
try self.jw.objectField("nodeType");
|
||||
try self.jw.write(9);
|
||||
}
|
||||
|
||||
try self.jw.objectField("children");
|
||||
try self.jw.beginArray();
|
||||
|
||||
if (data.options != null) {
|
||||
// Signal to not walk children, as we handled them natively
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn leave(self: *JsonVisitor) !void {
|
||||
try self.jw.endArray();
|
||||
try self.jw.endObject();
|
||||
}
|
||||
};
|
||||
|
||||
fn isStructuralRole(role: []const u8) bool {
|
||||
const structural_roles = std.StaticStringMap(void).initComptime(.{
|
||||
.{ "none", {} },
|
||||
.{ "generic", {} },
|
||||
.{ "InlineTextBox", {} },
|
||||
.{ "banner", {} },
|
||||
.{ "navigation", {} },
|
||||
.{ "main", {} },
|
||||
.{ "list", {} },
|
||||
.{ "listitem", {} },
|
||||
.{ "table", {} },
|
||||
.{ "rowgroup", {} },
|
||||
.{ "row", {} },
|
||||
.{ "cell", {} },
|
||||
.{ "region", {} },
|
||||
});
|
||||
return structural_roles.has(role);
|
||||
}
|
||||
|
||||
const TextVisitor = struct {
|
||||
writer: *std.Io.Writer,
|
||||
tree: Self,
|
||||
depth: usize,
|
||||
|
||||
pub fn visit(self: *TextVisitor, node: *Node, data: *NodeData) !bool {
|
||||
for (0..self.depth) |_| {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
|
||||
var name_to_print: ?[]const u8 = null;
|
||||
if (data.name) |n| {
|
||||
if (n.len > 0) {
|
||||
name_to_print = n;
|
||||
}
|
||||
} else if (node.is(CData.Text)) |text_node| {
|
||||
const trimmed = std.mem.trim(u8, text_node.getWholeText(), " \t\r\n");
|
||||
if (trimmed.len > 0) {
|
||||
name_to_print = trimmed;
|
||||
}
|
||||
}
|
||||
|
||||
const is_text_only = std.mem.eql(u8, data.role, "StaticText") or std.mem.eql(u8, data.role, "none") or std.mem.eql(u8, data.role, "generic");
|
||||
|
||||
try self.writer.print("{d}", .{data.id});
|
||||
if (!is_text_only) {
|
||||
try self.writer.print(" {s}", .{data.role});
|
||||
}
|
||||
if (name_to_print) |n| {
|
||||
try self.writer.print(" '{s}'", .{n});
|
||||
}
|
||||
|
||||
if (data.value) |v| {
|
||||
if (v.len > 0) {
|
||||
try self.writer.print(" value='{s}'", .{v});
|
||||
}
|
||||
}
|
||||
|
||||
if (data.options) |options| {
|
||||
try self.writer.writeAll(" options=[");
|
||||
for (options, 0..) |opt, i| {
|
||||
if (i > 0) try self.writer.writeAll(",");
|
||||
try self.writer.print("'{s}'", .{opt.value});
|
||||
if (opt.selected) {
|
||||
try self.writer.writeAll("*");
|
||||
}
|
||||
}
|
||||
try self.writer.writeAll("]\n");
|
||||
self.depth += 1;
|
||||
return false; // Native handling complete, do not walk children
|
||||
}
|
||||
|
||||
try self.writer.writeByte('\n');
|
||||
self.depth += 1;
|
||||
|
||||
// If this is a leaf-like semantic node and we already have a name,
|
||||
// skip children to avoid redundant StaticText or noise.
|
||||
const is_leaf_semantic = std.mem.eql(u8, data.role, "link") or
|
||||
std.mem.eql(u8, data.role, "button") or
|
||||
std.mem.eql(u8, data.role, "heading") or
|
||||
std.mem.eql(u8, data.role, "code");
|
||||
if (is_leaf_semantic and data.name != null and data.name.?.len > 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn leave(self: *TextVisitor) !void {
|
||||
if (self.depth > 0) {
|
||||
self.depth -= 1;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("testing.zig");
|
||||
|
||||
test "SemanticTree backendDOMNodeId" {
|
||||
var registry: CDPNode.Registry = .init(testing.allocator);
|
||||
defer registry.deinit();
|
||||
|
||||
var page = try testing.pageTest("cdp/registry1.html");
|
||||
defer testing.reset();
|
||||
defer page._session.removePage();
|
||||
|
||||
const st: Self = .{
|
||||
.dom_node = page.window._document.asNode(),
|
||||
.registry = ®istry,
|
||||
.page = page,
|
||||
.arena = testing.arena_allocator,
|
||||
.prune = false,
|
||||
.interactive_only = false,
|
||||
.max_depth = std.math.maxInt(u32) - 1,
|
||||
};
|
||||
|
||||
const json_str = try std.json.Stringify.valueAlloc(testing.allocator, st, .{});
|
||||
defer testing.allocator.free(json_str);
|
||||
|
||||
try testing.expect(std.mem.indexOf(u8, json_str, "\"backendDOMNodeId\":") != null);
|
||||
}
|
||||
|
||||
test "SemanticTree max_depth" {
|
||||
var registry: CDPNode.Registry = .init(testing.allocator);
|
||||
defer registry.deinit();
|
||||
|
||||
var page = try testing.pageTest("cdp/registry1.html");
|
||||
defer testing.reset();
|
||||
defer page._session.removePage();
|
||||
|
||||
const st: Self = .{
|
||||
.dom_node = page.window._document.asNode(),
|
||||
.registry = ®istry,
|
||||
.page = page,
|
||||
.arena = testing.arena_allocator,
|
||||
.prune = false,
|
||||
.interactive_only = false,
|
||||
.max_depth = 1,
|
||||
};
|
||||
|
||||
var aw: std.Io.Writer.Allocating = .init(testing.allocator);
|
||||
defer aw.deinit();
|
||||
|
||||
try st.textStringify(&aw.writer);
|
||||
const text_str = aw.written();
|
||||
|
||||
try testing.expect(std.mem.indexOf(u8, text_str, "other") == null);
|
||||
}
|
||||
124
src/Server.zig
124
src/Server.zig
@@ -18,8 +18,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const net = std.net;
|
||||
const posix = std.posix;
|
||||
|
||||
@@ -30,16 +28,13 @@ const log = @import("log.zig");
|
||||
const App = @import("App.zig");
|
||||
const Config = @import("Config.zig");
|
||||
const CDP = @import("cdp/cdp.zig").CDP;
|
||||
const Net = @import("Net.zig");
|
||||
const Http = @import("http/Http.zig");
|
||||
const HttpClient = @import("http/Client.zig");
|
||||
const Net = @import("network/websocket.zig");
|
||||
const HttpClient = @import("browser/HttpClient.zig");
|
||||
|
||||
const Server = @This();
|
||||
|
||||
app: *App,
|
||||
shutdown: std.atomic.Value(bool) = .init(false),
|
||||
allocator: Allocator,
|
||||
listener: ?posix.socket_t,
|
||||
json_version_response: []const u8,
|
||||
|
||||
// Thread management
|
||||
@@ -48,103 +43,52 @@ clients: std.ArrayList(*Client) = .{},
|
||||
client_mutex: std.Thread.Mutex = .{},
|
||||
clients_pool: std.heap.MemoryPool(Client),
|
||||
|
||||
pub fn init(app: *App, address: net.Address) !Server {
|
||||
pub fn init(app: *App, address: net.Address) !*Server {
|
||||
const allocator = app.allocator;
|
||||
const json_version_response = try buildJSONVersionResponse(allocator, address);
|
||||
errdefer allocator.free(json_version_response);
|
||||
|
||||
return .{
|
||||
const self = try allocator.create(Server);
|
||||
errdefer allocator.destroy(self);
|
||||
|
||||
self.* = .{
|
||||
.app = app,
|
||||
.listener = null,
|
||||
.allocator = allocator,
|
||||
.json_version_response = json_version_response,
|
||||
.clients_pool = std.heap.MemoryPool(Client).init(app.allocator),
|
||||
.clients_pool = std.heap.MemoryPool(Client).init(allocator),
|
||||
};
|
||||
|
||||
try self.app.network.bind(address, self, onAccept);
|
||||
log.info(.app, "server running", .{ .address = address });
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
/// Interrupts the server so that main can complete normally and call all defer handlers.
|
||||
pub fn stop(self: *Server) void {
|
||||
if (self.shutdown.swap(true, .release)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Shutdown all active clients
|
||||
{
|
||||
pub fn shutdown(self: *Server) void {
|
||||
self.client_mutex.lock();
|
||||
defer self.client_mutex.unlock();
|
||||
|
||||
for (self.clients.items) |client| {
|
||||
client.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// Linux and BSD/macOS handle canceling a socket blocked on accept differently.
|
||||
// For Linux, we use std.shutdown, which will cause accept to return error.SocketNotListening (EINVAL).
|
||||
// For BSD, shutdown will return an error. Instead we call posix.close, which will result with error.ConnectionAborted (BADF).
|
||||
if (self.listener) |listener| switch (builtin.target.os.tag) {
|
||||
.linux => posix.shutdown(listener, .recv) catch |err| {
|
||||
log.warn(.app, "listener shutdown", .{ .err = err });
|
||||
},
|
||||
.macos, .freebsd, .netbsd, .openbsd => {
|
||||
self.listener = null;
|
||||
posix.close(listener);
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Server) void {
|
||||
if (!self.shutdown.load(.acquire)) {
|
||||
self.stop();
|
||||
}
|
||||
|
||||
self.shutdown();
|
||||
self.joinThreads();
|
||||
if (self.listener) |listener| {
|
||||
posix.close(listener);
|
||||
self.listener = null;
|
||||
}
|
||||
self.clients.deinit(self.allocator);
|
||||
self.clients_pool.deinit();
|
||||
self.allocator.free(self.json_version_response);
|
||||
self.allocator.destroy(self);
|
||||
}
|
||||
|
||||
pub fn run(self: *Server, address: net.Address, timeout_ms: u32) !void {
|
||||
const flags = posix.SOCK.STREAM | posix.SOCK.CLOEXEC | posix.SOCK.NONBLOCK;
|
||||
const listener = try posix.socket(address.any.family, flags, posix.IPPROTO.TCP);
|
||||
self.listener = listener;
|
||||
|
||||
try posix.setsockopt(listener, posix.SOL.SOCKET, posix.SO.REUSEADDR, &std.mem.toBytes(@as(c_int, 1)));
|
||||
if (@hasDecl(posix.TCP, "NODELAY")) {
|
||||
try posix.setsockopt(listener, posix.IPPROTO.TCP, posix.TCP.NODELAY, &std.mem.toBytes(@as(c_int, 1)));
|
||||
}
|
||||
|
||||
try posix.bind(listener, &address.any, address.getOsSockLen());
|
||||
try posix.listen(listener, self.app.config.maxPendingConnections());
|
||||
|
||||
log.info(.app, "server running", .{ .address = address });
|
||||
while (!self.shutdown.load(.acquire)) {
|
||||
const socket = posix.accept(listener, null, null, posix.SOCK.NONBLOCK) catch |err| {
|
||||
switch (err) {
|
||||
error.SocketNotListening, error.ConnectionAborted => {
|
||||
log.info(.app, "server stopped", .{});
|
||||
break;
|
||||
},
|
||||
error.WouldBlock => {
|
||||
std.Thread.sleep(10 * std.time.ns_per_ms);
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
log.err(.app, "CDP accept", .{ .err = err });
|
||||
std.Thread.sleep(std.time.ns_per_s);
|
||||
continue;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
fn onAccept(ctx: *anyopaque, socket: posix.socket_t) void {
|
||||
const self: *Server = @ptrCast(@alignCast(ctx));
|
||||
const timeout_ms: u32 = @intCast(self.app.config.cdpTimeout());
|
||||
self.spawnWorker(socket, timeout_ms) catch |err| {
|
||||
log.err(.app, "CDP spawn", .{ .err = err });
|
||||
posix.close(socket);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn handleConnection(self: *Server, socket: posix.socket_t, timeout_ms: u32) void {
|
||||
@@ -173,10 +117,10 @@ fn handleConnection(self: *Server, socket: posix.socket_t, timeout_ms: u32) void
|
||||
self.registerClient(client);
|
||||
defer self.unregisterClient(client);
|
||||
|
||||
// Check shutdown after registering to avoid missing stop() signal.
|
||||
// If stop() already iterated over clients, this client won't receive stop()
|
||||
// Check shutdown after registering to avoid missing the stop signal.
|
||||
// If deinit() already iterated over clients, this client won't receive stop()
|
||||
// and would block joinThreads() indefinitely.
|
||||
if (self.shutdown.load(.acquire)) {
|
||||
if (self.app.shutdown()) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -213,7 +157,7 @@ fn unregisterClient(self: *Server, client: *Client) void {
|
||||
}
|
||||
|
||||
fn spawnWorker(self: *Server, socket: posix.socket_t, timeout_ms: u32) !void {
|
||||
if (self.shutdown.load(.acquire)) {
|
||||
if (self.app.shutdown()) {
|
||||
return error.ShuttingDown;
|
||||
}
|
||||
|
||||
@@ -283,7 +227,7 @@ pub const Client = struct {
|
||||
log.info(.app, "client connected", .{ .ip = client_address });
|
||||
}
|
||||
|
||||
const http = try app.http.createClient(allocator);
|
||||
const http = try HttpClient.init(allocator, &app.network);
|
||||
errdefer http.deinit();
|
||||
|
||||
return .{
|
||||
@@ -298,7 +242,10 @@ pub const Client = struct {
|
||||
fn stop(self: *Client) void {
|
||||
switch (self.mode) {
|
||||
.http => {},
|
||||
.cdp => |*cdp| cdp.browser.env.terminate(),
|
||||
.cdp => |*cdp| {
|
||||
cdp.browser.env.terminate();
|
||||
self.ws.sendClose();
|
||||
},
|
||||
}
|
||||
self.ws.shutdown();
|
||||
}
|
||||
@@ -351,7 +298,7 @@ pub const Client = struct {
|
||||
}
|
||||
|
||||
var cdp = &self.mode.cdp;
|
||||
var last_message = timestamp(.monotonic);
|
||||
var last_message = milliTimestamp(.monotonic);
|
||||
var ms_remaining = self.ws.timeout_ms;
|
||||
|
||||
while (true) {
|
||||
@@ -360,7 +307,7 @@ pub const Client = struct {
|
||||
if (self.readSocket() == false) {
|
||||
return;
|
||||
}
|
||||
last_message = timestamp(.monotonic);
|
||||
last_message = milliTimestamp(.monotonic);
|
||||
ms_remaining = self.ws.timeout_ms;
|
||||
},
|
||||
.no_page => {
|
||||
@@ -375,16 +322,18 @@ pub const Client = struct {
|
||||
if (self.readSocket() == false) {
|
||||
return;
|
||||
}
|
||||
last_message = timestamp(.monotonic);
|
||||
last_message = milliTimestamp(.monotonic);
|
||||
ms_remaining = self.ws.timeout_ms;
|
||||
},
|
||||
.done => {
|
||||
const elapsed = timestamp(.monotonic) - last_message;
|
||||
if (elapsed > ms_remaining) {
|
||||
const now = milliTimestamp(.monotonic);
|
||||
const elapsed = now - last_message;
|
||||
if (elapsed >= ms_remaining) {
|
||||
log.info(.app, "CDP timeout", .{});
|
||||
return;
|
||||
}
|
||||
ms_remaining -= @intCast(elapsed);
|
||||
last_message = now;
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -557,6 +506,7 @@ fn buildJSONVersionResponse(
|
||||
}
|
||||
|
||||
pub const timestamp = @import("datetime.zig").timestamp;
|
||||
pub const milliTimestamp = @import("datetime.zig").milliTimestamp;
|
||||
|
||||
const testing = std.testing;
|
||||
test "server: buildJSONVersionResponse" {
|
||||
|
||||
@@ -24,7 +24,7 @@ const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const js = @import("js/js.zig");
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../App.zig");
|
||||
const HttpClient = @import("../http/Client.zig");
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
|
||||
const ArenaPool = App.ArenaPool;
|
||||
|
||||
@@ -91,25 +91,32 @@ pub fn runMicrotasks(self: *Browser) void {
|
||||
self.env.runMicrotasks();
|
||||
}
|
||||
|
||||
pub fn runMacrotasks(self: *Browser) !?u64 {
|
||||
pub fn runMacrotasks(self: *Browser) !void {
|
||||
const env = &self.env;
|
||||
|
||||
const time_to_next = try self.env.runMacrotasks();
|
||||
try self.env.runMacrotasks();
|
||||
env.pumpMessageLoop();
|
||||
|
||||
// either of the above could have queued more microtasks
|
||||
env.runMicrotasks();
|
||||
|
||||
return time_to_next;
|
||||
}
|
||||
|
||||
pub fn hasBackgroundTasks(self: *Browser) bool {
|
||||
return self.env.hasBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn waitForBackgroundTasks(self: *Browser) void {
|
||||
self.env.waitForBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn msToNextMacrotask(self: *Browser) ?u64 {
|
||||
return self.env.msToNextMacrotask();
|
||||
}
|
||||
|
||||
pub fn msTo(self: *Browser) bool {
|
||||
return self.env.hasBackgroundTasks();
|
||||
}
|
||||
|
||||
pub fn runIdleTasks(self: *const Browser) void {
|
||||
self.env.runIdleTasks();
|
||||
}
|
||||
|
||||
@@ -205,7 +205,7 @@ pub fn dispatch(self: *EventManager, target: *EventTarget, event: *Event) Dispat
|
||||
|
||||
pub fn dispatchOpts(self: *EventManager, target: *EventTarget, event: *Event, comptime opts: DispatchOpts) DispatchError!void {
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, self.page);
|
||||
defer event.deinit(false, self.page._session);
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.event, "eventManager.dispatch", .{ .type = event._type_string.str(), .bubbles = event._bubbles });
|
||||
@@ -233,8 +233,14 @@ const DispatchDirectOptions = struct {
|
||||
pub fn dispatchDirect(self: *EventManager, target: *EventTarget, event: *Event, handler: anytype, comptime opts: DispatchDirectOptions) !void {
|
||||
const page = self.page;
|
||||
|
||||
// Set window.event to the currently dispatching event (WHATWG spec)
|
||||
const window = page.window;
|
||||
const prev_event = window._current_event;
|
||||
window._current_event = event;
|
||||
defer window._current_event = prev_event;
|
||||
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, page);
|
||||
defer event.deinit(false, page._session);
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.event, "dispatchDirect", .{ .type = event._type_string, .context = opts.context });
|
||||
@@ -365,6 +371,29 @@ fn getFunction(handler: anytype, local: *const js.Local) ?js.Function {
|
||||
};
|
||||
}
|
||||
|
||||
/// Check if there are any listeners for a direct dispatch (non-DOM target).
|
||||
/// Use this to avoid creating an event when there are no listeners.
|
||||
pub fn hasDirectListeners(self: *EventManager, target: *EventTarget, typ: []const u8, handler: anytype) bool {
|
||||
if (hasHandler(handler)) {
|
||||
return true;
|
||||
}
|
||||
return self.lookup.get(.{
|
||||
.event_target = @intFromPtr(target),
|
||||
.type_string = .wrap(typ),
|
||||
}) != null;
|
||||
}
|
||||
|
||||
fn hasHandler(handler: anytype) bool {
|
||||
const ti = @typeInfo(@TypeOf(handler));
|
||||
if (ti == .null) {
|
||||
return false;
|
||||
}
|
||||
if (ti == .optional) {
|
||||
return handler != null;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts: DispatchOpts) !void {
|
||||
const ShadowRoot = @import("webapi/ShadowRoot.zig");
|
||||
|
||||
@@ -375,6 +404,13 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
|
||||
}
|
||||
|
||||
const page = self.page;
|
||||
|
||||
// Set window.event to the currently dispatching event (WHATWG spec)
|
||||
const window = page.window;
|
||||
const prev_event = window._current_event;
|
||||
window._current_event = event;
|
||||
defer window._current_event = prev_event;
|
||||
|
||||
var was_handled = false;
|
||||
|
||||
// Create a single scope for all event handlers in this dispatch.
|
||||
|
||||
@@ -48,13 +48,11 @@ const Factory = @This();
|
||||
_arena: Allocator,
|
||||
_slab: SlabAllocator,
|
||||
|
||||
pub fn init(arena: Allocator) !*Factory {
|
||||
const self = try arena.create(Factory);
|
||||
self.* = .{
|
||||
pub fn init(arena: Allocator) Factory {
|
||||
return .{
|
||||
._arena = arena,
|
||||
._slab = SlabAllocator.init(arena, 128),
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
// this is a root object
|
||||
@@ -249,16 +247,15 @@ fn eventInit(arena: Allocator, typ: String, value: anytype) !Event {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn blob(self: *Factory, child: anytype) !*@TypeOf(child) {
|
||||
const allocator = self._slab.allocator();
|
||||
|
||||
pub fn blob(_: *const Factory, arena: Allocator, child: anytype) !*@TypeOf(child) {
|
||||
// Special case: Blob has slice and mime fields, so we need manual setup
|
||||
const chain = try PrototypeChain(
|
||||
&.{ Blob, @TypeOf(child) },
|
||||
).allocate(allocator);
|
||||
).allocate(arena);
|
||||
|
||||
const blob_ptr = chain.get(0);
|
||||
blob_ptr.* = .{
|
||||
._arena = arena,
|
||||
._type = unionInit(Blob.Type, chain.get(1)),
|
||||
._slice = "",
|
||||
._mime = "",
|
||||
@@ -268,19 +265,23 @@ pub fn blob(self: *Factory, child: anytype) !*@TypeOf(child) {
|
||||
return chain.get(1);
|
||||
}
|
||||
|
||||
pub fn abstractRange(self: *Factory, child: anytype, page: *Page) !*@TypeOf(child) {
|
||||
const allocator = self._slab.allocator();
|
||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(allocator);
|
||||
pub fn abstractRange(_: *const Factory, arena: Allocator, child: anytype, page: *Page) !*@TypeOf(child) {
|
||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(arena);
|
||||
|
||||
const doc = page.document.asNode();
|
||||
chain.set(0, AbstractRange{
|
||||
const abstract_range = chain.get(0);
|
||||
abstract_range.* = AbstractRange{
|
||||
._rc = 0,
|
||||
._arena = arena,
|
||||
._page_id = page.id,
|
||||
._type = unionInit(AbstractRange.Type, chain.get(1)),
|
||||
._end_offset = 0,
|
||||
._start_offset = 0,
|
||||
._end_container = doc,
|
||||
._start_container = doc,
|
||||
});
|
||||
};
|
||||
chain.setLeaf(1, child);
|
||||
page._live_ranges.append(&abstract_range._range_link);
|
||||
return chain.get(1);
|
||||
}
|
||||
|
||||
|
||||
@@ -17,30 +17,34 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const builtin = @import("builtin");
|
||||
const posix = std.posix;
|
||||
|
||||
const Net = @import("../Net.zig");
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("../log.zig");
|
||||
const Net = @import("../network/http.zig");
|
||||
const Network = @import("../network/Runtime.zig");
|
||||
const Config = @import("../Config.zig");
|
||||
const URL = @import("../browser/URL.zig");
|
||||
const Notification = @import("../Notification.zig");
|
||||
const CookieJar = @import("../browser/webapi/storage/Cookie.zig").Jar;
|
||||
const Robots = @import("../browser/Robots.zig");
|
||||
const Robots = @import("../network/Robots.zig");
|
||||
const RobotStore = Robots.RobotStore;
|
||||
const WebBotAuth = @import("../browser/WebBotAuth.zig");
|
||||
const WebBotAuth = @import("../network/WebBotAuth.zig");
|
||||
|
||||
const posix = std.posix;
|
||||
const Cache = @import("../network/cache/Cache.zig");
|
||||
const CacheMetadata = Cache.CachedMetadata;
|
||||
const CachedResponse = Cache.CachedResponse;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
const Method = Net.Method;
|
||||
const ResponseHead = Net.ResponseHead;
|
||||
const HeaderIterator = Net.HeaderIterator;
|
||||
pub const Method = Net.Method;
|
||||
pub const Headers = Net.Headers;
|
||||
pub const ResponseHead = Net.ResponseHead;
|
||||
pub const HeaderIterator = Net.HeaderIterator;
|
||||
|
||||
// This is loosely tied to a browser Page. Loading all the <scripts>, doing
|
||||
// XHR requests, and loading imports all happens through here. Sine the app
|
||||
@@ -67,9 +71,18 @@ active: usize,
|
||||
// 'networkAlmostIdle' Page.lifecycleEvent in CDP).
|
||||
intercepted: usize,
|
||||
|
||||
// Our easy handles, managed by a curl multi.
|
||||
// Our curl multi handle.
|
||||
handles: Net.Handles,
|
||||
|
||||
// Connections currently in this client's curl_multi.
|
||||
in_use: std.DoublyLinkedList = .{},
|
||||
|
||||
// Connections that failed to be removed from curl_multi during perform.
|
||||
dirty: std.DoublyLinkedList = .{},
|
||||
|
||||
// Whether we're currently inside a curl_multi_perform call.
|
||||
performing: bool = false,
|
||||
|
||||
// Use to generate the next request ID
|
||||
next_request_id: u32 = 0,
|
||||
|
||||
@@ -79,22 +92,18 @@ queue: TransferQueue,
|
||||
// The main app allocator
|
||||
allocator: Allocator,
|
||||
|
||||
// Reference to the App-owned Robot Store.
|
||||
robot_store: *RobotStore,
|
||||
network: *Network,
|
||||
// Queue of requests that depend on a robots.txt.
|
||||
// Allows us to fetch the robots.txt just once.
|
||||
pending_robots_queue: std.StringHashMapUnmanaged(std.ArrayList(Request)) = .empty,
|
||||
|
||||
// Reference to the App-owned WebBotAuth.
|
||||
web_bot_auth: *const ?WebBotAuth,
|
||||
|
||||
// Once we have a handle/easy to process a request with, we create a Transfer
|
||||
// which contains the Request as well as any state we need to process the
|
||||
// request. These wil come and go with each request.
|
||||
transfer_pool: std.heap.MemoryPool(Transfer),
|
||||
|
||||
// only needed for CDP which can change the proxy and then restore it. When
|
||||
// restoring, this originally-configured value is what it goes to.
|
||||
// The current proxy. CDP can change it, restoreOriginalProxy restores
|
||||
// from config.
|
||||
http_proxy: ?[:0]const u8 = null,
|
||||
|
||||
// track if the client use a proxy for connections.
|
||||
@@ -102,7 +111,10 @@ http_proxy: ?[:0]const u8 = null,
|
||||
// CDP.
|
||||
use_proxy: bool,
|
||||
|
||||
config: *const Config,
|
||||
// Current TLS verification state, applied per-connection in makeRequest.
|
||||
tls_verify: bool = true,
|
||||
|
||||
obey_robots: bool,
|
||||
|
||||
cdp_client: ?CDPClient = null,
|
||||
|
||||
@@ -126,28 +138,17 @@ pub const CDPClient = struct {
|
||||
|
||||
const TransferQueue = std.DoublyLinkedList;
|
||||
|
||||
pub fn init(
|
||||
allocator: Allocator,
|
||||
ca_blob: ?Net.Blob,
|
||||
robot_store: *RobotStore,
|
||||
web_bot_auth: *const ?WebBotAuth,
|
||||
config: *const Config,
|
||||
) !*Client {
|
||||
pub fn init(allocator: Allocator, network: *Network) !*Client {
|
||||
var transfer_pool = std.heap.MemoryPool(Transfer).init(allocator);
|
||||
errdefer transfer_pool.deinit();
|
||||
|
||||
const client = try allocator.create(Client);
|
||||
errdefer allocator.destroy(client);
|
||||
|
||||
var handles = try Net.Handles.init(allocator, ca_blob, config);
|
||||
errdefer handles.deinit(allocator);
|
||||
var handles = try Net.Handles.init(network.config);
|
||||
errdefer handles.deinit();
|
||||
|
||||
// Set transfer callbacks on each connection.
|
||||
for (handles.connections) |*conn| {
|
||||
try conn.setCallbacks(Transfer.headerCallback, Transfer.dataCallback);
|
||||
}
|
||||
|
||||
const http_proxy = config.httpProxy();
|
||||
const http_proxy = network.config.httpProxy();
|
||||
|
||||
client.* = .{
|
||||
.queue = .{},
|
||||
@@ -155,11 +156,11 @@ pub fn init(
|
||||
.intercepted = 0,
|
||||
.handles = handles,
|
||||
.allocator = allocator,
|
||||
.robot_store = robot_store,
|
||||
.web_bot_auth = web_bot_auth,
|
||||
.network = network,
|
||||
.http_proxy = http_proxy,
|
||||
.use_proxy = http_proxy != null,
|
||||
.config = config,
|
||||
.tls_verify = network.config.tlsVerifyHost(),
|
||||
.obey_robots = network.config.obeyRobots(),
|
||||
.transfer_pool = transfer_pool,
|
||||
};
|
||||
|
||||
@@ -168,7 +169,7 @@ pub fn init(
|
||||
|
||||
pub fn deinit(self: *Client) void {
|
||||
self.abort();
|
||||
self.handles.deinit(self.allocator);
|
||||
self.handles.deinit();
|
||||
|
||||
self.transfer_pool.deinit();
|
||||
|
||||
@@ -182,7 +183,7 @@ pub fn deinit(self: *Client) void {
|
||||
}
|
||||
|
||||
pub fn newHeaders(self: *const Client) !Net.Headers {
|
||||
return Net.Headers.init(self.config.http_headers.user_agent_header);
|
||||
return Net.Headers.init(self.network.config.http_headers.user_agent_header);
|
||||
}
|
||||
|
||||
pub fn abort(self: *Client) void {
|
||||
@@ -197,14 +198,14 @@ pub fn abortFrame(self: *Client, frame_id: u32) void {
|
||||
// but abort can avoid the frame_id check at comptime.
|
||||
fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
{
|
||||
var q = &self.handles.in_use;
|
||||
var q = &self.in_use;
|
||||
var n = q.first;
|
||||
while (n) |node| {
|
||||
n = node.next;
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
var transfer = Transfer.fromConnection(conn) catch |err| {
|
||||
// Let's cleanup what we can
|
||||
self.handles.remove(conn);
|
||||
self.removeConn(conn);
|
||||
log.err(.http, "get private info", .{ .err = err, .source = "abort" });
|
||||
continue;
|
||||
};
|
||||
@@ -241,8 +242,7 @@ fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG and abort_all) {
|
||||
std.debug.assert(self.handles.in_use.first == null);
|
||||
std.debug.assert(self.handles.available.len() == self.handles.connections.len);
|
||||
std.debug.assert(self.in_use.first == null);
|
||||
|
||||
const running = self.handles.perform() catch |err| {
|
||||
lp.assert(false, "multi perform in abort", .{ .err = err });
|
||||
@@ -252,27 +252,27 @@ fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
|
||||
}
|
||||
|
||||
pub fn tick(self: *Client, timeout_ms: u32) !PerformStatus {
|
||||
while (true) {
|
||||
if (self.handles.hasAvailable() == false) {
|
||||
while (self.queue.popFirst()) |queue_node| {
|
||||
const conn = self.network.getConnection() orelse {
|
||||
self.queue.prepend(queue_node);
|
||||
break;
|
||||
}
|
||||
const queue_node = self.queue.popFirst() orelse break;
|
||||
};
|
||||
const transfer: *Transfer = @fieldParentPtr("_node", queue_node);
|
||||
|
||||
// we know this exists, because we checked hasAvailable() above
|
||||
const conn = self.handles.get().?;
|
||||
try self.makeRequest(conn, transfer);
|
||||
}
|
||||
return self.perform(@intCast(timeout_ms));
|
||||
}
|
||||
|
||||
pub fn request(self: *Client, req: Request) !void {
|
||||
if (self.config.obeyRobots()) {
|
||||
if (self.obey_robots == false) {
|
||||
return self.processRequest(req);
|
||||
}
|
||||
|
||||
const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
|
||||
errdefer self.allocator.free(robots_url);
|
||||
|
||||
// If we have this robots cached, we can take a fast path.
|
||||
if (self.robot_store.get(robots_url)) |robot_entry| {
|
||||
if (self.network.robot_store.get(robots_url)) |robot_entry| {
|
||||
defer self.allocator.free(robots_url);
|
||||
|
||||
switch (robot_entry) {
|
||||
@@ -290,14 +290,72 @@ pub fn request(self: *Client, req: Request) !void {
|
||||
|
||||
return self.processRequest(req);
|
||||
}
|
||||
|
||||
return self.fetchRobotsThenProcessRequest(robots_url, req);
|
||||
}
|
||||
|
||||
fn serveFromCache(req: Request, cached: *const CachedResponse) !void {
|
||||
const response = Response.fromCached(req.ctx, cached);
|
||||
|
||||
if (req.start_callback) |cb| {
|
||||
try cb(response);
|
||||
}
|
||||
|
||||
return self.processRequest(req);
|
||||
const proceed = try req.header_callback(response);
|
||||
if (!proceed) {
|
||||
switch (cached.data) {
|
||||
.buffer => |_| {},
|
||||
.file => |file| file.close(),
|
||||
}
|
||||
req.error_callback(req.ctx, error.Abort);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (cached.data) {
|
||||
.buffer => |data| {
|
||||
if (data.len > 0) {
|
||||
try req.data_callback(response, data);
|
||||
}
|
||||
},
|
||||
.file => |file| {
|
||||
defer file.close();
|
||||
var buf: [1024]u8 = undefined;
|
||||
var file_reader = file.reader(&buf);
|
||||
|
||||
const reader = &file_reader.interface;
|
||||
var read_buf: [1024]u8 = undefined;
|
||||
|
||||
while (true) {
|
||||
const curr = try reader.readSliceShort(&read_buf);
|
||||
if (curr == 0) break;
|
||||
try req.data_callback(response, read_buf[0..curr]);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
try req.done_callback(req.ctx);
|
||||
}
|
||||
|
||||
fn processRequest(self: *Client, req: Request) !void {
|
||||
if (self.network.cache) |*cache| {
|
||||
if (req.method == .GET) {
|
||||
const arena = try self.network.app.arena_pool.acquire();
|
||||
defer self.network.app.arena_pool.release(arena);
|
||||
|
||||
if (cache.get(arena, .{ .url = req.url, .timestamp = std.time.timestamp() })) |cached| {
|
||||
log.debug(.browser, "http.cache.get", .{
|
||||
.url = req.url,
|
||||
.found = true,
|
||||
.metadata = cached.metadata,
|
||||
});
|
||||
|
||||
defer req.headers.deinit();
|
||||
return serveFromCache(req, &cached);
|
||||
} else {
|
||||
log.debug(.browser, "http.cache.get", .{ .url = req.url, .found = false });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const transfer = try self.makeTransfer(req);
|
||||
|
||||
transfer.req.notification.dispatch(.http_request_start, &.{ .transfer = transfer });
|
||||
@@ -384,8 +442,10 @@ fn fetchRobotsThenProcessRequest(self: *Client, robots_url: [:0]const u8, req: R
|
||||
try entry.value_ptr.append(self.allocator, req);
|
||||
}
|
||||
|
||||
fn robotsHeaderCallback(transfer: *Transfer) !bool {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(transfer.ctx));
|
||||
fn robotsHeaderCallback(response: Response) !bool {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(response.ctx));
|
||||
// Robots callbacks only happen on real live requests.
|
||||
const transfer = response.inner.live;
|
||||
|
||||
if (transfer.response_header) |hdr| {
|
||||
log.debug(.browser, "robots status", .{ .status = hdr.status, .robots_url = ctx.robots_url });
|
||||
@@ -399,8 +459,8 @@ fn robotsHeaderCallback(transfer: *Transfer) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn robotsDataCallback(transfer: *Transfer, data: []const u8) !void {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(transfer.ctx));
|
||||
fn robotsDataCallback(response: Response, data: []const u8) !void {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(response.ctx));
|
||||
try ctx.buffer.appendSlice(ctx.client.allocator, data);
|
||||
}
|
||||
|
||||
@@ -413,18 +473,18 @@ fn robotsDoneCallback(ctx_ptr: *anyopaque) !void {
|
||||
switch (ctx.status) {
|
||||
200 => {
|
||||
if (ctx.buffer.items.len > 0) {
|
||||
const robots: ?Robots = ctx.client.robot_store.robotsFromBytes(
|
||||
ctx.client.config.http_headers.user_agent,
|
||||
const robots: ?Robots = ctx.client.network.robot_store.robotsFromBytes(
|
||||
ctx.client.network.config.http_headers.user_agent,
|
||||
ctx.buffer.items,
|
||||
) catch blk: {
|
||||
log.warn(.browser, "failed to parse robots", .{ .robots_url = ctx.robots_url });
|
||||
// If we fail to parse, we just insert it as absent and ignore.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
break :blk null;
|
||||
};
|
||||
|
||||
if (robots) |r| {
|
||||
try ctx.client.robot_store.put(ctx.robots_url, r);
|
||||
try ctx.client.network.robot_store.put(ctx.robots_url, r);
|
||||
const path = URL.getPathname(ctx.req.url);
|
||||
allowed = r.isAllowed(path);
|
||||
}
|
||||
@@ -433,12 +493,12 @@ fn robotsDoneCallback(ctx_ptr: *anyopaque) !void {
|
||||
404 => {
|
||||
log.debug(.http, "robots not found", .{ .url = ctx.robots_url });
|
||||
// If we get a 404, we just insert it as absent.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
},
|
||||
else => {
|
||||
log.debug(.http, "unexpected status on robots", .{ .url = ctx.robots_url, .status = ctx.status });
|
||||
// If we get an unexpected status, we just insert as absent.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
},
|
||||
}
|
||||
|
||||
@@ -544,8 +604,8 @@ fn waitForInterceptedResponse(self: *Client, transfer: *Transfer) !bool {
|
||||
fn process(self: *Client, transfer: *Transfer) !void {
|
||||
// libcurl doesn't allow recursive calls, if we're in a `perform()` operation
|
||||
// then we _have_ to queue this.
|
||||
if (self.handles.performing == false) {
|
||||
if (self.handles.get()) |conn| {
|
||||
if (self.performing == false) {
|
||||
if (self.network.getConnection()) |conn| {
|
||||
return self.makeRequest(conn, transfer);
|
||||
}
|
||||
}
|
||||
@@ -619,9 +679,8 @@ fn makeTransfer(self: *Client, req: Request) !*Transfer {
|
||||
.id = id,
|
||||
.url = req.url,
|
||||
.req = req,
|
||||
.ctx = req.ctx,
|
||||
.client = self,
|
||||
.max_response_size = self.config.httpMaxResponseSize(),
|
||||
.max_response_size = self.network.config.httpMaxResponseSize(),
|
||||
};
|
||||
return transfer;
|
||||
}
|
||||
@@ -642,9 +701,9 @@ fn requestFailed(transfer: *Transfer, err: anyerror, comptime execute_callback:
|
||||
});
|
||||
|
||||
if (execute_callback) {
|
||||
transfer.req.error_callback(transfer.ctx, err);
|
||||
transfer.req.error_callback(transfer.req.ctx, err);
|
||||
} else if (transfer.req.shutdown_callback) |cb| {
|
||||
cb(transfer.ctx);
|
||||
cb(transfer.req.ctx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -659,10 +718,7 @@ fn requestFailed(transfer: *Transfer, err: anyerror, comptime execute_callback:
|
||||
// can be changed at any point in the easy's lifecycle.
|
||||
pub fn changeProxy(self: *Client, proxy: [:0]const u8) !void {
|
||||
try self.ensureNoActiveConnection();
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setProxy(proxy.ptr);
|
||||
}
|
||||
self.http_proxy = proxy;
|
||||
self.use_proxy = true;
|
||||
}
|
||||
|
||||
@@ -671,31 +727,21 @@ pub fn changeProxy(self: *Client, proxy: [:0]const u8) !void {
|
||||
pub fn restoreOriginalProxy(self: *Client) !void {
|
||||
try self.ensureNoActiveConnection();
|
||||
|
||||
const proxy = if (self.http_proxy) |p| p.ptr else null;
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setProxy(proxy);
|
||||
}
|
||||
self.use_proxy = proxy != null;
|
||||
self.http_proxy = self.network.config.httpProxy();
|
||||
self.use_proxy = self.http_proxy != null;
|
||||
}
|
||||
|
||||
// Enable TLS verification on all connections.
|
||||
pub fn enableTlsVerify(self: *Client) !void {
|
||||
pub fn setTlsVerify(self: *Client, verify: bool) !void {
|
||||
// Remove inflight connections check on enable TLS b/c chromiumoxide calls
|
||||
// the command during navigate and Curl seems to accept it...
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setTlsVerify(true, self.use_proxy);
|
||||
}
|
||||
}
|
||||
|
||||
// Disable TLS verification on all connections.
|
||||
pub fn disableTlsVerify(self: *Client) !void {
|
||||
// Remove inflight connections check on disable TLS b/c chromiumoxide calls
|
||||
// the command during navigate and Curl seems to accept it...
|
||||
|
||||
for (self.handles.connections) |*conn| {
|
||||
try conn.setTlsVerify(false, self.use_proxy);
|
||||
var it = self.in_use.first;
|
||||
while (it) |node| : (it = node.next) {
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
try conn.setTlsVerify(verify, self.use_proxy);
|
||||
}
|
||||
self.tls_verify = verify;
|
||||
}
|
||||
|
||||
fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerror!void {
|
||||
@@ -706,9 +752,14 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
errdefer {
|
||||
transfer._conn = null;
|
||||
transfer.deinit();
|
||||
self.handles.isAvailable(conn);
|
||||
self.releaseConn(conn);
|
||||
}
|
||||
|
||||
// Set callbacks and per-client settings on the pooled connection.
|
||||
try conn.setCallbacks(Transfer.headerCallback, Transfer.dataCallback);
|
||||
try conn.setProxy(self.http_proxy);
|
||||
try conn.setTlsVerify(self.tls_verify, self.use_proxy);
|
||||
|
||||
try conn.setURL(req.url);
|
||||
try conn.setMethod(req.method);
|
||||
if (req.body) |b| {
|
||||
@@ -718,11 +769,11 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
}
|
||||
|
||||
var header_list = req.headers;
|
||||
try conn.secretHeaders(&header_list, &self.config.http_headers); // Add headers that must be hidden from intercepts
|
||||
try conn.secretHeaders(&header_list, &self.network.config.http_headers); // Add headers that must be hidden from intercepts
|
||||
try conn.setHeaders(&header_list);
|
||||
|
||||
// If we have WebBotAuth, sign our request.
|
||||
if (self.web_bot_auth.*) |wba| {
|
||||
if (self.network.web_bot_auth) |*wba| {
|
||||
const authority = URL.getHost(req.url);
|
||||
try wba.signRequest(transfer.arena.allocator(), &header_list, authority);
|
||||
}
|
||||
@@ -736,7 +787,11 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
|
||||
// add credentials
|
||||
if (req.credentials) |creds| {
|
||||
if (transfer._auth_challenge != null and transfer._auth_challenge.?.source == .proxy) {
|
||||
try conn.setProxyCredentials(creds);
|
||||
} else {
|
||||
try conn.setCredentials(creds);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -745,15 +800,17 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
// fails BEFORE `curl_multi_add_handle` succeeds, the we still need to do
|
||||
// cleanup. But if things fail after `curl_multi_add_handle`, we expect
|
||||
// perfom to pickup the failure and cleanup.
|
||||
self.in_use.append(&conn.node);
|
||||
self.handles.add(conn) catch |err| {
|
||||
transfer._conn = null;
|
||||
transfer.deinit();
|
||||
self.handles.isAvailable(conn);
|
||||
self.in_use.remove(&conn.node);
|
||||
self.releaseConn(conn);
|
||||
return err;
|
||||
};
|
||||
|
||||
if (req.start_callback) |cb| {
|
||||
cb(transfer) catch |err| {
|
||||
cb(Response.fromLive(transfer)) catch |err| {
|
||||
transfer.deinit();
|
||||
return err;
|
||||
};
|
||||
@@ -769,7 +826,22 @@ pub const PerformStatus = enum {
|
||||
};
|
||||
|
||||
fn perform(self: *Client, timeout_ms: c_int) !PerformStatus {
|
||||
const running = try self.handles.perform();
|
||||
const running = blk: {
|
||||
self.performing = true;
|
||||
defer self.performing = false;
|
||||
|
||||
break :blk try self.handles.perform();
|
||||
};
|
||||
|
||||
// Process dirty connections — return them to Runtime pool.
|
||||
while (self.dirty.popFirst()) |node| {
|
||||
const conn: *Net.Connection = @fieldParentPtr("node", node);
|
||||
self.handles.remove(conn) catch |err| {
|
||||
log.fatal(.http, "multi remove handle", .{ .err = err, .src = "perform" });
|
||||
@panic("multi_remove_handle");
|
||||
};
|
||||
self.releaseConn(conn);
|
||||
}
|
||||
|
||||
// We're potentially going to block for a while until we get data. Process
|
||||
// whatever messages we have waiting ahead of time.
|
||||
@@ -870,29 +942,57 @@ fn processMessages(self: *Client) !bool {
|
||||
break :blk;
|
||||
}
|
||||
}
|
||||
transfer.req.done_callback(transfer.ctx) catch |err| {
|
||||
transfer.req.done_callback(transfer.req.ctx) catch |err| {
|
||||
// transfer isn't valid at this point, don't use it.
|
||||
log.err(.http, "done_callback", .{ .err = err });
|
||||
requestFailed(transfer, err, true);
|
||||
continue;
|
||||
};
|
||||
|
||||
if (transfer.pending_cache_metadata) |metadata| {
|
||||
const cache = &self.network.cache.?;
|
||||
|
||||
// TODO: Support Vary Keying
|
||||
const cache_key = transfer.req.url;
|
||||
|
||||
log.debug(.browser, "http cache", .{ .key = cache_key, .metadata = metadata });
|
||||
cache.put(metadata, transfer.pending_cache_body.items) catch |err| {
|
||||
log.warn(.http, "cache put failed", .{ .err = err });
|
||||
};
|
||||
log.debug(.browser, "http.cache.put", .{ .url = transfer.req.url });
|
||||
}
|
||||
}
|
||||
|
||||
transfer.req.notification.dispatch(.http_request_done, &.{
|
||||
.transfer = transfer,
|
||||
});
|
||||
processed = true;
|
||||
}
|
||||
}
|
||||
return processed;
|
||||
}
|
||||
|
||||
fn endTransfer(self: *Client, transfer: *Transfer) void {
|
||||
const conn = transfer._conn.?;
|
||||
self.handles.remove(conn);
|
||||
self.removeConn(conn);
|
||||
transfer._conn = null;
|
||||
self.active -= 1;
|
||||
}
|
||||
|
||||
fn removeConn(self: *Client, conn: *Net.Connection) void {
|
||||
self.in_use.remove(&conn.node);
|
||||
if (self.handles.remove(conn)) {
|
||||
self.releaseConn(conn);
|
||||
} else |_| {
|
||||
// Can happen if we're in a perform() call, so we'll queue this
|
||||
// for cleanup later.
|
||||
self.dirty.append(&conn.node);
|
||||
}
|
||||
}
|
||||
|
||||
fn releaseConn(self: *Client, conn: *Net.Connection) void {
|
||||
self.network.releaseConnection(conn);
|
||||
}
|
||||
|
||||
fn ensureNoActiveConnection(self: *const Client) !void {
|
||||
if (self.active > 0) {
|
||||
return error.InflightConnection;
|
||||
@@ -915,7 +1015,7 @@ pub const RequestCookie = struct {
|
||||
|
||||
if (arr.items.len > 0) {
|
||||
try arr.append(temp, 0); //null terminate
|
||||
headers.cookies = @ptrCast(arr.items.ptr);
|
||||
headers.cookies = @as([*c]const u8, @ptrCast(arr.items.ptr));
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -942,9 +1042,9 @@ pub const Request = struct {
|
||||
// arbitrary data that can be associated with this request
|
||||
ctx: *anyopaque = undefined,
|
||||
|
||||
start_callback: ?*const fn (transfer: *Transfer) anyerror!void = null,
|
||||
header_callback: *const fn (transfer: *Transfer) anyerror!bool,
|
||||
data_callback: *const fn (transfer: *Transfer, data: []const u8) anyerror!void,
|
||||
start_callback: ?*const fn (response: Response) anyerror!void = null,
|
||||
header_callback: *const fn (response: Response) anyerror!bool,
|
||||
data_callback: *const fn (response: Response, data: []const u8) anyerror!void,
|
||||
done_callback: *const fn (ctx: *anyopaque) anyerror!void,
|
||||
error_callback: *const fn (ctx: *anyopaque, err: anyerror) void,
|
||||
shutdown_callback: ?*const fn (ctx: *anyopaque) void = null,
|
||||
@@ -972,16 +1072,92 @@ pub const Request = struct {
|
||||
|
||||
const AuthChallenge = Net.AuthChallenge;
|
||||
|
||||
pub const Response = struct {
|
||||
ctx: *anyopaque,
|
||||
inner: union(enum) {
|
||||
live: *Transfer,
|
||||
cached: *const CachedResponse,
|
||||
},
|
||||
|
||||
pub fn fromLive(transfer: *Transfer) Response {
|
||||
return .{ .ctx = transfer.req.ctx, .inner = .{ .live = transfer } };
|
||||
}
|
||||
|
||||
pub fn fromCached(ctx: *anyopaque, resp: *const CachedResponse) Response {
|
||||
return .{ .ctx = ctx, .inner = .{ .cached = resp } };
|
||||
}
|
||||
|
||||
pub fn status(self: Response) ?u16 {
|
||||
return switch (self.inner) {
|
||||
.live => |live| if (live.response_header) |rh| rh.status else null,
|
||||
.cached => |c| c.metadata.status,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn contentType(self: Response) ?[]const u8 {
|
||||
return switch (self.inner) {
|
||||
.live => |live| if (live.response_header) |*rh| rh.contentType() else null,
|
||||
.cached => |c| c.metadata.content_type,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn contentLength(self: Response) ?u32 {
|
||||
return switch (self.inner) {
|
||||
.live => |live| live.getContentLength(),
|
||||
.cached => |c| switch (c.data) {
|
||||
.buffer => |buf| @intCast(buf.len),
|
||||
.file => |f| @intCast(f.getEndPos() catch 0),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn redirectCount(self: Response) ?u32 {
|
||||
return switch (self.inner) {
|
||||
.live => |live| if (live.response_header) |rh| rh.redirect_count else null,
|
||||
.cached => 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn url(self: Response) [:0]const u8 {
|
||||
return switch (self.inner) {
|
||||
.live => |live| live.url,
|
||||
.cached => |c| c.metadata.url,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn headerIterator(self: Response) HeaderIterator {
|
||||
return switch (self.inner) {
|
||||
.live => |live| live.responseHeaderIterator(),
|
||||
.cached => |c| HeaderIterator{ .list = .{ .list = c.metadata.headers } },
|
||||
};
|
||||
}
|
||||
|
||||
pub fn abort(self: Response, err: anyerror) void {
|
||||
switch (self.inner) {
|
||||
.live => |live| live.abort(err),
|
||||
.cached => {},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn terminate(self: Response) void {
|
||||
switch (self.inner) {
|
||||
.live => |live| live.terminate(),
|
||||
.cached => {},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Transfer = struct {
|
||||
arena: ArenaAllocator,
|
||||
id: u32 = 0,
|
||||
req: Request,
|
||||
url: [:0]const u8,
|
||||
ctx: *anyopaque, // copied from req.ctx to make it easier for callback handlers
|
||||
client: *Client,
|
||||
// total bytes received in the response, including the response status line,
|
||||
// the headers, and the [encoded] body.
|
||||
bytes_received: usize = 0,
|
||||
pending_cache_body: std.ArrayList(u8) = .empty,
|
||||
pending_cache_metadata: ?CacheMetadata = null,
|
||||
|
||||
aborted: bool = false,
|
||||
|
||||
@@ -1033,6 +1209,8 @@ pub const Transfer = struct {
|
||||
self._notified_fail = false;
|
||||
self.response_header = null;
|
||||
self.bytes_received = 0;
|
||||
self.pending_cache_metadata = null;
|
||||
self.pending_cache_body = .empty;
|
||||
|
||||
self._tries += 1;
|
||||
}
|
||||
@@ -1040,7 +1218,7 @@ pub const Transfer = struct {
|
||||
fn deinit(self: *Transfer) void {
|
||||
self.req.headers.deinit();
|
||||
if (self._conn) |conn| {
|
||||
self.client.handles.remove(conn);
|
||||
self.client.removeConn(conn);
|
||||
}
|
||||
self.arena.deinit();
|
||||
self.client.transfer_pool.destroy(self);
|
||||
@@ -1110,7 +1288,7 @@ pub const Transfer = struct {
|
||||
requestFailed(self, err, true);
|
||||
|
||||
const client = self.client;
|
||||
if (self._performing or client.handles.performing) {
|
||||
if (self._performing or client.performing) {
|
||||
// We're currently in a curl_multi_perform. We cannot call endTransfer
|
||||
// as that calls curl_multi_remove_handle, and you can't do that
|
||||
// from a curl callback. Instead, we flag this transfer and all of
|
||||
@@ -1141,7 +1319,7 @@ pub const Transfer = struct {
|
||||
self.client.endTransfer(self);
|
||||
}
|
||||
if (self.req.shutdown_callback) |cb| {
|
||||
cb(self.ctx);
|
||||
cb(self.req.ctx);
|
||||
}
|
||||
self.deinit();
|
||||
}
|
||||
@@ -1243,11 +1421,48 @@ pub const Transfer = struct {
|
||||
}
|
||||
}
|
||||
|
||||
const proceed = transfer.req.header_callback(transfer) catch |err| {
|
||||
const proceed = transfer.req.header_callback(Response.fromLive(transfer)) catch |err| {
|
||||
log.err(.http, "header_callback", .{ .err = err, .req = transfer });
|
||||
return err;
|
||||
};
|
||||
|
||||
if (transfer.client.network.cache != null and transfer.req.method == .GET) {
|
||||
const rh = &transfer.response_header.?;
|
||||
const allocator = transfer.arena.allocator();
|
||||
|
||||
const maybe_cm = try Cache.tryCache(
|
||||
allocator,
|
||||
std.time.timestamp(),
|
||||
transfer.url,
|
||||
rh.status,
|
||||
rh.contentType(),
|
||||
if (conn.getResponseHeader("cache-control", 0)) |h| h.value else null,
|
||||
if (conn.getResponseHeader("vary", 0)) |h| h.value else null,
|
||||
if (conn.getResponseHeader("etag", 0)) |h| h.value else null,
|
||||
if (conn.getResponseHeader("last-modified", 0)) |h| h.value else null,
|
||||
if (conn.getResponseHeader("age", 0)) |h| h.value else null,
|
||||
conn.getResponseHeader("set-cookie", 0) != null,
|
||||
conn.getResponseHeader("authorization", 0) != null,
|
||||
);
|
||||
|
||||
if (maybe_cm) |cm| {
|
||||
var header_list: std.ArrayList(Net.Header) = .empty;
|
||||
var it = transfer.responseHeaderIterator();
|
||||
while (it.next()) |hdr| {
|
||||
try header_list.append(allocator, .{
|
||||
.name = try allocator.dupe(u8, hdr.name),
|
||||
.value = try allocator.dupe(u8, hdr.value),
|
||||
});
|
||||
}
|
||||
|
||||
transfer.pending_cache_metadata = cm;
|
||||
transfer.pending_cache_metadata.?.headers = header_list.items;
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try transfer.pending_cache_body.ensureTotalCapacity(allocator, cl);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
transfer.req.notification.dispatch(.http_response_header_done, &.{
|
||||
.transfer = transfer,
|
||||
});
|
||||
@@ -1275,6 +1490,16 @@ pub const Transfer = struct {
|
||||
|
||||
if (buf_len < 3) {
|
||||
// could be \r\n or \n.
|
||||
// We get the last header line.
|
||||
if (transfer._redirecting) {
|
||||
// parse and set cookies for the redirection.
|
||||
redirectionCookies(transfer, &conn) catch |err| {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "redirection cookies", .{ .err = err });
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
@@ -1320,7 +1545,7 @@ pub const Transfer = struct {
|
||||
}
|
||||
transfer._redirecting = false;
|
||||
|
||||
if ((status == 401 or status == 407) and transfer.client.use_proxy) {
|
||||
if (status == 401 or status == 407) {
|
||||
// The auth challenge must be parsed from a following
|
||||
// WWW-Authenticate or Proxy-Authenticate header.
|
||||
transfer._auth_challenge = .{
|
||||
@@ -1341,7 +1566,6 @@ pub const Transfer = struct {
|
||||
transfer.bytes_received += buf_len;
|
||||
}
|
||||
|
||||
if (buf_len > 2) {
|
||||
if (transfer._auth_challenge != null) {
|
||||
// try to parse auth challenge.
|
||||
if (std.ascii.startsWithIgnoreCase(header, "WWW-Authenticate") or
|
||||
@@ -1359,21 +1583,6 @@ pub const Transfer = struct {
|
||||
transfer._auth_challenge = ac;
|
||||
}
|
||||
}
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
// Starting here, we get the last header line.
|
||||
|
||||
if (transfer._redirecting) {
|
||||
// parse and set cookies for the redirection.
|
||||
redirectionCookies(transfer, &conn) catch |err| {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "redirection cookies", .{ .err = err });
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
return buf_len;
|
||||
}
|
||||
|
||||
return buf_len;
|
||||
}
|
||||
@@ -1414,7 +1623,14 @@ pub const Transfer = struct {
|
||||
}
|
||||
|
||||
const chunk = buffer[0..chunk_len];
|
||||
transfer.req.data_callback(transfer, chunk) catch |err| {
|
||||
if (transfer.pending_cache_metadata != null) {
|
||||
transfer.pending_cache_body.appendSlice(transfer.arena.allocator(), chunk) catch |err| {
|
||||
log.err(.http, "cache body append", .{ .err = err, .req = transfer });
|
||||
return Net.writefunc_error;
|
||||
};
|
||||
}
|
||||
|
||||
transfer.req.data_callback(Response.fromLive(transfer), chunk) catch |err| {
|
||||
log.err(.http, "data_callback", .{ .err = err, .req = transfer });
|
||||
return Net.writefunc_error;
|
||||
};
|
||||
@@ -1466,7 +1682,7 @@ pub const Transfer = struct {
|
||||
fn _fulfill(transfer: *Transfer, status: u16, headers: []const Net.Header, body: ?[]const u8) !void {
|
||||
const req = &transfer.req;
|
||||
if (req.start_callback) |cb| {
|
||||
try cb(transfer);
|
||||
try cb(Response.fromLive(transfer));
|
||||
}
|
||||
|
||||
transfer.response_header = .{
|
||||
@@ -1485,13 +1701,13 @@ pub const Transfer = struct {
|
||||
}
|
||||
|
||||
lp.assert(transfer._header_done_called == false, "Transfer.fulfill header_done_called", .{});
|
||||
if (try req.header_callback(transfer) == false) {
|
||||
if (try req.header_callback(Response.fromLive(transfer)) == false) {
|
||||
transfer.abort(error.Abort);
|
||||
return;
|
||||
}
|
||||
|
||||
if (body) |b| {
|
||||
try req.data_callback(transfer, b);
|
||||
try req.data_callback(Response.fromLive(transfer), b);
|
||||
}
|
||||
|
||||
try req.done_callback(req.ctx);
|
||||
@@ -25,6 +25,10 @@ params: []const u8 = "",
|
||||
// We keep 41 for null-termination since HTML parser expects in this format.
|
||||
charset: [41]u8 = default_charset,
|
||||
charset_len: usize = default_charset_len,
|
||||
is_default_charset: bool = true,
|
||||
|
||||
type_buf: [127]u8 = @splat(0),
|
||||
sub_type_buf: [127]u8 = @splat(0),
|
||||
|
||||
/// String "UTF-8" continued by null characters.
|
||||
const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
|
||||
@@ -60,7 +64,10 @@ pub const ContentType = union(ContentTypeEnum) {
|
||||
image_webp: void,
|
||||
application_json: void,
|
||||
unknown: void,
|
||||
other: struct { type: []const u8, sub_type: []const u8 },
|
||||
other: struct {
|
||||
type: []const u8,
|
||||
sub_type: []const u8,
|
||||
},
|
||||
};
|
||||
|
||||
pub fn contentTypeString(mime: *const Mime) []const u8 {
|
||||
@@ -111,17 +118,18 @@ fn parseCharset(value: []const u8) error{ CharsetTooBig, Invalid }![]const u8 {
|
||||
return value;
|
||||
}
|
||||
|
||||
pub fn parse(input: []u8) !Mime {
|
||||
pub fn parse(input: []const u8) !Mime {
|
||||
if (input.len > 255) {
|
||||
return error.TooBig;
|
||||
}
|
||||
|
||||
// Zig's trim API is broken. The return type is always `[]const u8`,
|
||||
// even if the input type is `[]u8`. @constCast is safe here.
|
||||
var normalized = @constCast(std.mem.trim(u8, input, &std.ascii.whitespace));
|
||||
var buf: [255]u8 = undefined;
|
||||
const normalized = std.ascii.lowerString(&buf, std.mem.trim(u8, input, &std.ascii.whitespace));
|
||||
_ = std.ascii.lowerString(normalized, normalized);
|
||||
|
||||
const content_type, const type_len = try parseContentType(normalized);
|
||||
var mime = Mime{ .content_type = undefined };
|
||||
|
||||
const content_type, const type_len = try parseContentType(normalized, &mime.type_buf, &mime.sub_type_buf);
|
||||
if (type_len >= normalized.len) {
|
||||
return .{ .content_type = content_type };
|
||||
}
|
||||
@@ -130,6 +138,7 @@ pub fn parse(input: []u8) !Mime {
|
||||
|
||||
var charset: [41]u8 = default_charset;
|
||||
var charset_len: usize = default_charset_len;
|
||||
var has_explicit_charset = false;
|
||||
|
||||
var it = std.mem.splitScalar(u8, params, ';');
|
||||
while (it.next()) |attr| {
|
||||
@@ -156,16 +165,144 @@ pub fn parse(input: []u8) !Mime {
|
||||
// Null-terminate right after attribute value.
|
||||
charset[attribute_value.len] = 0;
|
||||
charset_len = attribute_value.len;
|
||||
has_explicit_charset = true;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
.params = params,
|
||||
.charset = charset,
|
||||
.charset_len = charset_len,
|
||||
.content_type = content_type,
|
||||
mime.params = params;
|
||||
mime.charset = charset;
|
||||
mime.charset_len = charset_len;
|
||||
mime.content_type = content_type;
|
||||
mime.is_default_charset = !has_explicit_charset;
|
||||
return mime;
|
||||
}
|
||||
|
||||
/// Prescan the first 1024 bytes of an HTML document for a charset declaration.
|
||||
/// Looks for `<meta charset="X">` and `<meta http-equiv="Content-Type" content="...;charset=X">`.
|
||||
/// Returns the charset value or null if none found.
|
||||
/// See: https://www.w3.org/International/questions/qa-html-encoding-declarations
|
||||
pub fn prescanCharset(html: []const u8) ?[]const u8 {
|
||||
const limit = @min(html.len, 1024);
|
||||
const data = html[0..limit];
|
||||
|
||||
// Scan for <meta tags
|
||||
var pos: usize = 0;
|
||||
while (pos < data.len) {
|
||||
// Find next '<'
|
||||
pos = std.mem.indexOfScalarPos(u8, data, pos, '<') orelse return null;
|
||||
pos += 1;
|
||||
if (pos >= data.len) return null;
|
||||
|
||||
// Check for "meta" (case-insensitive)
|
||||
if (pos + 4 >= data.len) return null;
|
||||
var tag_buf: [4]u8 = undefined;
|
||||
_ = std.ascii.lowerString(&tag_buf, data[pos..][0..4]);
|
||||
if (!std.mem.eql(u8, &tag_buf, "meta")) {
|
||||
continue;
|
||||
}
|
||||
pos += 4;
|
||||
|
||||
// Must be followed by whitespace or end of tag
|
||||
if (pos >= data.len) return null;
|
||||
if (data[pos] != ' ' and data[pos] != '\t' and data[pos] != '\n' and
|
||||
data[pos] != '\r' and data[pos] != '/')
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Scan attributes within this meta tag
|
||||
const tag_end = std.mem.indexOfScalarPos(u8, data, pos, '>') orelse return null;
|
||||
const attrs = data[pos..tag_end];
|
||||
|
||||
// Look for charset= attribute directly
|
||||
if (findAttrValue(attrs, "charset")) |charset| {
|
||||
if (charset.len > 0 and charset.len <= 40) return charset;
|
||||
}
|
||||
|
||||
// Look for http-equiv="content-type" with content="...;charset=X"
|
||||
if (findAttrValue(attrs, "http-equiv")) |he| {
|
||||
if (std.ascii.eqlIgnoreCase(he, "content-type")) {
|
||||
if (findAttrValue(attrs, "content")) |content| {
|
||||
if (extractCharsetFromContentType(content)) |charset| {
|
||||
return charset;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pos = tag_end + 1;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 {
|
||||
var pos: usize = 0;
|
||||
while (pos < attrs.len) {
|
||||
// Skip whitespace
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t' or
|
||||
attrs[pos] == '\n' or attrs[pos] == '\r'))
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= attrs.len) return null;
|
||||
|
||||
// Read attribute name
|
||||
const attr_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != '=' and attrs[pos] != ' ' and
|
||||
attrs[pos] != '\t' and attrs[pos] != '>' and attrs[pos] != '/')
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
const attr_name = attrs[attr_start..pos];
|
||||
|
||||
// Skip whitespace around =
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
|
||||
if (pos >= attrs.len or attrs[pos] != '=') {
|
||||
// No '=' found - skip this token. Advance at least one byte to avoid infinite loop.
|
||||
if (pos == attr_start) pos += 1;
|
||||
continue;
|
||||
}
|
||||
pos += 1; // skip '='
|
||||
while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
|
||||
if (pos >= attrs.len) return null;
|
||||
|
||||
// Read attribute value
|
||||
const value = blk: {
|
||||
if (attrs[pos] == '"' or attrs[pos] == '\'') {
|
||||
const quote = attrs[pos];
|
||||
pos += 1;
|
||||
const val_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != quote) pos += 1;
|
||||
const val = attrs[val_start..pos];
|
||||
if (pos < attrs.len) pos += 1; // skip closing quote
|
||||
break :blk val;
|
||||
} else {
|
||||
const val_start = pos;
|
||||
while (pos < attrs.len and attrs[pos] != ' ' and attrs[pos] != '\t' and
|
||||
attrs[pos] != '>' and attrs[pos] != '/')
|
||||
{
|
||||
pos += 1;
|
||||
}
|
||||
break :blk attrs[val_start..pos];
|
||||
}
|
||||
};
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(attr_name, name)) return value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn extractCharsetFromContentType(content: []const u8) ?[]const u8 {
|
||||
var it = std.mem.splitScalar(u8, content, ';');
|
||||
while (it.next()) |part| {
|
||||
const trimmed = std.mem.trimLeft(u8, part, &.{ ' ', '\t' });
|
||||
if (trimmed.len > 8 and std.ascii.eqlIgnoreCase(trimmed[0..8], "charset=")) {
|
||||
const val = std.mem.trim(u8, trimmed[8..], &.{ ' ', '\t', '"', '\'' });
|
||||
if (val.len > 0 and val.len <= 40) return val;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn sniff(body: []const u8) ?Mime {
|
||||
@@ -178,15 +315,30 @@ pub fn sniff(body: []const u8) ?Mime {
|
||||
if (content[0] != '<') {
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) {
|
||||
// UTF-8 BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = default_charset,
|
||||
.charset_len = default_charset_len,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) {
|
||||
// UTF-16 big-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = .{ 'U', 'T', 'F', '-', '1', '6', 'B', 'E' } ++ .{0} ** 33,
|
||||
.charset_len = 8,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) {
|
||||
// UTF-16 little-endian BOM
|
||||
return .{ .content_type = .{ .text_plain = {} } };
|
||||
return .{
|
||||
.content_type = .{ .text_plain = {} },
|
||||
.charset = .{ 'U', 'T', 'F', '-', '1', '6', 'L', 'E' } ++ .{0} ** 33,
|
||||
.charset_len = 8,
|
||||
.is_default_charset = false,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -241,7 +393,7 @@ pub fn isHTML(self: *const Mime) bool {
|
||||
}
|
||||
|
||||
// we expect value to be lowercase
|
||||
fn parseContentType(value: []const u8) !struct { ContentType, usize } {
|
||||
fn parseContentType(value: []const u8, type_buf: []u8, sub_type_buf: []u8) !struct { ContentType, usize } {
|
||||
const end = std.mem.indexOfScalarPos(u8, value, 0, ';') orelse value.len;
|
||||
const type_name = trimRight(value[0..end]);
|
||||
const attribute_start = end + 1;
|
||||
@@ -290,10 +442,18 @@ fn parseContentType(value: []const u8) !struct { ContentType, usize } {
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
return .{ .{ .other = .{
|
||||
.type = main_type,
|
||||
.sub_type = sub_type,
|
||||
} }, attribute_start };
|
||||
@memcpy(type_buf[0..main_type.len], main_type);
|
||||
@memcpy(sub_type_buf[0..sub_type.len], sub_type);
|
||||
|
||||
return .{
|
||||
.{
|
||||
.other = .{
|
||||
.type = type_buf[0..main_type.len],
|
||||
.sub_type = sub_type_buf[0..sub_type.len],
|
||||
},
|
||||
},
|
||||
attribute_start,
|
||||
};
|
||||
}
|
||||
|
||||
const VALID_CODEPOINTS = blk: {
|
||||
@@ -307,6 +467,13 @@ const VALID_CODEPOINTS = blk: {
|
||||
break :blk v;
|
||||
};
|
||||
|
||||
pub fn typeString(self: *const Mime) []const u8 {
|
||||
return switch (self.content_type) {
|
||||
.other => |o| o.type[0..o.type_len],
|
||||
else => "",
|
||||
};
|
||||
}
|
||||
|
||||
fn validType(value: []const u8) bool {
|
||||
for (value) |b| {
|
||||
if (VALID_CODEPOINTS[b] == false) {
|
||||
@@ -540,6 +707,24 @@ test "Mime: sniff" {
|
||||
|
||||
try expectHTML("<!-->");
|
||||
try expectHTML(" \n\t <!-->");
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xEF, 0xBB, 0xBF }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-8", mime.charsetString());
|
||||
}
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xFE, 0xFF }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-16BE", mime.charsetString());
|
||||
}
|
||||
|
||||
{
|
||||
const mime = Mime.sniff(&.{ 0xFF, 0xFE }).?;
|
||||
try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
|
||||
try testing.expectEqual("UTF-16LE", mime.charsetString());
|
||||
}
|
||||
}
|
||||
|
||||
const Expectation = struct {
|
||||
@@ -576,3 +761,35 @@ fn expect(expected: Expectation, input: []const u8) !void {
|
||||
try testing.expectEqual(m.charsetStringZ(), actual.charsetStringZ());
|
||||
}
|
||||
}
|
||||
|
||||
test "Mime: prescanCharset" {
|
||||
// <meta charset="X">
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<html><head><meta charset=\"utf-8\">").?);
|
||||
try testing.expectEqual("iso-8859-1", Mime.prescanCharset("<html><head><meta charset=\"iso-8859-1\">").?);
|
||||
try testing.expectEqual("shift_jis", Mime.prescanCharset("<meta charset='shift_jis'>").?);
|
||||
|
||||
// Case-insensitive tag matching
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<META charset=\"utf-8\">").?);
|
||||
try testing.expectEqual("utf-8", Mime.prescanCharset("<Meta charset=\"utf-8\">").?);
|
||||
|
||||
// <meta http-equiv="Content-Type" content="text/html; charset=X">
|
||||
try testing.expectEqual(
|
||||
"iso-8859-1",
|
||||
Mime.prescanCharset("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">").?,
|
||||
);
|
||||
|
||||
// No charset found
|
||||
try testing.expectEqual(null, Mime.prescanCharset("<html><head><title>Test</title>"));
|
||||
try testing.expectEqual(null, Mime.prescanCharset(""));
|
||||
try testing.expectEqual(null, Mime.prescanCharset("no html here"));
|
||||
|
||||
// Self-closing meta without charset must not loop forever
|
||||
try testing.expectEqual(null, Mime.prescanCharset("<meta foo=\"bar\"/>"));
|
||||
|
||||
// Charset after 1024 bytes should not be found
|
||||
var long_html: [1100]u8 = undefined;
|
||||
@memset(&long_html, ' ');
|
||||
const suffix = "<meta charset=\"windows-1252\">";
|
||||
@memcpy(long_html[1050 .. 1050 + suffix.len], suffix);
|
||||
try testing.expectEqual(null, Mime.prescanCharset(&long_html));
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -21,7 +21,8 @@ const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const Http = @import("../http/Http.zig");
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
const net_http = @import("../network/http.zig");
|
||||
const String = @import("../string.zig").String;
|
||||
|
||||
const js = @import("js/js.zig");
|
||||
@@ -60,11 +61,8 @@ ready_scripts: std.DoublyLinkedList,
|
||||
|
||||
shutdown: bool = false,
|
||||
|
||||
client: *Http.Client,
|
||||
client: *HttpClient,
|
||||
allocator: Allocator,
|
||||
buffer_pool: BufferPool,
|
||||
|
||||
script_pool: std.heap.MemoryPool(Script),
|
||||
|
||||
// We can download multiple sync modules in parallel, but we want to process
|
||||
// them in order. We can't use an std.DoublyLinkedList, like the other script types,
|
||||
@@ -88,7 +86,7 @@ importmap: std.StringHashMapUnmanaged([:0]const u8),
|
||||
// event).
|
||||
page_notified_of_completion: bool,
|
||||
|
||||
pub fn init(allocator: Allocator, http_client: *Http.Client, page: *Page) ScriptManager {
|
||||
pub fn init(allocator: Allocator, http_client: *HttpClient, page: *Page) ScriptManager {
|
||||
return .{
|
||||
.page = page,
|
||||
.async_scripts = .{},
|
||||
@@ -100,18 +98,14 @@ pub fn init(allocator: Allocator, http_client: *Http.Client, page: *Page) Script
|
||||
.imported_modules = .empty,
|
||||
.client = http_client,
|
||||
.static_scripts_done = false,
|
||||
.buffer_pool = BufferPool.init(allocator, 5),
|
||||
.page_notified_of_completion = false,
|
||||
.script_pool = std.heap.MemoryPool(Script).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ScriptManager) void {
|
||||
// necessary to free any buffers scripts may be referencing
|
||||
// necessary to free any arenas scripts may be referencing
|
||||
self.reset();
|
||||
|
||||
self.buffer_pool.deinit();
|
||||
self.script_pool.deinit();
|
||||
self.imported_modules.deinit(self.allocator);
|
||||
// we don't deinit self.importmap b/c we use the page's arena for its
|
||||
// allocations.
|
||||
@@ -120,7 +114,10 @@ pub fn deinit(self: *ScriptManager) void {
|
||||
pub fn reset(self: *ScriptManager) void {
|
||||
var it = self.imported_modules.valueIterator();
|
||||
while (it.next()) |value_ptr| {
|
||||
self.buffer_pool.release(value_ptr.buffer);
|
||||
switch (value_ptr.state) {
|
||||
.done => |script| script.deinit(),
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
self.imported_modules.clearRetainingCapacity();
|
||||
|
||||
@@ -137,13 +134,13 @@ pub fn reset(self: *ScriptManager) void {
|
||||
fn clearList(list: *std.DoublyLinkedList) void {
|
||||
while (list.popFirst()) |n| {
|
||||
const script: *Script = @fieldParentPtr("node", n);
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !Http.Headers {
|
||||
fn getHeaders(self: *ScriptManager, arena: Allocator, url: [:0]const u8) !net_http.Headers {
|
||||
var headers = try self.client.newHeaders();
|
||||
try self.page.headersForRequest(self.page.arena, url, &headers);
|
||||
try self.page.headersForRequest(arena, url, &headers);
|
||||
return headers;
|
||||
}
|
||||
|
||||
@@ -158,7 +155,6 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
// <script> has already been processed.
|
||||
return;
|
||||
}
|
||||
script_element._executed = true;
|
||||
|
||||
const element = script_element.asElement();
|
||||
if (element.getAttributeSafe(comptime .wrap("nomodule")) != null) {
|
||||
@@ -191,30 +187,48 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
return;
|
||||
};
|
||||
|
||||
var handover = false;
|
||||
const page = self.page;
|
||||
|
||||
const arena = try page.getArena(.{ .debug = "addFromElement" });
|
||||
errdefer if (!handover) {
|
||||
page.releaseArena(arena);
|
||||
};
|
||||
|
||||
var source: Script.Source = undefined;
|
||||
var remote_url: ?[:0]const u8 = null;
|
||||
const base_url = page.base();
|
||||
if (element.getAttributeSafe(comptime .wrap("src"))) |src| {
|
||||
if (try parseDataURI(page.arena, src)) |data_uri| {
|
||||
if (try parseDataURI(arena, src)) |data_uri| {
|
||||
source = .{ .@"inline" = data_uri };
|
||||
} else {
|
||||
remote_url = try URL.resolve(page.arena, base_url, src, .{});
|
||||
remote_url = try URL.resolve(arena, base_url, src, .{});
|
||||
source = .{ .remote = .{} };
|
||||
}
|
||||
} else {
|
||||
const inline_source = try element.asNode().getTextContentAlloc(page.arena);
|
||||
var buf = std.Io.Writer.Allocating.init(arena);
|
||||
try element.asNode().getChildTextContent(&buf.writer);
|
||||
try buf.writer.writeByte(0);
|
||||
const data = buf.written();
|
||||
const inline_source: [:0]const u8 = data[0 .. data.len - 1 :0];
|
||||
if (inline_source.len == 0) {
|
||||
// we haven't set script_element._executed = true yet, which is good.
|
||||
// If content is appended to the script, we will execute it then.
|
||||
page.releaseArena(arena);
|
||||
return;
|
||||
}
|
||||
source = .{ .@"inline" = inline_source };
|
||||
}
|
||||
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
|
||||
// Only set _executed (already-started) when we actually have content to execute
|
||||
script_element._executed = true;
|
||||
const is_inline = source == .@"inline";
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = kind,
|
||||
.node = .{},
|
||||
.arena = arena,
|
||||
.manager = self,
|
||||
.source = source,
|
||||
.script_element = script_element,
|
||||
@@ -258,7 +272,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
if (is_blocking == false) {
|
||||
self.scriptList(script).remove(&script.node);
|
||||
}
|
||||
script.deinit(true);
|
||||
// Let the outer errdefer handle releasing the arena if client.request fails
|
||||
}
|
||||
|
||||
try self.client.request(.{
|
||||
@@ -266,7 +280,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
.ctx = script,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.blocking = is_blocking,
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
.resource_type = .script,
|
||||
@@ -277,6 +291,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
handover = true;
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
@@ -306,7 +321,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
}
|
||||
if (script.status == 0) {
|
||||
// an error (that we already logged)
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -315,7 +330,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
self.is_evaluating = true;
|
||||
defer {
|
||||
self.is_evaluating = was_evaluating;
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
return script.eval(page);
|
||||
}
|
||||
@@ -347,11 +362,14 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
}
|
||||
errdefer _ = self.imported_modules.remove(url);
|
||||
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
const page = self.page;
|
||||
const arena = try page.getArena(.{ .debug = "preloadImport" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = .module,
|
||||
.arena = arena,
|
||||
.url = url,
|
||||
.node = .{},
|
||||
.manager = self,
|
||||
@@ -361,11 +379,7 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
.mode = .import,
|
||||
};
|
||||
|
||||
gop.value_ptr.* = ImportedModule{
|
||||
.manager = self,
|
||||
};
|
||||
|
||||
const page = self.page;
|
||||
gop.value_ptr.* = ImportedModule{};
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
@@ -380,12 +394,18 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
});
|
||||
}
|
||||
|
||||
try self.client.request(.{
|
||||
// This seems wrong since we're not dealing with an async import (unlike
|
||||
// getAsyncModule below), but all we're trying to do here is pre-load the
|
||||
// script for execution at some point in the future (when waitForImport is
|
||||
// called).
|
||||
self.async_scripts.append(&script.node);
|
||||
|
||||
self.client.request(.{
|
||||
.url = url,
|
||||
.ctx = script,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
.resource_type = .script,
|
||||
.notification = page._session.notification,
|
||||
@@ -394,13 +414,10 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
|
||||
.data_callback = Script.dataCallback,
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
|
||||
// This seems wrong since we're not dealing with an async import (unlike
|
||||
// getAsyncModule below), but all we're trying to do here is pre-load the
|
||||
// script for execution at some point in the future (when waitForImport is
|
||||
// called).
|
||||
self.async_scripts.append(&script.node);
|
||||
}) catch |err| {
|
||||
self.async_scripts.remove(&script.node);
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
@@ -421,12 +438,12 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
_ = try client.tick(200);
|
||||
continue;
|
||||
},
|
||||
.done => {
|
||||
.done => |script| {
|
||||
var shared = false;
|
||||
const buffer = entry.value_ptr.buffer;
|
||||
const waiters = entry.value_ptr.waiters;
|
||||
|
||||
if (waiters == 0) {
|
||||
if (waiters == 1) {
|
||||
self.imported_modules.removeByPtr(entry.key_ptr);
|
||||
} else {
|
||||
shared = true;
|
||||
@@ -435,7 +452,7 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
return .{
|
||||
.buffer = buffer,
|
||||
.shared = shared,
|
||||
.buffer_pool = &self.buffer_pool,
|
||||
.script = script,
|
||||
};
|
||||
},
|
||||
.err => return error.Failed,
|
||||
@@ -444,11 +461,14 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
|
||||
}
|
||||
|
||||
pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.Callback, cb_data: *anyopaque, referrer: []const u8) !void {
|
||||
const script = try self.script_pool.create();
|
||||
errdefer self.script_pool.destroy(script);
|
||||
const page = self.page;
|
||||
const arena = try page.getArena(.{ .debug = "getAsyncImport" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const script = try arena.create(Script);
|
||||
script.* = .{
|
||||
.kind = .module,
|
||||
.arena = arena,
|
||||
.url = url,
|
||||
.node = .{},
|
||||
.manager = self,
|
||||
@@ -461,7 +481,6 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
} },
|
||||
};
|
||||
|
||||
const page = self.page;
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
page.js.localScope(&ls);
|
||||
@@ -484,11 +503,12 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
self.is_evaluating = true;
|
||||
defer self.is_evaluating = was_evaluating;
|
||||
|
||||
try self.client.request(.{
|
||||
self.async_scripts.append(&script.node);
|
||||
self.client.request(.{
|
||||
.url = url,
|
||||
.method = .GET,
|
||||
.frame_id = page._frame_id,
|
||||
.headers = try self.getHeaders(url),
|
||||
.headers = try self.getHeaders(arena, url),
|
||||
.ctx = script,
|
||||
.resource_type = .script,
|
||||
.cookie_jar = &page._session.cookie_jar,
|
||||
@@ -498,9 +518,10 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
|
||||
.data_callback = Script.dataCallback,
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
|
||||
self.async_scripts.append(&script.node);
|
||||
}) catch |err| {
|
||||
self.async_scripts.remove(&script.node);
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
// Called from the Page to let us know it's done parsing the HTML. Necessary that
|
||||
@@ -525,18 +546,18 @@ fn evaluate(self: *ScriptManager) void {
|
||||
var script: *Script = @fieldParentPtr("node", n);
|
||||
switch (script.mode) {
|
||||
.async => {
|
||||
defer script.deinit(true);
|
||||
defer script.deinit();
|
||||
script.eval(page);
|
||||
},
|
||||
.import_async => |ia| {
|
||||
defer script.deinit(false);
|
||||
if (script.status < 200 or script.status > 299) {
|
||||
script.deinit();
|
||||
ia.callback(ia.data, error.FailedToLoad);
|
||||
} else {
|
||||
ia.callback(ia.data, .{
|
||||
.shared = false,
|
||||
.script = script,
|
||||
.buffer = script.source.remote,
|
||||
.buffer_pool = &self.buffer_pool,
|
||||
});
|
||||
}
|
||||
},
|
||||
@@ -562,7 +583,7 @@ fn evaluate(self: *ScriptManager) void {
|
||||
}
|
||||
defer {
|
||||
_ = self.defer_scripts.popFirst();
|
||||
script.deinit(true);
|
||||
script.deinit();
|
||||
}
|
||||
script.eval(page);
|
||||
}
|
||||
@@ -613,11 +634,12 @@ fn parseImportmap(self: *ScriptManager, script: *const Script) !void {
|
||||
}
|
||||
|
||||
pub const Script = struct {
|
||||
complete: bool,
|
||||
kind: Kind,
|
||||
complete: bool,
|
||||
status: u16 = 0,
|
||||
source: Source,
|
||||
url: []const u8,
|
||||
arena: Allocator,
|
||||
mode: ExecutionMode,
|
||||
node: std.DoublyLinkedList.Node,
|
||||
script_element: ?*Element.Html.Script,
|
||||
@@ -668,93 +690,91 @@ pub const Script = struct {
|
||||
import_async: ImportAsync,
|
||||
};
|
||||
|
||||
fn deinit(self: *Script, comptime release_buffer: bool) void {
|
||||
if ((comptime release_buffer) and self.source == .remote) {
|
||||
self.manager.buffer_pool.release(self.source.remote);
|
||||
}
|
||||
self.manager.script_pool.destroy(self);
|
||||
fn deinit(self: *Script) void {
|
||||
self.manager.page.releaseArena(self.arena);
|
||||
}
|
||||
|
||||
fn startCallback(transfer: *Http.Transfer) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer });
|
||||
fn startCallback(response: HttpClient.Response) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = response });
|
||||
}
|
||||
|
||||
fn headerCallback(transfer: *Http.Transfer) !bool {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
const header = &transfer.response_header.?;
|
||||
self.status = header.status;
|
||||
if (header.status != 200) {
|
||||
fn headerCallback(response: HttpClient.Response) !bool {
|
||||
const self: *Script = @ptrCast(@alignCast(response.ctx));
|
||||
|
||||
self.status = response.status().?;
|
||||
if (response.status() != 200) {
|
||||
log.info(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
.req = response,
|
||||
.status = response.status(),
|
||||
.content_type = response.contentType(),
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "script header", .{
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
.req = response,
|
||||
.status = response.status(),
|
||||
.content_type = response.contentType(),
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// temp debug, trying to figure out why the next assert sometimes
|
||||
// fails. Is the buffer just corrupt or is headerCallback really
|
||||
// being called twice?
|
||||
lp.assert(self.header_callback_called == false, "ScriptManager.Header recall", .{
|
||||
.m = @tagName(std.meta.activeTag(self.mode)),
|
||||
.a1 = self.debug_transfer_id,
|
||||
.a2 = self.debug_transfer_tries,
|
||||
.a3 = self.debug_transfer_aborted,
|
||||
.a4 = self.debug_transfer_bytes_received,
|
||||
.a5 = self.debug_transfer_notified_fail,
|
||||
.a6 = self.debug_transfer_redirecting,
|
||||
.a7 = self.debug_transfer_intercept_state,
|
||||
.a8 = self.debug_transfer_auth_challenge,
|
||||
.a9 = self.debug_transfer_easy_id,
|
||||
.b1 = transfer.id,
|
||||
.b2 = transfer._tries,
|
||||
.b3 = transfer.aborted,
|
||||
.b4 = transfer.bytes_received,
|
||||
.b5 = transfer._notified_fail,
|
||||
.b6 = transfer._redirecting,
|
||||
.b7 = @intFromEnum(transfer._intercept_state),
|
||||
.b8 = transfer._auth_challenge != null,
|
||||
.b9 = if (transfer._conn) |c| @intFromPtr(c.easy) else 0,
|
||||
});
|
||||
self.header_callback_called = true;
|
||||
self.debug_transfer_id = transfer.id;
|
||||
self.debug_transfer_tries = transfer._tries;
|
||||
self.debug_transfer_aborted = transfer.aborted;
|
||||
self.debug_transfer_bytes_received = transfer.bytes_received;
|
||||
self.debug_transfer_notified_fail = transfer._notified_fail;
|
||||
self.debug_transfer_redirecting = transfer._redirecting;
|
||||
self.debug_transfer_intercept_state = @intFromEnum(transfer._intercept_state);
|
||||
self.debug_transfer_auth_challenge = transfer._auth_challenge != null;
|
||||
self.debug_transfer_easy_id = if (transfer._conn) |c| @intFromPtr(c.easy) else 0;
|
||||
}
|
||||
// {
|
||||
// // temp debug, trying to figure out why the next assert sometimes
|
||||
// // fails. Is the buffer just corrupt or is headerCallback really
|
||||
// // being called twice?
|
||||
// lp.assert(self.header_callback_called == false, "ScriptManager.Header recall", .{
|
||||
// .m = @tagName(std.meta.activeTag(self.mode)),
|
||||
// .a1 = self.debug_transfer_id,
|
||||
// .a2 = self.debug_transfer_tries,
|
||||
// .a3 = self.debug_transfer_aborted,
|
||||
// .a4 = self.debug_transfer_bytes_received,
|
||||
// .a5 = self.debug_transfer_notified_fail,
|
||||
// .a6 = self.debug_transfer_redirecting,
|
||||
// .a7 = self.debug_transfer_intercept_state,
|
||||
// .a8 = self.debug_transfer_auth_challenge,
|
||||
// .a9 = self.debug_transfer_easy_id,
|
||||
// .b1 = transfer.id,
|
||||
// .b2 = transfer._tries,
|
||||
// .b3 = transfer.aborted,
|
||||
// .b4 = transfer.bytes_received,
|
||||
// .b5 = transfer._notified_fail,
|
||||
// .b6 = transfer._redirecting,
|
||||
// .b7 = @intFromEnum(transfer._intercept_state),
|
||||
// .b8 = transfer._auth_challenge != null,
|
||||
// .b9 = if (transfer._conn) |c| @intFromPtr(c.easy) else 0,
|
||||
// });
|
||||
// self.header_callback_called = true;
|
||||
// self.debug_transfer_id = transfer.id;
|
||||
// self.debug_transfer_tries = transfer._tries;
|
||||
// self.debug_transfer_aborted = transfer.aborted;
|
||||
// self.debug_transfer_bytes_received = transfer.bytes_received;
|
||||
// self.debug_transfer_notified_fail = transfer._notified_fail;
|
||||
// self.debug_transfer_redirecting = transfer._redirecting;
|
||||
// self.debug_transfer_intercept_state = @intFromEnum(transfer._intercept_state);
|
||||
// self.debug_transfer_auth_challenge = transfer._auth_challenge != null;
|
||||
// self.debug_transfer_easy_id = if (transfer._conn) |c| @intFromPtr(c.easy) else 0;
|
||||
// }
|
||||
|
||||
lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity });
|
||||
var buffer = self.manager.buffer_pool.get();
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try buffer.ensureTotalCapacity(self.manager.allocator, cl);
|
||||
var buffer: std.ArrayList(u8) = .empty;
|
||||
if (response.contentLength()) |cl| {
|
||||
try buffer.ensureTotalCapacity(self.arena, cl);
|
||||
}
|
||||
self.source = .{ .remote = buffer };
|
||||
return true;
|
||||
}
|
||||
|
||||
fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
self._dataCallback(transfer, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
|
||||
fn dataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
const self: *Script = @ptrCast(@alignCast(response.ctx));
|
||||
self._dataCallback(response, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = response, .len = data.len });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
fn _dataCallback(self: *Script, _: *Http.Transfer, data: []const u8) !void {
|
||||
try self.source.remote.appendSlice(self.manager.allocator, data);
|
||||
|
||||
fn _dataCallback(self: *Script, _: HttpClient.Response, data: []const u8) !void {
|
||||
try self.source.remote.appendSlice(self.arena, data);
|
||||
}
|
||||
|
||||
fn doneCallback(ctx: *anyopaque) !void {
|
||||
@@ -771,9 +791,8 @@ pub const Script = struct {
|
||||
} else if (self.mode == .import) {
|
||||
manager.async_scripts.remove(&self.node);
|
||||
const entry = manager.imported_modules.getPtr(self.url).?;
|
||||
entry.state = .done;
|
||||
entry.state = .{ .done = self };
|
||||
entry.buffer = self.source.remote;
|
||||
self.deinit(false);
|
||||
}
|
||||
manager.evaluate();
|
||||
}
|
||||
@@ -799,7 +818,7 @@ pub const Script = struct {
|
||||
const manager = self.manager;
|
||||
manager.scriptList(self).remove(&self.node);
|
||||
if (manager.shutdown) {
|
||||
self.deinit(true);
|
||||
self.deinit();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -811,7 +830,7 @@ pub const Script = struct {
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
self.deinit(true);
|
||||
self.deinit();
|
||||
manager.evaluate();
|
||||
}
|
||||
|
||||
@@ -939,76 +958,6 @@ pub const Script = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const BufferPool = struct {
|
||||
count: usize,
|
||||
available: List = .{},
|
||||
allocator: Allocator,
|
||||
max_concurrent_transfers: u8,
|
||||
mem_pool: std.heap.MemoryPool(Container),
|
||||
|
||||
const List = std.SinglyLinkedList;
|
||||
|
||||
const Container = struct {
|
||||
node: List.Node,
|
||||
buf: std.ArrayList(u8),
|
||||
};
|
||||
|
||||
fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
|
||||
return .{
|
||||
.available = .{},
|
||||
.count = 0,
|
||||
.allocator = allocator,
|
||||
.max_concurrent_transfers = max_concurrent_transfers,
|
||||
.mem_pool = std.heap.MemoryPool(Container).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
fn deinit(self: *BufferPool) void {
|
||||
const allocator = self.allocator;
|
||||
|
||||
var node = self.available.first;
|
||||
while (node) |n| {
|
||||
const container: *Container = @fieldParentPtr("node", n);
|
||||
container.buf.deinit(allocator);
|
||||
node = n.next;
|
||||
}
|
||||
self.mem_pool.deinit();
|
||||
}
|
||||
|
||||
fn get(self: *BufferPool) std.ArrayList(u8) {
|
||||
const node = self.available.popFirst() orelse {
|
||||
// return a new buffer
|
||||
return .{};
|
||||
};
|
||||
|
||||
self.count -= 1;
|
||||
const container: *Container = @fieldParentPtr("node", node);
|
||||
defer self.mem_pool.destroy(container);
|
||||
return container.buf;
|
||||
}
|
||||
|
||||
fn release(self: *BufferPool, buffer: ArrayList(u8)) void {
|
||||
// create mutable copy
|
||||
var b = buffer;
|
||||
|
||||
if (self.count == self.max_concurrent_transfers) {
|
||||
b.deinit(self.allocator);
|
||||
return;
|
||||
}
|
||||
|
||||
const container = self.mem_pool.create() catch |err| {
|
||||
b.deinit(self.allocator);
|
||||
log.err(.http, "SM BufferPool release", .{ .err = err });
|
||||
return;
|
||||
};
|
||||
|
||||
b.clearRetainingCapacity();
|
||||
container.* = .{ .buf = b, .node = .{} };
|
||||
self.count += 1;
|
||||
self.available.prepend(&container.node);
|
||||
}
|
||||
};
|
||||
|
||||
const ImportAsync = struct {
|
||||
data: *anyopaque,
|
||||
callback: ImportAsync.Callback,
|
||||
@@ -1018,12 +967,12 @@ const ImportAsync = struct {
|
||||
|
||||
pub const ModuleSource = struct {
|
||||
shared: bool,
|
||||
buffer_pool: *BufferPool,
|
||||
script: *Script,
|
||||
buffer: std.ArrayList(u8),
|
||||
|
||||
pub fn deinit(self: *ModuleSource) void {
|
||||
if (self.shared == false) {
|
||||
self.buffer_pool.release(self.buffer);
|
||||
self.script.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1033,15 +982,14 @@ pub const ModuleSource = struct {
|
||||
};
|
||||
|
||||
const ImportedModule = struct {
|
||||
manager: *ScriptManager,
|
||||
waiters: u16 = 1,
|
||||
state: State = .loading,
|
||||
buffer: std.ArrayList(u8) = .{},
|
||||
waiters: u16 = 1,
|
||||
|
||||
const State = enum {
|
||||
const State = union(enum) {
|
||||
err,
|
||||
done,
|
||||
loading,
|
||||
done: *Script,
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../App.zig");
|
||||
|
||||
const js = @import("js/js.zig");
|
||||
const storage = @import("webapi/storage/storage.zig");
|
||||
@@ -29,47 +30,88 @@ const History = @import("webapi/History.zig");
|
||||
|
||||
const Page = @import("Page.zig");
|
||||
const Browser = @import("Browser.zig");
|
||||
const Factory = @import("Factory.zig");
|
||||
const Notification = @import("../Notification.zig");
|
||||
const QueuedNavigation = Page.QueuedNavigation;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaPool = App.ArenaPool;
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
// Session is like a browser's tab.
|
||||
// It owns the js env and the loader for all the pages of the session.
|
||||
// You can create successively multiple pages for a session, but you must
|
||||
// deinit a page before running another one.
|
||||
// deinit a page before running another one. It manages two distinct lifetimes.
|
||||
//
|
||||
// The first is the lifetime of the Session itself, where pages are created and
|
||||
// removed, but share the same cookie jar and navigation history (etc...)
|
||||
//
|
||||
// The second is as a container the data needed by the full page hierarchy, i.e. \
|
||||
// the root page and all of its frames (and all of their frames.)
|
||||
const Session = @This();
|
||||
|
||||
// These are the fields that remain intact for the duration of the Session
|
||||
browser: *Browser,
|
||||
notification: *Notification,
|
||||
|
||||
// Used to create our Inspector and in the BrowserContext.
|
||||
arena: Allocator,
|
||||
|
||||
cookie_jar: storage.Cookie.Jar,
|
||||
storage_shed: storage.Shed,
|
||||
|
||||
history: History,
|
||||
navigation: Navigation,
|
||||
storage_shed: storage.Shed,
|
||||
notification: *Notification,
|
||||
cookie_jar: storage.Cookie.Jar,
|
||||
|
||||
// These are the fields that get reset whenever the Session's page (the root) is reset.
|
||||
factory: Factory,
|
||||
|
||||
page_arena: Allocator,
|
||||
|
||||
// Origin map for same-origin context sharing. Scoped to the root page lifetime.
|
||||
origins: std.StringHashMapUnmanaged(*js.Origin) = .empty,
|
||||
|
||||
// Shared resources for all pages in this session.
|
||||
// These live for the duration of the page tree (root + frames).
|
||||
arena_pool: *ArenaPool,
|
||||
|
||||
// In Debug, we use this to see if anything fails to release an arena back to
|
||||
// the pool.
|
||||
_arena_pool_leak_track: if (IS_DEBUG) std.AutoHashMapUnmanaged(usize, struct {
|
||||
owner: []const u8,
|
||||
count: usize,
|
||||
}) else void = if (IS_DEBUG) .empty else {},
|
||||
|
||||
page: ?Page,
|
||||
|
||||
queued_navigation: std.ArrayList(*Page),
|
||||
// Temporary buffer for about:blank navigations during processing.
|
||||
// We process async navigations first (safe from re-entrance), then sync
|
||||
// about:blank navigations (which may add to queued_navigation).
|
||||
queued_queued_navigation: std.ArrayList(*Page),
|
||||
|
||||
page_id_gen: u32,
|
||||
frame_id_gen: u32,
|
||||
|
||||
pub fn init(self: *Session, browser: *Browser, notification: *Notification) !void {
|
||||
const allocator = browser.app.allocator;
|
||||
const arena = try browser.arena_pool.acquire();
|
||||
errdefer browser.arena_pool.release(arena);
|
||||
const arena_pool = browser.arena_pool;
|
||||
|
||||
const arena = try arena_pool.acquire();
|
||||
errdefer arena_pool.release(arena);
|
||||
|
||||
const page_arena = try arena_pool.acquire();
|
||||
errdefer arena_pool.release(page_arena);
|
||||
|
||||
self.* = .{
|
||||
.page = null,
|
||||
.arena = arena,
|
||||
.arena_pool = arena_pool,
|
||||
.page_arena = page_arena,
|
||||
.factory = Factory.init(page_arena),
|
||||
.history = .{},
|
||||
.page_id_gen = 0,
|
||||
.frame_id_gen = 0,
|
||||
// The prototype (EventTarget) for Navigation is created when a Page is created.
|
||||
.navigation = .{ ._proto = undefined },
|
||||
.storage_shed = .{},
|
||||
.browser = browser,
|
||||
.queued_navigation = .{},
|
||||
.queued_queued_navigation = .{},
|
||||
.notification = notification,
|
||||
.cookie_jar = storage.Cookie.Jar.init(allocator),
|
||||
};
|
||||
@@ -79,11 +121,11 @@ pub fn deinit(self: *Session) void {
|
||||
if (self.page != null) {
|
||||
self.removePage();
|
||||
}
|
||||
const browser = self.browser;
|
||||
|
||||
self.cookie_jar.deinit();
|
||||
self.storage_shed.deinit(browser.app.allocator);
|
||||
browser.arena_pool.release(self.arena);
|
||||
|
||||
self.storage_shed.deinit(self.browser.app.allocator);
|
||||
self.arena_pool.release(self.page_arena);
|
||||
self.arena_pool.release(self.arena);
|
||||
}
|
||||
|
||||
// NOTE: the caller is not the owner of the returned value,
|
||||
@@ -113,33 +155,137 @@ pub fn removePage(self: *Session) void {
|
||||
self.notification.dispatch(.page_remove, .{});
|
||||
lp.assert(self.page != null, "Session.removePage - page is null", .{});
|
||||
|
||||
self.page.?.deinit();
|
||||
self.page.?.deinit(false);
|
||||
self.page = null;
|
||||
|
||||
self.navigation.onRemovePage();
|
||||
self.resetPageResources();
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.browser, "remove page", .{});
|
||||
}
|
||||
}
|
||||
|
||||
pub const GetArenaOpts = struct {
|
||||
debug: []const u8,
|
||||
};
|
||||
|
||||
pub fn getArena(self: *Session, opts: GetArenaOpts) !Allocator {
|
||||
const allocator = try self.arena_pool.acquire();
|
||||
if (comptime IS_DEBUG) {
|
||||
// Use session's arena (not page_arena) since page_arena gets reset between pages
|
||||
const gop = try self._arena_pool_leak_track.getOrPut(self.arena, @intFromPtr(allocator.ptr));
|
||||
if (gop.found_existing and gop.value_ptr.count != 0) {
|
||||
log.err(.bug, "ArenaPool Double Use", .{ .owner = gop.value_ptr.*.owner });
|
||||
@panic("ArenaPool Double Use");
|
||||
}
|
||||
gop.value_ptr.* = .{ .owner = opts.debug, .count = 1 };
|
||||
}
|
||||
return allocator;
|
||||
}
|
||||
|
||||
pub fn releaseArena(self: *Session, allocator: Allocator) void {
|
||||
if (comptime IS_DEBUG) {
|
||||
const found = self._arena_pool_leak_track.getPtr(@intFromPtr(allocator.ptr)).?;
|
||||
if (found.count != 1) {
|
||||
log.err(.bug, "ArenaPool Double Free", .{ .owner = found.owner, .count = found.count });
|
||||
if (comptime builtin.is_test) {
|
||||
@panic("ArenaPool Double Free");
|
||||
}
|
||||
return;
|
||||
}
|
||||
found.count = 0;
|
||||
}
|
||||
return self.arena_pool.release(allocator);
|
||||
}
|
||||
|
||||
pub fn getOrCreateOrigin(self: *Session, key_: ?[]const u8) !*js.Origin {
|
||||
const key = key_ orelse {
|
||||
var opaque_origin: [36]u8 = undefined;
|
||||
@import("../id.zig").uuidv4(&opaque_origin);
|
||||
// Origin.init will dupe opaque_origin. It's fine that this doesn't
|
||||
// get added to self.origins. In fact, it further isolates it. When the
|
||||
// context is freed, it'll call session.releaseOrigin which will free it.
|
||||
return js.Origin.init(self.browser.app, self.browser.env.isolate, &opaque_origin);
|
||||
};
|
||||
|
||||
const gop = try self.origins.getOrPut(self.arena, key);
|
||||
if (gop.found_existing) {
|
||||
const origin = gop.value_ptr.*;
|
||||
origin.rc += 1;
|
||||
return origin;
|
||||
}
|
||||
|
||||
errdefer _ = self.origins.remove(key);
|
||||
|
||||
const origin = try js.Origin.init(self.browser.app, self.browser.env.isolate, key);
|
||||
gop.key_ptr.* = origin.key;
|
||||
gop.value_ptr.* = origin;
|
||||
return origin;
|
||||
}
|
||||
|
||||
pub fn releaseOrigin(self: *Session, origin: *js.Origin) void {
|
||||
const rc = origin.rc;
|
||||
if (rc == 1) {
|
||||
_ = self.origins.remove(origin.key);
|
||||
origin.deinit(self.browser.app);
|
||||
} else {
|
||||
origin.rc = rc - 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// Reset page_arena and factory for a clean slate.
|
||||
/// Called when root page is removed.
|
||||
fn resetPageResources(self: *Session) void {
|
||||
// Check for arena leaks before releasing
|
||||
if (comptime IS_DEBUG) {
|
||||
var it = self._arena_pool_leak_track.valueIterator();
|
||||
while (it.next()) |value_ptr| {
|
||||
if (value_ptr.count > 0) {
|
||||
log.err(.bug, "ArenaPool Leak", .{ .owner = value_ptr.owner });
|
||||
}
|
||||
}
|
||||
self._arena_pool_leak_track.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
// All origins should have been released when contexts were destroyed
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(self.origins.count() == 0);
|
||||
}
|
||||
// Defensive cleanup in case origins leaked
|
||||
{
|
||||
const app = self.browser.app;
|
||||
var it = self.origins.valueIterator();
|
||||
while (it.next()) |value| {
|
||||
value.*.deinit(app);
|
||||
}
|
||||
self.origins.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
// Release old page_arena and acquire fresh one
|
||||
self.frame_id_gen = 0;
|
||||
self.arena_pool.reset(self.page_arena, 64 * 1024);
|
||||
self.factory = Factory.init(self.page_arena);
|
||||
}
|
||||
|
||||
pub fn replacePage(self: *Session) !*Page {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.browser, "replace page", .{});
|
||||
}
|
||||
|
||||
lp.assert(self.page != null, "Session.replacePage null page", .{});
|
||||
lp.assert(self.page.?.parent == null, "Session.replacePage with parent", .{});
|
||||
|
||||
var current = self.page.?;
|
||||
const frame_id = current._frame_id;
|
||||
const parent = current.parent;
|
||||
current.deinit();
|
||||
current.deinit(true);
|
||||
|
||||
self.resetPageResources();
|
||||
self.browser.env.memoryPressureNotification(.moderate);
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const page = &self.page.?;
|
||||
try Page.init(page, frame_id, self, parent);
|
||||
try Page.init(page, frame_id, self, null);
|
||||
return page;
|
||||
}
|
||||
|
||||
@@ -153,9 +299,24 @@ pub const WaitResult = enum {
|
||||
cdp_socket,
|
||||
};
|
||||
|
||||
pub fn findPage(self: *Session, frame_id: u32) ?*Page {
|
||||
pub fn findPageByFrameId(self: *Session, frame_id: u32) ?*Page {
|
||||
const page = self.currentPage() orelse return null;
|
||||
return if (page._frame_id == frame_id) page else null;
|
||||
return findPageBy(page, "_frame_id", frame_id);
|
||||
}
|
||||
|
||||
pub fn findPageById(self: *Session, id: u32) ?*Page {
|
||||
const page = self.currentPage() orelse return null;
|
||||
return findPageBy(page, "id", id);
|
||||
}
|
||||
|
||||
fn findPageBy(page: *Page, comptime field: []const u8, id: u32) ?*Page {
|
||||
if (@field(page, field) == id) return page;
|
||||
for (page.frames.items) |f| {
|
||||
if (findPageBy(f, field, id)) |found| {
|
||||
return found;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn wait(self: *Session, wait_ms: u32) WaitResult {
|
||||
@@ -174,10 +335,11 @@ pub fn wait(self: *Session, wait_ms: u32) WaitResult {
|
||||
|
||||
switch (wait_result) {
|
||||
.done => {
|
||||
if (page._queued_navigation == null) {
|
||||
if (self.queued_navigation.items.len == 0) {
|
||||
return .done;
|
||||
}
|
||||
page = self.processScheduledNavigation(page) catch return .done;
|
||||
self.processQueuedNavigation() catch return .done;
|
||||
page = &self.page.?; // might have changed
|
||||
},
|
||||
else => |result| return result,
|
||||
}
|
||||
@@ -229,7 +391,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
}
|
||||
},
|
||||
.html, .complete => {
|
||||
if (page._queued_navigation != null) {
|
||||
if (self.queued_navigation.items.len != 0) {
|
||||
return .done;
|
||||
}
|
||||
|
||||
@@ -239,7 +401,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
// scheduler.run could trigger new http transfers, so do not
|
||||
// store http_client.active BEFORE this call and then use
|
||||
// it AFTER.
|
||||
const ms_to_next_task = try browser.runMacrotasks();
|
||||
try browser.runMacrotasks();
|
||||
|
||||
// Each call to this runs scheduled load events.
|
||||
try page.dispatchLoad();
|
||||
@@ -261,16 +423,16 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
std.debug.assert(http_client.intercepted == 0);
|
||||
}
|
||||
|
||||
var ms: u64 = ms_to_next_task orelse blk: {
|
||||
if (wait_ms - ms_remaining < 100) {
|
||||
if (comptime builtin.is_test) {
|
||||
return .done;
|
||||
}
|
||||
// Look, we want to exit ASAP, but we don't want
|
||||
// to exit so fast that we've run none of the
|
||||
// background jobs.
|
||||
break :blk 50;
|
||||
}
|
||||
var ms = blk: {
|
||||
// if (wait_ms - ms_remaining < 100) {
|
||||
// if (comptime builtin.is_test) {
|
||||
// return .done;
|
||||
// }
|
||||
// // Look, we want to exit ASAP, but we don't want
|
||||
// // to exit so fast that we've run none of the
|
||||
// // background jobs.
|
||||
// break :blk 50;
|
||||
// }
|
||||
|
||||
if (browser.hasBackgroundTasks()) {
|
||||
// _we_ have nothing to run, but v8 is working on
|
||||
@@ -279,9 +441,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
break :blk 20;
|
||||
}
|
||||
|
||||
// No http transfers, no cdp extra socket, no
|
||||
// scheduled tasks, we're done.
|
||||
return .done;
|
||||
break :blk browser.msToNextMacrotask() orelse return .done;
|
||||
};
|
||||
|
||||
if (ms > ms_remaining) {
|
||||
@@ -308,9 +468,9 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
// We're here because we either have active HTTP
|
||||
// connections, or exit_when_done == false (aka, there's
|
||||
// an cdp_socket registered with the http client).
|
||||
// We should continue to run lowPriority tasks, so we
|
||||
// minimize how long we'll poll for network I/O.
|
||||
var ms_to_wait = @min(200, ms_to_next_task orelse 200);
|
||||
// We should continue to run tasks, so we minimize how long
|
||||
// we'll poll for network I/O.
|
||||
var ms_to_wait = @min(200, browser.msToNextMacrotask() orelse 200);
|
||||
if (ms_to_wait > 10 and browser.hasBackgroundTasks()) {
|
||||
// if we have background tasks, we don't want to wait too
|
||||
// long for a message from the client. We want to go back
|
||||
@@ -345,42 +505,160 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
|
||||
}
|
||||
}
|
||||
|
||||
fn processScheduledNavigation(self: *Session, current_page: *Page) !*Page {
|
||||
const browser = self.browser;
|
||||
pub fn scheduleNavigation(self: *Session, page: *Page) !void {
|
||||
const list = &self.queued_navigation;
|
||||
|
||||
const qn = current_page._queued_navigation.?;
|
||||
// take ownership of the page's queued navigation
|
||||
current_page._queued_navigation = null;
|
||||
defer browser.arena_pool.release(qn.arena);
|
||||
// Check if page is already queued
|
||||
for (list.items) |existing| {
|
||||
if (existing == page) {
|
||||
// Already queued
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
return list.append(self.arena, page);
|
||||
}
|
||||
|
||||
fn processQueuedNavigation(self: *Session) !void {
|
||||
const navigations = &self.queued_navigation;
|
||||
|
||||
if (self.page.?._queued_navigation != null) {
|
||||
// This is both an optimization and a simplification of sorts. If the
|
||||
// root page is navigating, then we don't need to process any other
|
||||
// navigation. Also, the navigation for the root page and for a frame
|
||||
// is different enough that have two distinct code blocks is, imo,
|
||||
// better. Yes, there will be duplication.
|
||||
navigations.clearRetainingCapacity();
|
||||
return self.processRootQueuedNavigation();
|
||||
}
|
||||
|
||||
const about_blank_queue = &self.queued_queued_navigation;
|
||||
defer about_blank_queue.clearRetainingCapacity();
|
||||
|
||||
// First pass: process async navigations (non-about:blank)
|
||||
// These cannot cause re-entrant navigation scheduling
|
||||
for (navigations.items) |page| {
|
||||
const qn = page._queued_navigation.?;
|
||||
|
||||
if (qn.is_about_blank) {
|
||||
// Defer about:blank to second pass
|
||||
try about_blank_queue.append(self.arena, page);
|
||||
continue;
|
||||
}
|
||||
|
||||
self.processFrameNavigation(page, qn) catch |err| {
|
||||
log.warn(.page, "frame navigation", .{ .url = qn.url, .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
// Clear the queue after first pass
|
||||
navigations.clearRetainingCapacity();
|
||||
|
||||
// Second pass: process synchronous navigations (about:blank)
|
||||
// These may trigger new navigations which go into queued_navigation
|
||||
for (about_blank_queue.items) |page| {
|
||||
const qn = page._queued_navigation.?;
|
||||
try self.processFrameNavigation(page, qn);
|
||||
}
|
||||
|
||||
// Safety: Remove any about:blank navigations that were queued during the
|
||||
// second pass to prevent infinite loops
|
||||
var i: usize = 0;
|
||||
while (i < navigations.items.len) {
|
||||
const page = navigations.items[i];
|
||||
if (page._queued_navigation) |qn| {
|
||||
if (qn.is_about_blank) {
|
||||
log.warn(.page, "recursive about blank", .{});
|
||||
_ = navigations.swapRemove(i);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
fn processFrameNavigation(self: *Session, page: *Page, qn: *QueuedNavigation) !void {
|
||||
lp.assert(page.parent != null, "root queued navigation", .{});
|
||||
|
||||
const iframe = page.iframe.?;
|
||||
const parent = page.parent.?;
|
||||
|
||||
page._queued_navigation = null;
|
||||
defer self.releaseArena(qn.arena);
|
||||
|
||||
errdefer iframe._window = null;
|
||||
|
||||
const parent_notified = page._parent_notified;
|
||||
if (parent_notified) {
|
||||
// we already notified the parent that we had loaded
|
||||
parent._pending_loads += 1;
|
||||
}
|
||||
|
||||
const frame_id, const parent = blk: {
|
||||
const page = &self.page.?;
|
||||
const frame_id = page._frame_id;
|
||||
const parent = page.parent;
|
||||
page.deinit(true);
|
||||
page.* = undefined;
|
||||
|
||||
try Page.init(page, frame_id, self, parent);
|
||||
errdefer {
|
||||
for (parent.frames.items, 0..) |frame, i| {
|
||||
if (frame == page) {
|
||||
parent.frames_sorted = false;
|
||||
_ = parent.frames.swapRemove(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (parent_notified) {
|
||||
parent._pending_loads -= 1;
|
||||
}
|
||||
page.deinit(true);
|
||||
}
|
||||
|
||||
page.iframe = iframe;
|
||||
iframe._window = page.window;
|
||||
|
||||
page.navigate(qn.url, qn.opts) catch |err| {
|
||||
log.err(.browser, "queued frame navigation error", .{ .err = err });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn processRootQueuedNavigation(self: *Session) !void {
|
||||
const current_page = &self.page.?;
|
||||
const frame_id = current_page._frame_id;
|
||||
|
||||
// create a copy before the page is cleared
|
||||
const qn = current_page._queued_navigation.?;
|
||||
current_page._queued_navigation = null;
|
||||
|
||||
defer self.arena_pool.release(qn.arena);
|
||||
|
||||
// HACK
|
||||
// Mark as released in tracking BEFORE removePage clears the map.
|
||||
// We can't call releaseArena() because that would also return the arena
|
||||
// to the pool, making the memory invalid before we use qn.url/qn.opts.
|
||||
if (comptime IS_DEBUG) {
|
||||
if (self._arena_pool_leak_track.getPtr(@intFromPtr(qn.arena.ptr))) |found| {
|
||||
found.count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
browser.http_client.abort();
|
||||
self.removePage();
|
||||
|
||||
break :blk .{ frame_id, parent };
|
||||
};
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const page = &self.page.?;
|
||||
try Page.init(page, frame_id, self, parent);
|
||||
const new_page = &self.page.?;
|
||||
try Page.init(new_page, frame_id, self, null);
|
||||
|
||||
// Creates a new NavigationEventTarget for this page.
|
||||
try self.navigation.onNewPage(page);
|
||||
try self.navigation.onNewPage(new_page);
|
||||
|
||||
// start JS env
|
||||
// Inform CDP the main page has been created such that additional context for other Worlds can be created as well
|
||||
self.notification.dispatch(.page_created, page);
|
||||
self.notification.dispatch(.page_created, new_page);
|
||||
|
||||
page.navigate(qn.url, qn.opts) catch |err| {
|
||||
log.err(.browser, "queued navigation error", .{ .err = err, .url = qn.url });
|
||||
new_page.navigate(qn.url, qn.opts) catch |err| {
|
||||
log.err(.browser, "queued navigation error", .{ .err = err });
|
||||
return err;
|
||||
};
|
||||
|
||||
return page;
|
||||
}
|
||||
|
||||
pub fn nextFrameId(self: *Session) u32 {
|
||||
@@ -388,3 +666,9 @@ pub fn nextFrameId(self: *Session) u32 {
|
||||
self.frame_id_gen = id;
|
||||
return id;
|
||||
}
|
||||
|
||||
pub fn nextPageId(self: *Session) u32 {
|
||||
const id = self.page_id_gen +% 1;
|
||||
self.page_id_gen = id;
|
||||
return id;
|
||||
}
|
||||
|
||||
@@ -167,17 +167,17 @@ pub fn ensureEncoded(allocator: Allocator, url: [:0]const u8) ![:0]const u8 {
|
||||
const query_end = if (query_start) |_| (fragment_start orelse url.len) else path_end;
|
||||
|
||||
const path_to_encode = url[path_start..path_end];
|
||||
const encoded_path = try percentEncodeSegment(allocator, path_to_encode, true);
|
||||
const encoded_path = try percentEncodeSegment(allocator, path_to_encode, .path);
|
||||
|
||||
const encoded_query = if (query_start) |qs| blk: {
|
||||
const query_to_encode = url[qs + 1 .. query_end];
|
||||
const encoded = try percentEncodeSegment(allocator, query_to_encode, false);
|
||||
const encoded = try percentEncodeSegment(allocator, query_to_encode, .query);
|
||||
break :blk encoded;
|
||||
} else null;
|
||||
|
||||
const encoded_fragment = if (fragment_start) |fs| blk: {
|
||||
const fragment_to_encode = url[fs + 1 ..];
|
||||
const encoded = try percentEncodeSegment(allocator, fragment_to_encode, false);
|
||||
const encoded = try percentEncodeSegment(allocator, fragment_to_encode, .query);
|
||||
break :blk encoded;
|
||||
} else null;
|
||||
|
||||
@@ -204,11 +204,13 @@ pub fn ensureEncoded(allocator: Allocator, url: [:0]const u8) ![:0]const u8 {
|
||||
return buf.items[0 .. buf.items.len - 1 :0];
|
||||
}
|
||||
|
||||
fn percentEncodeSegment(allocator: Allocator, segment: []const u8, comptime is_path: bool) ![]const u8 {
|
||||
const EncodeSet = enum { path, query, userinfo };
|
||||
|
||||
fn percentEncodeSegment(allocator: Allocator, segment: []const u8, comptime encode_set: EncodeSet) ![]const u8 {
|
||||
// Check if encoding is needed
|
||||
var needs_encoding = false;
|
||||
for (segment) |c| {
|
||||
if (shouldPercentEncode(c, is_path)) {
|
||||
if (shouldPercentEncode(c, encode_set)) {
|
||||
needs_encoding = true;
|
||||
break;
|
||||
}
|
||||
@@ -235,7 +237,7 @@ fn percentEncodeSegment(allocator: Allocator, segment: []const u8, comptime is_p
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldPercentEncode(c, is_path)) {
|
||||
if (shouldPercentEncode(c, encode_set)) {
|
||||
try buf.writer(allocator).print("%{X:0>2}", .{c});
|
||||
} else {
|
||||
try buf.append(allocator, c);
|
||||
@@ -245,16 +247,17 @@ fn percentEncodeSegment(allocator: Allocator, segment: []const u8, comptime is_p
|
||||
return buf.items;
|
||||
}
|
||||
|
||||
fn shouldPercentEncode(c: u8, comptime is_path: bool) bool {
|
||||
fn shouldPercentEncode(c: u8, comptime encode_set: EncodeSet) bool {
|
||||
return switch (c) {
|
||||
// Unreserved characters (RFC 3986)
|
||||
'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_', '~' => false,
|
||||
// sub-delims allowed in both path and query
|
||||
'!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=' => false,
|
||||
// Separators allowed in both path and query
|
||||
'/', ':', '@' => false,
|
||||
// Query-specific: '?' is allowed in queries but not in paths
|
||||
'?' => comptime is_path,
|
||||
// sub-delims allowed in path/query but some must be encoded in userinfo
|
||||
'!', '$', '&', '\'', '(', ')', '*', '+', ',' => false,
|
||||
';', '=' => encode_set == .userinfo,
|
||||
// Separators: userinfo must encode these
|
||||
'/', ':', '@' => encode_set == .userinfo,
|
||||
// '?' is allowed in queries but not in paths or userinfo
|
||||
'?' => encode_set != .query,
|
||||
// Everything else needs encoding (including space)
|
||||
else => true,
|
||||
};
|
||||
@@ -274,6 +277,11 @@ pub fn isCompleteHTTPUrl(url: []const u8) bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
// blob: and data: URLs are complete but don't follow scheme:// pattern
|
||||
if (std.mem.startsWith(u8, url, "blob:") or std.mem.startsWith(u8, url, "data:")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if there's a scheme (protocol) ending with ://
|
||||
const colon_pos = std.mem.indexOfScalar(u8, url, ':') orelse return false;
|
||||
|
||||
@@ -514,7 +522,7 @@ pub fn setHost(current: [:0]const u8, value: []const u8, allocator: Allocator) !
|
||||
const search = getSearch(current);
|
||||
const hash = getHash(current);
|
||||
|
||||
// Check if the host includes a port
|
||||
// Check if the new value includes a port
|
||||
const colon_pos = std.mem.lastIndexOfScalar(u8, value, ':');
|
||||
const clean_host = if (colon_pos) |pos| blk: {
|
||||
const port_str = value[pos + 1 ..];
|
||||
@@ -526,7 +534,14 @@ pub fn setHost(current: [:0]const u8, value: []const u8, allocator: Allocator) !
|
||||
break :blk value[0..pos];
|
||||
}
|
||||
break :blk value;
|
||||
} else value;
|
||||
} else blk: {
|
||||
// No port in new value - preserve existing port
|
||||
const current_port = getPort(current);
|
||||
if (current_port.len > 0) {
|
||||
break :blk try std.fmt.allocPrint(allocator, "{s}:{s}", .{ value, current_port });
|
||||
}
|
||||
break :blk value;
|
||||
};
|
||||
|
||||
return buildUrl(allocator, protocol, clean_host, pathname, search, hash);
|
||||
}
|
||||
@@ -544,6 +559,9 @@ pub fn setHostname(current: [:0]const u8, value: []const u8, allocator: Allocato
|
||||
pub fn setPort(current: [:0]const u8, value: ?[]const u8, allocator: Allocator) ![:0]const u8 {
|
||||
const hostname = getHostname(current);
|
||||
const protocol = getProtocol(current);
|
||||
const pathname = getPathname(current);
|
||||
const search = getSearch(current);
|
||||
const hash = getHash(current);
|
||||
|
||||
// Handle null or default ports
|
||||
const new_host = if (value) |port_str| blk: {
|
||||
@@ -560,7 +578,7 @@ pub fn setPort(current: [:0]const u8, value: ?[]const u8, allocator: Allocator)
|
||||
break :blk try std.fmt.allocPrint(allocator, "{s}:{s}", .{ hostname, port_str });
|
||||
} else hostname;
|
||||
|
||||
return setHost(current, new_host, allocator);
|
||||
return buildUrl(allocator, protocol, new_host, pathname, search, hash);
|
||||
}
|
||||
|
||||
pub fn setPathname(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
|
||||
@@ -608,6 +626,64 @@ pub fn setHash(current: [:0]const u8, value: []const u8, allocator: Allocator) !
|
||||
return buildUrl(allocator, protocol, host, pathname, search, hash);
|
||||
}
|
||||
|
||||
pub fn setUsername(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
|
||||
const protocol = getProtocol(current);
|
||||
const host = getHost(current);
|
||||
const pathname = getPathname(current);
|
||||
const search = getSearch(current);
|
||||
const hash = getHash(current);
|
||||
const password = getPassword(current);
|
||||
|
||||
const encoded_username = try percentEncodeSegment(allocator, value, .userinfo);
|
||||
return buildUrlWithUserInfo(allocator, protocol, encoded_username, password, host, pathname, search, hash);
|
||||
}
|
||||
|
||||
pub fn setPassword(current: [:0]const u8, value: []const u8, allocator: Allocator) ![:0]const u8 {
|
||||
const protocol = getProtocol(current);
|
||||
const host = getHost(current);
|
||||
const pathname = getPathname(current);
|
||||
const search = getSearch(current);
|
||||
const hash = getHash(current);
|
||||
const username = getUsername(current);
|
||||
|
||||
const encoded_password = try percentEncodeSegment(allocator, value, .userinfo);
|
||||
return buildUrlWithUserInfo(allocator, protocol, username, encoded_password, host, pathname, search, hash);
|
||||
}
|
||||
|
||||
fn buildUrlWithUserInfo(
|
||||
allocator: Allocator,
|
||||
protocol: []const u8,
|
||||
username: []const u8,
|
||||
password: []const u8,
|
||||
host: []const u8,
|
||||
pathname: []const u8,
|
||||
search: []const u8,
|
||||
hash: []const u8,
|
||||
) ![:0]const u8 {
|
||||
if (username.len == 0 and password.len == 0) {
|
||||
return buildUrl(allocator, protocol, host, pathname, search, hash);
|
||||
} else if (password.len == 0) {
|
||||
return std.fmt.allocPrintSentinel(allocator, "{s}//{s}@{s}{s}{s}{s}", .{
|
||||
protocol,
|
||||
username,
|
||||
host,
|
||||
pathname,
|
||||
search,
|
||||
hash,
|
||||
}, 0);
|
||||
} else {
|
||||
return std.fmt.allocPrintSentinel(allocator, "{s}//{s}:{s}@{s}{s}{s}{s}", .{
|
||||
protocol,
|
||||
username,
|
||||
password,
|
||||
host,
|
||||
pathname,
|
||||
search,
|
||||
hash,
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn concatQueryString(arena: Allocator, url: []const u8, query_string: []const u8) ![:0]const u8 {
|
||||
if (query_string.len == 0) {
|
||||
return arena.dupeZ(u8, url);
|
||||
@@ -1329,3 +1405,12 @@ test "URL: unescape" {
|
||||
try testing.expectEqual("hello%2", result);
|
||||
}
|
||||
}
|
||||
|
||||
test "URL: getHost" {
|
||||
try testing.expectEqualSlices(u8, "example.com:8080", getHost("https://example.com:8080/path"));
|
||||
try testing.expectEqualSlices(u8, "example.com", getHost("https://example.com/path"));
|
||||
try testing.expectEqualSlices(u8, "example.com:443", getHost("https://example.com:443/"));
|
||||
try testing.expectEqualSlices(u8, "example.com", getHost("https://user:pass@example.com/page"));
|
||||
try testing.expectEqualSlices(u8, "example.com:8080", getHost("https://user:pass@example.com:8080/page"));
|
||||
try testing.expectEqualSlices(u8, "", getHost("not-a-url"));
|
||||
}
|
||||
|
||||
104
src/browser/actions.zig
Normal file
104
src/browser/actions.zig
Normal file
@@ -0,0 +1,104 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("../lightpanda.zig");
|
||||
const DOMNode = @import("webapi/Node.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Event = @import("webapi/Event.zig");
|
||||
const MouseEvent = @import("webapi/event/MouseEvent.zig");
|
||||
const Page = @import("Page.zig");
|
||||
|
||||
pub fn click(node: *DOMNode, page: *Page) !void {
|
||||
const el = node.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
const mouse_event: *MouseEvent = try .initTrusted(comptime .wrap("click"), .{
|
||||
.bubbles = true,
|
||||
.cancelable = true,
|
||||
.composed = true,
|
||||
.clientX = 0,
|
||||
.clientY = 0,
|
||||
}, page);
|
||||
|
||||
page._event_manager.dispatch(el.asEventTarget(), mouse_event.asEvent()) catch |err| {
|
||||
lp.log.err(.app, "click failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fill(node: *DOMNode, text: []const u8, page: *Page) !void {
|
||||
const el = node.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
if (el.is(Element.Html.Input)) |input| {
|
||||
input.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill input failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else if (el.is(Element.Html.TextArea)) |textarea| {
|
||||
textarea.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill textarea failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else if (el.is(Element.Html.Select)) |select| {
|
||||
select.setValue(text, page) catch |err| {
|
||||
lp.log.err(.app, "fill select failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
} else {
|
||||
return error.InvalidNodeType;
|
||||
}
|
||||
|
||||
const input_evt: *Event = try .initTrusted(comptime .wrap("input"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), input_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch input event failed", .{ .err = err });
|
||||
};
|
||||
|
||||
const change_evt: *Event = try .initTrusted(comptime .wrap("change"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), change_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch change event failed", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
pub fn scroll(node: ?*DOMNode, x: ?i32, y: ?i32, page: *Page) !void {
|
||||
if (node) |n| {
|
||||
const el = n.is(Element) orelse return error.InvalidNodeType;
|
||||
|
||||
if (x) |val| {
|
||||
el.setScrollLeft(val, page) catch |err| {
|
||||
lp.log.err(.app, "setScrollLeft failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
if (y) |val| {
|
||||
el.setScrollTop(val, page) catch |err| {
|
||||
lp.log.err(.app, "setScrollTop failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
|
||||
const scroll_evt: *Event = try .initTrusted(comptime .wrap("scroll"), .{ .bubbles = true }, page);
|
||||
page._event_manager.dispatch(el.asEventTarget(), scroll_evt) catch |err| {
|
||||
lp.log.err(.app, "dispatch scroll event failed", .{ .err = err });
|
||||
};
|
||||
} else {
|
||||
page.window.scrollTo(.{ .x = x orelse 0 }, y, page) catch |err| {
|
||||
lp.log.err(.app, "scroll failed", .{ .err = err });
|
||||
return error.ActionFailed;
|
||||
};
|
||||
}
|
||||
}
|
||||
581
src/browser/interactive.zig
Normal file
581
src/browser/interactive.zig
Normal file
@@ -0,0 +1,581 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("Page.zig");
|
||||
const URL = @import("URL.zig");
|
||||
const TreeWalker = @import("webapi/TreeWalker.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Node = @import("webapi/Node.zig");
|
||||
const EventTarget = @import("webapi/EventTarget.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// Why an element was classified as interactive. Categories are listed in
/// decreasing priority: classifyInteractivity returns the first that matches.
pub const InteractivityType = enum {
    // Natively interactive HTML tag (button, select, a[href], non-hidden input, ...).
    native,
    // Carries an interactive WAI-ARIA role attribute (button, checkbox, tab, ...).
    aria,
    // Editable via the contenteditable attribute.
    contenteditable,
    // Has event listeners (addEventListener registrations or inline on* handlers).
    listener,
    // Explicitly focusable through a non-negative tabindex attribute.
    focusable,
};
|
||||
|
||||
/// Snapshot of one interactive element, with the attributes that matter
/// for automation. Serialized to JSON via `jsonStringify`.
pub const InteractiveElement = struct {
    node: *Node,
    tag_name: []const u8,
    role: ?[]const u8,
    name: ?[]const u8,
    interactivity_type: InteractivityType,
    listener_types: []const []const u8,
    disabled: bool,
    tab_index: i32,
    id: ?[]const u8,
    class: ?[]const u8,
    href: ?[]const u8,
    input_type: ?[]const u8,
    value: ?[]const u8,
    element_name: ?[]const u8,
    placeholder: ?[]const u8,

    // Writes an optional string field, omitting it entirely when null.
    fn writeOptional(jw: anytype, comptime field: []const u8, maybe_value: ?[]const u8) !void {
        const value = maybe_value orelse return;
        try jw.objectField(field);
        try jw.write(value);
    }

    /// Emits the element as a JSON object. tagName/role/name/type/tabIndex
    /// are always present (role and name may be JSON null); "listeners" is
    /// emitted only when non-empty, "disabled" only when true, and the
    /// remaining string fields only when they have a value.
    pub fn jsonStringify(self: *const InteractiveElement, jw: anytype) !void {
        try jw.beginObject();

        try jw.objectField("tagName");
        try jw.write(self.tag_name);

        try jw.objectField("role");
        try jw.write(self.role);

        try jw.objectField("name");
        try jw.write(self.name);

        try jw.objectField("type");
        try jw.write(@tagName(self.interactivity_type));

        if (self.listener_types.len > 0) {
            try jw.objectField("listeners");
            try jw.beginArray();
            for (self.listener_types) |listener_type| {
                try jw.write(listener_type);
            }
            try jw.endArray();
        }

        if (self.disabled) {
            try jw.objectField("disabled");
            try jw.write(true);
        }

        try jw.objectField("tabIndex");
        try jw.write(self.tab_index);

        try writeOptional(jw, "id", self.id);
        try writeOptional(jw, "class", self.class);
        try writeOptional(jw, "href", self.href);
        try writeOptional(jw, "inputType", self.input_type);
        try writeOptional(jw, "value", self.value);
        try writeOptional(jw, "elementName", self.element_name);
        try writeOptional(jw, "placeholder", self.placeholder);

        try jw.endObject();
    }
};
|
||||
|
||||
/// Collect all interactive elements under `root`.
/// Walks the whole subtree, skips tags that are never user-visible,
/// classifies each HTML element, and records a per-element snapshot
/// (role, accessible name, listeners, disabled state, ...).
/// All returned memory is allocated from `arena`.
pub fn collectInteractiveElements(
    root: *Node,
    arena: Allocator,
    page: *Page,
) ![]InteractiveElement {
    // Pre-build a map of event_target pointer → event type names,
    // so classify and getListenerTypes are both O(1) per element.
    const listener_targets = try buildListenerTargetMap(page, arena);

    var results: std.ArrayList(InteractiveElement) = .empty;

    var tw = TreeWalker.Full.init(root, .{});
    while (tw.next()) |node| {
        // Only HTML elements can be interactive.
        const el = node.is(Element) orelse continue;
        const html_el = el.is(Element.Html) orelse continue;

        // Skip non-visual elements that are never user-interactive.
        switch (el.getTag()) {
            .script, .style, .link, .meta, .head, .noscript, .template => continue,
            else => {},
        }

        const itype = classifyInteractivity(el, html_el, listener_targets) orelse continue;

        const listener_types = getListenerTypes(
            el.asEventTarget(),
            listener_targets,
        );

        try results.append(arena, .{
            .node = node,
            .tag_name = el.getTagNameLower(),
            .role = getRole(el),
            .name = try getAccessibleName(el, arena),
            .interactivity_type = itype,
            .listener_types = listener_types,
            .disabled = isDisabled(el),
            .tab_index = html_el.getTabIndex(),
            .id = el.getAttributeSafe(comptime .wrap("id")),
            .class = el.getAttributeSafe(comptime .wrap("class")),
            // Resolve relative hrefs against the page base URL; on
            // resolution failure fall back to the raw attribute value.
            .href = if (el.getAttributeSafe(comptime .wrap("href"))) |href|
                URL.resolve(arena, page.base(), href, .{ .encode = true }) catch href
            else
                null,
            .input_type = getInputType(el),
            .value = getInputValue(el),
            .element_name = el.getAttributeSafe(comptime .wrap("name")),
            .placeholder = el.getAttributeSafe(comptime .wrap("placeholder")),
        });
    }

    return results.items;
}
|
||||
|
||||
// Maps @intFromPtr of an EventTarget → event type names registered on it.
pub const ListenerTargetMap = std.AutoHashMapUnmanaged(usize, std.ArrayList([]const u8));

/// Pre-build a map from event_target pointer → list of event type names.
/// This lets both classifyInteractivity (O(1) "has any?") and
/// getListenerTypes (O(1) "which ones?") avoid re-iterating per element.
pub fn buildListenerTargetMap(page: *Page, arena: Allocator) !ListenerTargetMap {
    var map = ListenerTargetMap{};

    // addEventListener registrations
    var it = page._event_manager.lookup.iterator();
    while (it.next()) |entry| {
        const list = entry.value_ptr.*;
        // Only record (target, type) pairs that still have at least one
        // listener attached.
        if (list.first != null) {
            const gop = try map.getOrPut(arena, entry.key_ptr.event_target);
            if (!gop.found_existing) gop.value_ptr.* = .empty;
            try gop.value_ptr.append(arena, entry.key_ptr.type_string.str());
        }
    }

    // Inline handlers (onclick, onmousedown, etc.)
    var attr_it = page._event_target_attr_listeners.iterator();
    while (attr_it.next()) |entry| {
        const gop = try map.getOrPut(arena, @intFromPtr(entry.key_ptr.target));
        if (!gop.found_existing) gop.value_ptr.* = .empty;
        // Strip "on" prefix to get the event type name.
        try gop.value_ptr.append(arena, @tagName(entry.key_ptr.handler)[2..]);
    }

    return map;
}
|
||||
|
||||
/// Decide whether `el` is user-interactive and why. The checks below are
/// ordered by priority and the first match wins, so the returned
/// InteractivityType reflects the strongest signal (see the enum).
pub fn classifyInteractivity(
    el: *Element,
    html_el: *Element.Html,
    listener_targets: ListenerTargetMap,
) ?InteractivityType {
    // 1. Native interactive by tag
    switch (el.getTag()) {
        .button, .summary, .details, .select, .textarea => return .native,
        .anchor, .area => {
            // Anchors/areas only count when they actually have an href.
            if (el.getAttributeSafe(comptime .wrap("href")) != null) return .native;
        },
        .input => {
            // Hidden inputs are not user-interactive.
            if (el.is(Element.Html.Input)) |input| {
                if (input._input_type != .hidden) return .native;
            }
        },
        else => {},
    }

    // 2. ARIA interactive role
    if (el.getAttributeSafe(comptime .wrap("role"))) |role| {
        if (isInteractiveRole(role)) return .aria;
    }

    // 3. contenteditable (15 bytes, exceeds SSO limit for comptime)
    if (el.getAttributeSafe(.wrap("contenteditable"))) |ce| {
        // An empty value (bare `contenteditable`) also enables editing.
        if (ce.len == 0 or std.ascii.eqlIgnoreCase(ce, "true")) return .contenteditable;
    }

    // 4. Event listeners (addEventListener or inline handlers)
    const et_ptr = @intFromPtr(html_el.asEventTarget());
    if (listener_targets.get(et_ptr) != null) return .listener;

    // 5. Explicitly focusable via tabindex.
    // Only count elements with an EXPLICIT tabindex attribute,
    // since getTabIndex() returns 0 for all interactive tags by default
    // (including anchors without href and hidden inputs).
    if (el.getAttributeSafe(comptime .wrap("tabindex"))) |_| {
        if (html_el.getTabIndex() >= 0) return .focusable;
    }

    return null;
}
|
||||
|
||||
/// True when `role` (compared case-insensitively) is one of the WAI-ARIA
/// roles that imply user interactivity.
pub fn isInteractiveRole(role: []const u8) bool {
    const interactive_roles = std.StaticStringMap(void).initComptime(.{
        .{ "button", {} },
        .{ "checkbox", {} },
        .{ "combobox", {} },
        .{ "iframe", {} },
        .{ "link", {} },
        .{ "listbox", {} },
        .{ "menuitem", {} },
        .{ "menuitemcheckbox", {} },
        .{ "menuitemradio", {} },
        .{ "option", {} },
        .{ "radio", {} },
        .{ "searchbox", {} },
        .{ "slider", {} },
        .{ "spinbutton", {} },
        .{ "switch", {} },
        .{ "tab", {} },
        .{ "textbox", {} },
        .{ "treeitem", {} },
    });

    // The longest entry bounds the lowercase scratch buffer; anything
    // longer can never match.
    const max_len = "menuitemcheckbox".len;
    if (role.len > max_len) return false;

    var scratch: [max_len]u8 = undefined;
    return interactive_roles.has(std.ascii.lowerString(&scratch, role));
}
|
||||
|
||||
/// True when `role` (compared case-insensitively) is one of the WAI-ARIA
/// content/structure roles (non-interactive landmarks and document parts).
pub fn isContentRole(role: []const u8) bool {
    const content_roles = std.StaticStringMap(void).initComptime(.{
        .{ "article", {} },
        .{ "cell", {} },
        .{ "columnheader", {} },
        .{ "gridcell", {} },
        .{ "heading", {} },
        .{ "listitem", {} },
        .{ "main", {} },
        .{ "navigation", {} },
        .{ "region", {} },
        .{ "rowheader", {} },
    });

    // The longest entry bounds the lowercase scratch buffer; anything
    // longer can never match.
    const max_len = "columnheader".len;
    if (role.len > max_len) return false;

    var scratch: [max_len]u8 = undefined;
    return content_roles.has(std.ascii.lowerString(&scratch, role));
}
|
||||
|
||||
// Returns the element's ARIA role: the explicit `role` attribute when
// present, otherwise the implicit role derived from the tag (and, for
// inputs, the input type). Returns null when no role applies.
fn getRole(el: *Element) ?[]const u8 {
    // Explicit role attribute takes precedence
    if (el.getAttributeSafe(comptime .wrap("role"))) |role| return role;

    // Implicit role from tag
    return switch (el.getTag()) {
        .button, .summary => "button",
        // Anchors/areas are links only when they carry an href.
        .anchor, .area => if (el.getAttributeSafe(comptime .wrap("href")) != null) "link" else null,
        .input => blk: {
            // Map the input type to its implicit ARIA role.
            if (el.is(Element.Html.Input)) |input| {
                break :blk switch (input._input_type) {
                    .text, .tel, .url, .email => "textbox",
                    .checkbox => "checkbox",
                    .radio => "radio",
                    .button, .submit, .reset, .image => "button",
                    .range => "slider",
                    .number => "spinbutton",
                    .search => "searchbox",
                    else => null,
                };
            }
            break :blk null;
        },
        .select => "combobox",
        .textarea => "textbox",
        .details => "group",
        else => null,
    };
}
|
||||
|
||||
/// Best-effort accessible name for an element, following a simplified
/// accname precedence: aria-label, alt, title, placeholder, the value
/// attribute (inputs only), then the element's trimmed text content.
/// Returns null when none of these yield a non-empty string.
fn getAccessibleName(el: *Element, arena: Allocator) !?[]const u8 {
    // aria-label
    if (nonEmptyAttr(el.getAttributeSafe(comptime .wrap("aria-label")))) |v| return v;

    // alt (for img, input[type=image])
    if (nonEmptyAttr(el.getAttributeSafe(comptime .wrap("alt")))) |v| return v;

    // title
    if (nonEmptyAttr(el.getAttributeSafe(comptime .wrap("title")))) |v| return v;

    // placeholder
    if (nonEmptyAttr(el.getAttributeSafe(comptime .wrap("placeholder")))) |v| return v;

    // value (for buttons)
    if (el.getTag() == .input) {
        if (nonEmptyAttr(el.getAttributeSafe(comptime .wrap("value")))) |v| return v;
    }

    // Text content (first non-empty text node, trimmed)
    return try getTextContent(el.asNode(), arena);
}

// Normalizes a missing-or-empty attribute value to null.
fn nonEmptyAttr(attr: ?[]const u8) ?[]const u8 {
    const v = attr orelse return null;
    return if (v.len == 0) null else v;
}
|
||||
|
||||
// Concatenates the trimmed text nodes under `node` (excluding itself and
// any script/style subtrees), separated by single spaces. Returns null
// when no non-whitespace text exists. A single text chunk is returned
// without copying; multiple chunks are joined into `arena`-owned memory.
fn getTextContent(node: *Node, arena: Allocator) !?[]const u8 {
    var tw: TreeWalker.FullExcludeSelf = .init(node, .{});

    var arr: std.ArrayList(u8) = .empty;
    // Zero-copy fast path: holds the first (and possibly only) chunk
    // until a second chunk forces concatenation into `arr`.
    var single_chunk: ?[]const u8 = null;

    while (tw.next()) |child| {
        // Skip text inside script/style elements.
        if (child.is(Element)) |el| {
            switch (el.getTag()) {
                .script, .style => {
                    tw.skipChildren();
                    continue;
                },
                else => {},
            }
        }
        if (child.is(Node.CData)) |cdata| {
            if (cdata.is(Node.CData.Text)) |text| {
                const content = std.mem.trim(u8, text.getWholeText(), &std.ascii.whitespace);
                if (content.len > 0) {
                    if (single_chunk == null and arr.items.len == 0) {
                        // First chunk: defer the copy in case it is the only one.
                        single_chunk = content;
                    } else {
                        // Second chunk onwards: flush the deferred first
                        // chunk, then append this one, space-separated.
                        if (single_chunk) |sc| {
                            try arr.appendSlice(arena, sc);
                            try arr.append(arena, ' ');
                            single_chunk = null;
                        }
                        try arr.appendSlice(arena, content);
                        try arr.append(arena, ' ');
                    }
                }
            }
        }
    }

    if (single_chunk) |sc| return sc;
    if (arr.items.len == 0) return null;

    // strip out trailing space
    return arr.items[0 .. arr.items.len - 1];
}
|
||||
// True when the element carries its own `disabled` attribute or is
// disabled by an ancestor <fieldset disabled>.
fn isDisabled(el: *Element) bool {
    const has_disabled_attr = el.getAttributeSafe(comptime .wrap("disabled")) != null;
    return has_disabled_attr or isDisabledByFieldset(el);
}
|
||||
|
||||
/// Check if an element is disabled by an ancestor <fieldset disabled>.
/// Per spec, elements inside the first <legend> child of a disabled fieldset
/// are NOT disabled by THAT fieldset — but an outer disabled fieldset can
/// still disable them. The ancestor walk therefore continues past a
/// legend-exempt fieldset instead of returning false early (the previous
/// version stopped at the first exemption, missing outer fieldsets).
fn isDisabledByFieldset(el: *Element) bool {
    const element_node = el.asNode();
    var current: ?*Node = element_node._parent;
    while (current) |node| {
        current = node._parent;
        const ancestor = node.is(Element) orelse continue;
        if (ancestor.getTag() != .fieldset) continue;
        if (ancestor.getAttributeSafe(comptime .wrap("disabled")) == null) continue;

        // The element escapes this fieldset's disabled state only if it
        // lives inside the fieldset's FIRST <legend> child.
        var exempt = false;
        var child = ancestor.firstElementChild();
        while (child) |c| {
            if (c.getTag() == .legend) {
                exempt = c.asNode().contains(element_node);
                break;
            }
            child = c.nextElementSibling();
        }
        if (!exempt) return true;
        // Exempt here — keep walking: an outer disabled fieldset may
        // still disable this element.
    }
    return false;
}
|
||||
|
||||
// Returns the input's type string, or null when `el` is not an <input>.
fn getInputType(el: *Element) ?[]const u8 {
    const input = el.is(Element.Html.Input) orelse return null;
    return input._input_type.toString();
}
|
||||
|
||||
// Returns the input's current value, or null when `el` is not an <input>.
fn getInputValue(el: *Element) ?[]const u8 {
    const input = el.is(Element.Html.Input) orelse return null;
    return input.getValue();
}
|
||||
|
||||
/// Get all event listener types registered on this target.
/// Looks the target up in the pre-built map; returns an empty slice
/// when no listeners are registered on it.
fn getListenerTypes(target: *EventTarget, listener_targets: ListenerTargetMap) []const []const u8 {
    const entry = listener_targets.get(@intFromPtr(target)) orelse return &.{};
    return entry.items;
}
|
||||
|
||||
const testing = @import("../testing.zig");

// Parses `html` as the children of a detached <div> on a fresh test page
// and returns the interactive elements found under it. Returned slices
// live in the page's call_arena.
fn testInteractive(html: []const u8) ![]InteractiveElement {
    const page = try testing.test_session.createPage();
    defer testing.test_session.removePage();

    const doc = page.window._document;
    const div = try doc.createElement("div", null, page);
    try page.parseHtmlAsChildren(div.asNode(), html);

    return collectInteractiveElements(div.asNode(), page.call_arena, page);
}

test "browser.interactive: button" {
    const elements = try testInteractive("<button>Click me</button>");
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual("button", elements[0].tag_name);
    try testing.expectEqual("button", elements[0].role.?);
    try testing.expectEqual("Click me", elements[0].name.?);
    try testing.expectEqual(InteractivityType.native, elements[0].interactivity_type);
}

test "browser.interactive: anchor with href" {
    const elements = try testInteractive("<a href=\"/page\">Link</a>");
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual("a", elements[0].tag_name);
    try testing.expectEqual("link", elements[0].role.?);
    try testing.expectEqual("Link", elements[0].name.?);
}

test "browser.interactive: anchor without href" {
    // Anchors without href are not interactive (and get no implicit role).
    const elements = try testInteractive("<a>Not a link</a>");
    try testing.expectEqual(0, elements.len);
}

test "browser.interactive: input types" {
    // Hidden inputs must be excluded.
    const elements = try testInteractive(
        \\<input type="text" placeholder="Search">
        \\<input type="hidden" name="csrf">
    );
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual("input", elements[0].tag_name);
    try testing.expectEqual("text", elements[0].input_type.?);
    try testing.expectEqual("Search", elements[0].placeholder.?);
}

test "browser.interactive: select and textarea" {
    const elements = try testInteractive(
        \\<select name="color"><option>Red</option></select>
        \\<textarea name="msg"></textarea>
    );
    try testing.expectEqual(2, elements.len);
    try testing.expectEqual("select", elements[0].tag_name);
    try testing.expectEqual("textarea", elements[1].tag_name);
}

test "browser.interactive: aria role" {
    const elements = try testInteractive("<div role=\"button\">Custom</div>");
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual("div", elements[0].tag_name);
    try testing.expectEqual("button", elements[0].role.?);
    try testing.expectEqual(InteractivityType.aria, elements[0].interactivity_type);
}

test "browser.interactive: contenteditable" {
    const elements = try testInteractive("<div contenteditable=\"true\">Edit me</div>");
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual(InteractivityType.contenteditable, elements[0].interactivity_type);
}

test "browser.interactive: tabindex" {
    const elements = try testInteractive("<div tabindex=\"0\">Focusable</div>");
    try testing.expectEqual(1, elements.len);
    try testing.expectEqual(InteractivityType.focusable, elements[0].interactivity_type);
    try testing.expectEqual(@as(i32, 0), elements[0].tab_index);
}

test "browser.interactive: disabled" {
    const elements = try testInteractive("<button disabled>Off</button>");
    try testing.expectEqual(1, elements.len);
    try testing.expect(elements[0].disabled);
}

test "browser.interactive: disabled by fieldset" {
    const elements = try testInteractive(
        \\<fieldset disabled>
        \\  <button>Disabled</button>
        \\  <legend><button>In legend</button></legend>
        \\</fieldset>
    );
    try testing.expectEqual(2, elements.len);
    // Button outside legend is disabled by fieldset
    try testing.expect(elements[0].disabled);
    // Button inside first legend is NOT disabled
    try testing.expect(!elements[1].disabled);
}

test "browser.interactive: non-interactive div" {
    const elements = try testInteractive("<div>Just text</div>");
    try testing.expectEqual(0, elements.len);
}

test "browser.interactive: details and summary" {
    const elements = try testInteractive("<details><summary>More</summary><p>Content</p></details>");
    try testing.expectEqual(2, elements.len);
    try testing.expectEqual("details", elements[0].tag_name);
    try testing.expectEqual("summary", elements[1].tag_name);
}

test "browser.interactive: mixed elements" {
    // a[href], button, input[type=email], div[role=tab] → 4 matches;
    // the plain <p> and <div> are ignored.
    const elements = try testInteractive(
        \\<div>
        \\  <a href="/home">Home</a>
        \\  <p>Some text</p>
        \\  <button id="btn1">Submit</button>
        \\  <input type="email" placeholder="Email">
        \\  <div>Not interactive</div>
        \\  <div role="tab">Tab</div>
        \\</div>
    );
    try testing.expectEqual(4, elements.len);
}
|
||||
@@ -40,8 +40,8 @@ prev_context: *Context,
|
||||
|
||||
// Takes the raw v8 isolate and extracts the context from it.
|
||||
pub fn init(self: *Caller, v8_isolate: *v8.Isolate) void {
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(v8_isolate).?;
|
||||
initWithContext(self, Context.fromC(v8_context), v8_context);
|
||||
const ctx, const v8_context = Context.fromIsolate(.{ .handle = v8_isolate });
|
||||
initWithContext(self, ctx, v8_context);
|
||||
}
|
||||
|
||||
fn initWithContext(self: *Caller, ctx: *Context, v8_context: *const v8.Context) void {
|
||||
@@ -537,9 +537,7 @@ pub const Function = struct {
|
||||
|
||||
pub fn call(comptime T: type, info_handle: *const v8.FunctionCallbackInfo, func: anytype, comptime opts: Opts) void {
|
||||
const v8_isolate = v8.v8__FunctionCallbackInfo__GetIsolate(info_handle).?;
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(v8_isolate).?;
|
||||
|
||||
const ctx = Context.fromC(v8_context);
|
||||
const ctx, const v8_context = Context.fromIsolate(.{ .handle = v8_isolate });
|
||||
const info = FunctionCallbackInfo{ .handle = info_handle };
|
||||
|
||||
var hs: js.HandleScope = undefined;
|
||||
@@ -734,7 +732,7 @@ fn getArgs(comptime F: type, comptime offset: usize, local: *const Local, info:
|
||||
if (last_parameter_type_info == .pointer and last_parameter_type_info.pointer.size == .slice) {
|
||||
const slice_type = last_parameter_type_info.pointer.child;
|
||||
const corresponding_js_value = info.getArg(@intCast(last_js_parameter), local);
|
||||
if (corresponding_js_value.isArray() == false and corresponding_js_value.isTypedArray() == false and slice_type != u8) {
|
||||
if (slice_type == js.Value or (corresponding_js_value.isArray() == false and corresponding_js_value.isTypedArray() == false and slice_type != u8)) {
|
||||
is_variadic = true;
|
||||
if (js_parameter_count == 0) {
|
||||
@field(args, tupleFieldName(params_to_map.len + offset - 1)) = &.{};
|
||||
|
||||
@@ -23,9 +23,11 @@ const log = @import("../../log.zig");
|
||||
const js = @import("js.zig");
|
||||
const Env = @import("Env.zig");
|
||||
const bridge = @import("bridge.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
const Scheduler = @import("Scheduler.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const ScriptManager = @import("../ScriptManager.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
@@ -41,6 +43,7 @@ const Context = @This();
|
||||
id: usize,
|
||||
env: *Env,
|
||||
page: *Page,
|
||||
session: *Session,
|
||||
isolate: js.Isolate,
|
||||
|
||||
// Per-context microtask queue for isolation between contexts
|
||||
@@ -74,39 +77,11 @@ call_depth: usize = 0,
|
||||
// context.localScope
|
||||
local: ?*const js.Local = null,
|
||||
|
||||
// Serves two purposes. Like `global_objects`, this is used to free
|
||||
// every Global(Object) we've created during the lifetime of the context.
|
||||
// More importantly, it serves as an identity map - for a given Zig
|
||||
// instance, we map it to the same Global(Object).
|
||||
// The key is the @intFromPtr of the Zig value
|
||||
identity_map: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
origin: *Origin,
|
||||
|
||||
// Any type that is stored in the identity_map which has a finalizer declared
|
||||
// will have its finalizer stored here. This is only used when shutting down
|
||||
// if v8 hasn't called the finalizer directly itself.
|
||||
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
|
||||
finalizer_callback_pool: std.heap.MemoryPool(FinalizerCallback),
|
||||
|
||||
// Some web APIs have to manage opaque values. Ideally, they use an
|
||||
// js.Object, but the js.Object has no lifetime guarantee beyond the
|
||||
// current call. They can call .persist() on their js.Object to get
|
||||
// a `Global(Object)`. We need to track these to free them.
|
||||
// This used to be a map and acted like identity_map; the key was
|
||||
// the @intFromPtr(js_obj.handle). But v8 can re-use address. Without
|
||||
// a reliable way to know if an object has already been persisted,
|
||||
// we now simply persist every time persist() is called.
|
||||
global_values: std.ArrayList(v8.Global) = .empty,
|
||||
global_objects: std.ArrayList(v8.Global) = .empty,
|
||||
// Unlike other v8 types, like functions or objects, modules are not shared
|
||||
// across origins.
|
||||
global_modules: std.ArrayList(v8.Global) = .empty,
|
||||
global_promises: std.ArrayList(v8.Global) = .empty,
|
||||
global_functions: std.ArrayList(v8.Global) = .empty,
|
||||
global_promise_resolvers: std.ArrayList(v8.Global) = .empty,
|
||||
|
||||
// Temp variants stored in HashMaps for O(1) early cleanup.
|
||||
// Key is global.data_ptr.
|
||||
global_values_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
global_promises_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
global_functions_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Our module cache: normalized module specifier => module.
|
||||
module_cache: std.StringHashMapUnmanaged(ModuleEntry) = .empty,
|
||||
@@ -144,16 +119,26 @@ const ModuleEntry = struct {
|
||||
resolver_promise: ?js.Promise.Global = null,
|
||||
};
|
||||
|
||||
pub fn fromC(c_context: *const v8.Context) *Context {
|
||||
pub fn fromC(c_context: *const v8.Context) ?*Context {
|
||||
return @ptrCast(@alignCast(v8.v8__Context__GetAlignedPointerFromEmbedderData(c_context, 1)));
|
||||
}
|
||||
|
||||
pub fn fromIsolate(isolate: js.Isolate) *Context {
|
||||
return fromC(v8.v8__Isolate__GetCurrentContext(isolate.handle).?);
|
||||
/// Returns the Context and v8::Context for the given isolate.
|
||||
/// If the current context is from a destroyed Context (e.g., navigated-away iframe),
|
||||
/// falls back to the incumbent context (the calling context).
|
||||
pub fn fromIsolate(isolate: js.Isolate) struct { *Context, *const v8.Context } {
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(isolate.handle).?;
|
||||
if (fromC(v8_context)) |ctx| {
|
||||
return .{ ctx, v8_context };
|
||||
}
|
||||
// The current context's Context struct has been freed (e.g., iframe navigated away).
|
||||
// Fall back to the incumbent context (the calling context).
|
||||
const v8_incumbent = v8.v8__Isolate__GetIncumbentContext(isolate.handle).?;
|
||||
return .{ fromC(v8_incumbent).?, v8_incumbent };
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Context) void {
|
||||
if (comptime IS_DEBUG) {
|
||||
if (comptime IS_DEBUG and @import("builtin").is_test == false) {
|
||||
var it = self.unknown_properties.iterator();
|
||||
while (it.next()) |kv| {
|
||||
log.debug(.unknown_prop, "unknown property", .{
|
||||
@@ -174,64 +159,16 @@ pub fn deinit(self: *Context) void {
|
||||
// this can release objects
|
||||
self.scheduler.deinit();
|
||||
|
||||
{
|
||||
var it = self.identity_map.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
{
|
||||
var it = self.finalizer_callbacks.valueIterator();
|
||||
while (it.next()) |finalizer| {
|
||||
finalizer.*.deinit();
|
||||
}
|
||||
self.finalizer_callback_pool.deinit();
|
||||
}
|
||||
|
||||
for (self.global_values.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_objects.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_modules.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_functions.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
self.session.releaseOrigin(self.origin);
|
||||
|
||||
for (self.global_promises.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_promise_resolvers.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_values_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_promises_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_functions_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
// Clear the embedder data so that if V8 keeps this context alive
|
||||
// (because objects created in it are still referenced), we don't
|
||||
// have a dangling pointer to our freed Context struct.
|
||||
v8.v8__Context__SetAlignedPointerInEmbedderData(entered.handle, 1, null);
|
||||
|
||||
v8.v8__Global__Reset(&self.handle);
|
||||
env.isolate.notifyContextDisposed();
|
||||
@@ -241,19 +178,53 @@ pub fn deinit(self: *Context) void {
|
||||
v8.v8__MicrotaskQueue__DELETE(self.microtask_queue);
|
||||
}
|
||||
|
||||
pub fn setOrigin(self: *Context, key: ?[]const u8) !void {
|
||||
const env = self.env;
|
||||
const isolate = env.isolate;
|
||||
|
||||
lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc });
|
||||
|
||||
const origin = try self.session.getOrCreateOrigin(key);
|
||||
errdefer self.session.releaseOrigin(origin);
|
||||
try origin.takeover(self.origin);
|
||||
|
||||
self.origin = origin;
|
||||
|
||||
{
|
||||
var ls: js.Local.Scope = undefined;
|
||||
self.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
// Set the V8::Context SecurityToken, which is a big part of what allows
|
||||
// one context to access another.
|
||||
const token_local = v8.v8__Global__Get(&origin.security_token, isolate.handle);
|
||||
v8.v8__Context__SetSecurityToken(ls.local.handle, token_local);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trackGlobal(self: *Context, global: v8.Global) !void {
|
||||
return self.origin.trackGlobal(global);
|
||||
}
|
||||
|
||||
pub fn trackTemp(self: *Context, global: v8.Global) !void {
|
||||
return self.origin.trackTemp(global);
|
||||
}
|
||||
|
||||
pub fn weakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, bridge.Struct(@TypeOf(obj)).JsApi.Meta.finalizer.from_v8, v8.kParameter);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, resolved.finalizer_from_v8, v8.kParameter);
|
||||
}
|
||||
|
||||
pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -261,11 +232,12 @@ pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__ClearWeak(&fc.global);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, bridge.Struct(@TypeOf(obj)).JsApi.Meta.finalizer.from_v8, v8.kParameter);
|
||||
v8.v8__Global__SetWeakFinalizer(&fc.global, fc, resolved.finalizer_from_v8, v8.kParameter);
|
||||
}
|
||||
|
||||
pub fn strongRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const resolved = js.Local.resolveValue(obj);
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(resolved.ptr)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -275,45 +247,6 @@ pub fn strongRef(self: *Context, obj: anytype) void {
|
||||
v8.v8__Global__ClearWeak(&fc.global);
|
||||
}
|
||||
|
||||
pub fn release(self: *Context, item: anytype) void {
|
||||
if (@TypeOf(item) == *anyopaque) {
|
||||
// Existing *anyopaque path for identity_map. Called internally from
|
||||
// finalizers
|
||||
var global = self.identity_map.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__Reset(&global.value);
|
||||
|
||||
// The item has been fianalized, remove it for the finalizer callback so that
|
||||
// we don't try to call it again on shutdown.
|
||||
const fc = self.finalizer_callbacks.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
self.finalizer_callback_pool.destroy(fc.value);
|
||||
return;
|
||||
}
|
||||
|
||||
var map = switch (@TypeOf(item)) {
|
||||
js.Value.Temp => &self.global_values_temp,
|
||||
js.Promise.Temp => &self.global_promises_temp,
|
||||
js.Function.Temp => &self.global_functions_temp,
|
||||
else => |T| @compileError("Context.release cannot be called with a " ++ @typeName(T)),
|
||||
};
|
||||
|
||||
if (map.fetchRemove(item.handle.data_ptr)) |kv| {
|
||||
var global = kv.value;
|
||||
v8.v8__Global__Reset(&global);
|
||||
}
|
||||
}
|
||||
|
||||
// Any operation on the context have to be made from a local.
|
||||
pub fn localScope(self: *Context, ls: *js.Local.Scope) void {
|
||||
const isolate = self.isolate;
|
||||
@@ -336,28 +269,22 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type
|
||||
return l.toLocal(global);
|
||||
}
|
||||
|
||||
// This isn't expected to be called often. It's for converting attributes into
|
||||
// function calls, e.g. <body onload="doSomething"> will turn that "doSomething"
|
||||
// string into a js.Function which looks like: function(e) { doSomething(e) }
|
||||
// There might be more efficient ways to do this, but doing it this way means
|
||||
// our code only has to worry about js.Funtion, not some union of a js.Function
|
||||
// or a string.
|
||||
pub fn stringToPersistedFunction(self: *Context, str: []const u8) !js.Function.Global {
|
||||
pub fn getIncumbent(self: *Context) *Page {
|
||||
return fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).?.page;
|
||||
}
|
||||
|
||||
pub fn stringToPersistedFunction(
|
||||
self: *Context,
|
||||
function_body: []const u8,
|
||||
comptime parameter_names: []const []const u8,
|
||||
extensions: []const v8.Object,
|
||||
) !js.Function.Global {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
self.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
var extra: []const u8 = "";
|
||||
const normalized = std.mem.trim(u8, str, &std.ascii.whitespace);
|
||||
if (normalized.len > 0 and normalized[normalized.len - 1] != ')') {
|
||||
extra = "(e)";
|
||||
}
|
||||
const full = try std.fmt.allocPrintSentinel(self.call_arena, "(function(e) {{ {s}{s} }})", .{ normalized, extra }, 0);
|
||||
const js_val = try ls.local.compileAndRun(full, null);
|
||||
if (!js_val.isFunction()) {
|
||||
return error.StringFunctionError;
|
||||
}
|
||||
return try (js.Function{ .local = &ls.local, .handle = @ptrCast(js_val.handle) }).persist();
|
||||
const js_function = try ls.local.compileFunction(function_body, parameter_names, extensions);
|
||||
return js_function.persist();
|
||||
}
|
||||
|
||||
pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local, src: []const u8, url: []const u8, cacheable: bool) !(if (want_result) ModuleEntry else void) {
|
||||
@@ -397,15 +324,15 @@ pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local
|
||||
}
|
||||
|
||||
const owned_url = try arena.dupeZ(u8, url);
|
||||
if (cacheable and !gop.found_existing) {
|
||||
gop.key_ptr.* = owned_url;
|
||||
}
|
||||
const m = try compileModule(local, src, owned_url);
|
||||
|
||||
if (cacheable) {
|
||||
// compileModule is synchronous - nothing can modify the cache during compilation
|
||||
lp.assert(gop.value_ptr.module == null, "Context.module has module", .{});
|
||||
gop.value_ptr.module = try m.persist();
|
||||
if (!gop.found_existing) {
|
||||
gop.key_ptr.* = owned_url;
|
||||
}
|
||||
}
|
||||
|
||||
break :blk .{ m, owned_url };
|
||||
@@ -535,6 +462,14 @@ fn postCompileModule(self: *Context, mod: js.Module, url: [:0]const u8, local: *
|
||||
nested_gop.key_ptr.* = owned_specifier;
|
||||
nested_gop.value_ptr.* = .{};
|
||||
try script_manager.preloadImport(owned_specifier, url);
|
||||
} else if (nested_gop.value_ptr.module == null) {
|
||||
// Entry exists but module failed to compile previously.
|
||||
// The imported_modules entry may have been consumed, so
|
||||
// re-preload to ensure waitForImport can find it.
|
||||
// Key was stored via dupeZ so it has a sentinel in memory.
|
||||
const key = nested_gop.key_ptr.*;
|
||||
const key_z: [:0]const u8 = key.ptr[0..key.len :0];
|
||||
try script_manager.preloadImport(key_z, url);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -559,7 +494,7 @@ fn resolveModuleCallback(
|
||||
) callconv(.c) ?*const v8.Module {
|
||||
_ = import_attributes;
|
||||
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
const local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
@@ -592,7 +527,7 @@ pub fn dynamicModuleCallback(
|
||||
_ = host_defined_options;
|
||||
_ = import_attrs;
|
||||
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
const local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
@@ -639,7 +574,7 @@ pub fn dynamicModuleCallback(
|
||||
|
||||
pub fn metaObjectCallback(c_context: ?*v8.Context, c_module: ?*v8.Module, c_meta: ?*v8.Value) callconv(.c) void {
|
||||
// @HandleScope implement this without a fat context/local..
|
||||
const self = fromC(c_context.?);
|
||||
const self = fromC(c_context.?).?;
|
||||
var local = js.Local{
|
||||
.ctx = self,
|
||||
.handle = c_context.?,
|
||||
@@ -683,7 +618,15 @@ fn _resolveModuleCallback(self: *Context, referrer: js.Module, specifier: [:0]co
|
||||
return local.toLocal(m).handle;
|
||||
}
|
||||
|
||||
var source = try self.script_manager.?.waitForImport(normalized_specifier);
|
||||
var source = self.script_manager.?.waitForImport(normalized_specifier) catch |err| switch (err) {
|
||||
error.UnknownModule => blk: {
|
||||
// Module is in cache but was consumed from imported_modules
|
||||
// (e.g., by a previous failed resolution). Re-preload and retry.
|
||||
try self.script_manager.?.preloadImport(normalized_specifier, referrer_path);
|
||||
break :blk try self.script_manager.?.waitForImport(normalized_specifier);
|
||||
},
|
||||
else => return err,
|
||||
};
|
||||
defer source.deinit();
|
||||
|
||||
var try_catch: js.TryCatch = undefined;
|
||||
@@ -1023,34 +966,6 @@ pub fn queueMicrotaskFunc(self: *Context, cb: js.Function) void {
|
||||
v8.v8__MicrotaskQueue__EnqueueMicrotaskFunc(self.microtask_queue, self.isolate.handle, cb.handle);
|
||||
}
|
||||
|
||||
pub fn createFinalizerCallback(self: *Context, global: v8.Global, ptr: *anyopaque, finalizerFn: *const fn (ptr: *anyopaque, page: *Page) void) !*FinalizerCallback {
|
||||
const fc = try self.finalizer_callback_pool.create();
|
||||
fc.* = .{
|
||||
.ctx = self,
|
||||
.ptr = ptr,
|
||||
.global = global,
|
||||
.finalizerFn = finalizerFn,
|
||||
};
|
||||
return fc;
|
||||
}
|
||||
|
||||
// == Misc ==
|
||||
// A type that has a finalizer can have its finalizer called one of two ways.
|
||||
// The first is from V8 via the WeakCallback we give to weakRef. But that isn't
|
||||
// guaranteed to fire, so we track this in ctx._finalizers and call them on
|
||||
// context shutdown.
|
||||
pub const FinalizerCallback = struct {
|
||||
ctx: *Context,
|
||||
ptr: *anyopaque,
|
||||
global: v8.Global,
|
||||
finalizerFn: *const fn (ptr: *anyopaque, page: *Page) void,
|
||||
|
||||
pub fn deinit(self: *FinalizerCallback) void {
|
||||
self.finalizerFn(self.ptr, self.ctx.page);
|
||||
self.ctx.finalizer_callback_pool.destroy(self);
|
||||
}
|
||||
};
|
||||
|
||||
// == Profiler ==
|
||||
pub fn startCpuProfiler(self: *Context) void {
|
||||
if (comptime !IS_DEBUG) {
|
||||
|
||||
@@ -26,6 +26,7 @@ const App = @import("../../App.zig");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const bridge = @import("bridge.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
const Context = @import("Context.zig");
|
||||
const Isolate = @import("Isolate.zig");
|
||||
const Platform = @import("Platform.zig");
|
||||
@@ -57,6 +58,8 @@ const Env = @This();
|
||||
|
||||
app: *App,
|
||||
|
||||
allocator: Allocator,
|
||||
|
||||
platform: *const Platform,
|
||||
|
||||
// the global isolate
|
||||
@@ -70,6 +73,11 @@ isolate_params: *v8.CreateParams,
|
||||
|
||||
context_id: usize,
|
||||
|
||||
// Maps origin -> shared Origin contains, for v8 values shared across
|
||||
// same-origin Contexts. There's a mismatch here between our JS model and our
|
||||
// Browser model. Origins only live as long as the root page of a session exists.
|
||||
// It would be wrong/dangerous to re-use an Origin across root page navigations.
|
||||
|
||||
// Global handles that need to be freed on deinit
|
||||
eternal_function_templates: []v8.Eternal,
|
||||
|
||||
@@ -206,6 +214,7 @@ pub fn init(app: *App, opts: InitOpts) !Env {
|
||||
return .{
|
||||
.app = app,
|
||||
.context_id = 0,
|
||||
.allocator = allocator,
|
||||
.contexts = undefined,
|
||||
.context_count = 0,
|
||||
.isolate = isolate,
|
||||
@@ -228,7 +237,9 @@ pub fn deinit(self: *Env) void {
|
||||
ctx.deinit();
|
||||
}
|
||||
|
||||
const allocator = self.app.allocator;
|
||||
const app = self.app;
|
||||
const allocator = app.allocator;
|
||||
|
||||
if (self.inspector) |i| {
|
||||
i.deinit(allocator);
|
||||
}
|
||||
@@ -272,6 +283,7 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
|
||||
// get the global object for the context, this maps to our Window
|
||||
const global_obj = v8.v8__Context__Global(v8_context).?;
|
||||
|
||||
{
|
||||
// Store our TAO inside the internal field of the global object. This
|
||||
// maps the v8::Object -> Zig instance. Almost all objects have this, and
|
||||
@@ -287,6 +299,7 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
};
|
||||
v8.v8__Object__SetAlignedPointerInInternalField(global_obj, 0, tao);
|
||||
}
|
||||
|
||||
// our window wrapped in a v8::Global
|
||||
var global_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, global_obj, &global_global);
|
||||
@@ -294,10 +307,15 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
const context_id = self.context_id;
|
||||
self.context_id = context_id + 1;
|
||||
|
||||
const origin = try page._session.getOrCreateOrigin(null);
|
||||
errdefer page._session.releaseOrigin(origin);
|
||||
|
||||
const context = try context_arena.create(Context);
|
||||
context.* = .{
|
||||
.env = self,
|
||||
.page = page,
|
||||
.session = page._session,
|
||||
.origin = origin,
|
||||
.id = context_id,
|
||||
.isolate = isolate,
|
||||
.arena = context_arena,
|
||||
@@ -307,9 +325,8 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
.microtask_queue = microtask_queue,
|
||||
.script_manager = &page._script_manager,
|
||||
.scheduler = .init(context_arena),
|
||||
.finalizer_callback_pool = std.heap.MemoryPool(Context.FinalizerCallback).init(self.app.allocator),
|
||||
};
|
||||
try context.identity_map.putNoClobber(context_arena, @intFromPtr(page.window), global_global);
|
||||
try context.origin.identity_map.putNoClobber(origin.arena, @intFromPtr(page.window), global_global);
|
||||
|
||||
// Store a pointer to our context inside the v8 context so that, given
|
||||
// a v8 context, we can get our context out
|
||||
@@ -365,8 +382,7 @@ pub fn runMicrotasks(self: *Env) void {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runMacrotasks(self: *Env) !?u64 {
|
||||
var ms_to_next_task: ?u64 = null;
|
||||
pub fn runMacrotasks(self: *Env) !void {
|
||||
for (self.contexts[0..self.context_count]) |ctx| {
|
||||
if (comptime builtin.is_test == false) {
|
||||
// I hate this comptime check as much as you do. But we have tests
|
||||
@@ -381,13 +397,17 @@ pub fn runMacrotasks(self: *Env) !?u64 {
|
||||
var hs: js.HandleScope = undefined;
|
||||
const entered = ctx.enter(&hs);
|
||||
defer entered.exit();
|
||||
try ctx.scheduler.run();
|
||||
}
|
||||
}
|
||||
|
||||
const ms = (try ctx.scheduler.run()) orelse continue;
|
||||
if (ms_to_next_task == null or ms < ms_to_next_task.?) {
|
||||
ms_to_next_task = ms;
|
||||
pub fn msToNextMacrotask(self: *Env) ?u64 {
|
||||
var next_task: u64 = std.math.maxInt(u64);
|
||||
for (self.contexts[0..self.context_count]) |ctx| {
|
||||
const candidate = ctx.scheduler.msToNextHigh() orelse continue;
|
||||
next_task = @min(candidate, next_task);
|
||||
}
|
||||
}
|
||||
return ms_to_next_task;
|
||||
return if (next_task == std.math.maxInt(u64)) null else next_task;
|
||||
}
|
||||
|
||||
pub fn pumpMessageLoop(self: *const Env) void {
|
||||
@@ -475,20 +495,25 @@ pub fn terminate(self: *const Env) void {
|
||||
}
|
||||
|
||||
fn promiseRejectCallback(message_handle: v8.PromiseRejectMessage) callconv(.c) void {
|
||||
const promise_event = v8.v8__PromiseRejectMessage__GetEvent(&message_handle);
|
||||
if (promise_event != v8.kPromiseRejectWithNoHandler and promise_event != v8.kPromiseHandlerAddedAfterReject) {
|
||||
return;
|
||||
}
|
||||
|
||||
const promise_handle = v8.v8__PromiseRejectMessage__GetPromise(&message_handle).?;
|
||||
const v8_isolate = v8.v8__Object__GetIsolate(@ptrCast(promise_handle)).?;
|
||||
const js_isolate = js.Isolate{ .handle = v8_isolate };
|
||||
const ctx = Context.fromIsolate(js_isolate);
|
||||
const isolate = js.Isolate{ .handle = v8_isolate };
|
||||
const ctx, const v8_context = Context.fromIsolate(isolate);
|
||||
|
||||
const local = js.Local{
|
||||
.ctx = ctx,
|
||||
.isolate = js_isolate,
|
||||
.handle = v8.v8__Isolate__GetCurrentContext(v8_isolate).?,
|
||||
.isolate = isolate,
|
||||
.handle = v8_context,
|
||||
.call_arena = ctx.call_arena,
|
||||
};
|
||||
|
||||
const page = ctx.page;
|
||||
page.window.unhandledPromiseRejection(.{
|
||||
page.window.unhandledPromiseRejection(promise_event == v8.kPromiseRejectWithNoHandler, .{
|
||||
.local = &local,
|
||||
.handle = &message_handle,
|
||||
}, page) catch |err| {
|
||||
|
||||
@@ -160,8 +160,8 @@ fn _tryCallWithThis(self: *const Function, comptime T: type, this: anytype, args
|
||||
try_catch.rethrow();
|
||||
return error.TryCatchRethrow;
|
||||
}
|
||||
caught.* = try_catch.caughtOrError(local.call_arena, error.JSExecCallback);
|
||||
return error.JSExecCallback;
|
||||
caught.* = try_catch.caughtOrError(local.call_arena, error.JsException);
|
||||
return error.JsException;
|
||||
};
|
||||
|
||||
if (@typeInfo(T) == .void) {
|
||||
@@ -209,11 +209,11 @@ fn _persist(self: *const Function, comptime is_global: bool) !(if (is_global) Gl
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_functions.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_functions_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub fn tempWithThis(self: *const Function, value: anytype) !Temp {
|
||||
@@ -226,15 +226,18 @@ pub fn persistWithThis(self: *const Function, value: anytype) !Global {
|
||||
return with_this.persist();
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -252,5 +255,9 @@ fn G(comptime discriminator: u8) type {
|
||||
pub fn isEqual(self: *const Self, other: Function) bool {
|
||||
return v8.v8__Global__IsEqual(&self.handle, other.handle);
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -130,6 +130,12 @@ pub fn contextCreated(
|
||||
|
||||
pub fn contextDestroyed(self: *Inspector, context: *const v8.Context) void {
|
||||
v8.v8_inspector__Inspector__ContextDestroyed(self.handle, context);
|
||||
|
||||
if (self.default_context) |*dc| {
|
||||
if (v8.v8__Global__IsEqual(dc, context)) {
|
||||
self.default_context = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resetContextGroup(self: *const Inspector) void {
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
const std = @import("std");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const log = @import("../../log.zig");
|
||||
const string = @import("../../string.zig");
|
||||
|
||||
@@ -115,6 +116,49 @@ pub fn exec(self: *const Local, src: []const u8, name: ?[]const u8) !js.Value {
|
||||
return self.compileAndRun(src, name);
|
||||
}
|
||||
|
||||
/// Compiles a function body as function.
|
||||
///
|
||||
/// https://v8.github.io/api/head/classv8_1_1ScriptCompiler.html#a3a15bb5a7dfc3f998e6ac789e6b4646a
|
||||
pub fn compileFunction(
|
||||
self: *const Local,
|
||||
function_body: []const u8,
|
||||
/// We tend to know how many params we'll pass; can remove the comptime if necessary.
|
||||
comptime parameter_names: []const []const u8,
|
||||
extensions: []const v8.Object,
|
||||
) !js.Function {
|
||||
// TODO: Make configurable.
|
||||
const script_name = self.isolate.initStringHandle("anonymous");
|
||||
const script_source = self.isolate.initStringHandle(function_body);
|
||||
|
||||
var parameter_list: [parameter_names.len]*const v8.String = undefined;
|
||||
inline for (0..parameter_names.len) |i| {
|
||||
parameter_list[i] = self.isolate.initStringHandle(parameter_names[i]);
|
||||
}
|
||||
|
||||
// Create `ScriptOrigin`.
|
||||
var origin: v8.ScriptOrigin = undefined;
|
||||
v8.v8__ScriptOrigin__CONSTRUCT(&origin, script_name);
|
||||
|
||||
// Create `ScriptCompilerSource`.
|
||||
var script_compiler_source: v8.ScriptCompilerSource = undefined;
|
||||
v8.v8__ScriptCompiler__Source__CONSTRUCT2(script_source, &origin, null, &script_compiler_source);
|
||||
defer v8.v8__ScriptCompiler__Source__DESTRUCT(&script_compiler_source);
|
||||
|
||||
// Compile the function.
|
||||
const result = v8.v8__ScriptCompiler__CompileFunction(
|
||||
self.handle,
|
||||
&script_compiler_source,
|
||||
parameter_list.len,
|
||||
¶meter_list,
|
||||
extensions.len,
|
||||
@ptrCast(&extensions),
|
||||
v8.kNoCompileOptions,
|
||||
v8.kNoCacheNoReason,
|
||||
) orelse return error.CompilationError;
|
||||
|
||||
return .{ .local = self, .handle = result };
|
||||
}
|
||||
|
||||
pub fn compileAndRun(self: *const Local, src: []const u8, name: ?[]const u8) !js.Value {
|
||||
const script_name = self.isolate.initStringHandle(name orelse "anonymous");
|
||||
const script_source = self.isolate.initStringHandle(src);
|
||||
@@ -137,7 +181,7 @@ pub fn compileAndRun(self: *const Local, src: []const u8, name: ?[]const u8) !js
|
||||
) orelse return error.CompilationError;
|
||||
|
||||
// Run the script
|
||||
const result = v8.v8__Script__Run(v8_script, self.handle) orelse return error.ExecutionError;
|
||||
const result = v8.v8__Script__Run(v8_script, self.handle) orelse return error.JsException;
|
||||
return .{ .local = self, .handle = result };
|
||||
}
|
||||
|
||||
@@ -158,20 +202,20 @@ pub fn compileAndRun(self: *const Local, src: []const u8, name: ?[]const u8) !js
|
||||
// we can just grab it from the identity_map)
|
||||
pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object, value: anytype) !js.Object {
|
||||
const ctx = self.ctx;
|
||||
const arena = ctx.arena;
|
||||
const origin_arena = ctx.origin.arena;
|
||||
|
||||
const T = @TypeOf(value);
|
||||
switch (@typeInfo(T)) {
|
||||
.@"struct" => {
|
||||
// Struct, has to be placed on the heap
|
||||
const heap = try arena.create(T);
|
||||
const heap = try origin_arena.create(T);
|
||||
heap.* = value;
|
||||
return self.mapZigInstanceToJs(js_obj_handle, heap);
|
||||
},
|
||||
.pointer => |ptr| {
|
||||
const resolved = resolveValue(value);
|
||||
|
||||
const gop = try ctx.identity_map.getOrPut(arena, @intFromPtr(resolved.ptr));
|
||||
const gop = try ctx.origin.addIdentity(@intFromPtr(resolved.ptr));
|
||||
if (gop.found_existing) {
|
||||
// we've seen this instance before, return the same object
|
||||
return (js.Object.Global{ .handle = gop.value_ptr.* }).local(self);
|
||||
@@ -200,7 +244,7 @@ pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object,
|
||||
// The TAO contains the pointer to our Zig instance as
|
||||
// well as any meta data we'll need to use it later.
|
||||
// See the TaggedOpaque struct for more details.
|
||||
const tao = try arena.create(TaggedOpaque);
|
||||
const tao = try origin_arena.create(TaggedOpaque);
|
||||
tao.* = .{
|
||||
.value = resolved.ptr,
|
||||
.prototype_chain = resolved.prototype_chain.ptr,
|
||||
@@ -225,16 +269,17 @@ pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object,
|
||||
// can't figure out how to make that work, since it depends on
|
||||
// the [runtime] `value`.
|
||||
// We need the resolved finalizer, which we have in resolved.
|
||||
//
|
||||
// The above if statement would be more clear as:
|
||||
// if (resolved.finalizer_from_v8) |finalizer| {
|
||||
// But that's a runtime check.
|
||||
// Instead, we check if the base has finalizer. The assumption
|
||||
// here is that if a resolve type has a finalizer, then the base
|
||||
// should have a finalizer too.
|
||||
const fc = try ctx.createFinalizerCallback(gop.value_ptr.*, resolved.ptr, resolved.finalizer_from_zig.?);
|
||||
const fc = try ctx.origin.createFinalizerCallback(ctx.session, gop.value_ptr.*, resolved.ptr, resolved.finalizer_from_zig.?);
|
||||
{
|
||||
errdefer fc.deinit();
|
||||
try ctx.finalizer_callbacks.put(ctx.arena, @intFromPtr(resolved.ptr), fc);
|
||||
try ctx.origin.finalizer_callbacks.put(ctx.origin.arena, @intFromPtr(resolved.ptr), fc);
|
||||
}
|
||||
|
||||
conditionallyReference(value);
|
||||
@@ -1083,7 +1128,7 @@ const Resolved = struct {
|
||||
class_id: u16,
|
||||
prototype_chain: []const @import("TaggedOpaque.zig").PrototypeChainEntry,
|
||||
finalizer_from_v8: ?*const fn (handle: ?*const v8.WeakCallbackInfo) callconv(.c) void = null,
|
||||
finalizer_from_zig: ?*const fn (ptr: *anyopaque, page: *Page) void = null,
|
||||
finalizer_from_zig: ?*const fn (ptr: *anyopaque, session: *Session) void = null,
|
||||
};
|
||||
pub fn resolveValue(value: anytype) Resolved {
|
||||
const T = bridge.Struct(@TypeOf(value));
|
||||
@@ -1167,6 +1212,12 @@ pub fn rejectPromise(self: *const Local, value: anytype) !js.Promise {
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
pub fn rejectErrorPromise(self: *const Local, value: js.PromiseResolver.RejectError) !js.Promise {
|
||||
var resolver = js.PromiseResolver.init(self);
|
||||
resolver.rejectError("Local.rejectPromise", value);
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
pub fn resolvePromise(self: *const Local, value: anytype) !js.Promise {
|
||||
var resolver = js.PromiseResolver.init(self);
|
||||
resolver.resolve("Local.resolvePromise", value);
|
||||
|
||||
@@ -97,7 +97,7 @@ pub fn persist(self: Object) !Global {
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
|
||||
try ctx.global_objects.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
262
src/browser/js/Origin.zig
Normal file
262
src/browser/js/Origin.zig
Normal file
@@ -0,0 +1,262 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// Origin represents the shared Zig<->JS bridge state for all contexts within
|
||||
// the same origin. Multiple contexts (frames) from the same origin share a
|
||||
// single Origin, ensuring that JS objects maintain their identity across frames.
|
||||
|
||||
const std = @import("std");
|
||||
const js = @import("js.zig");
|
||||
|
||||
const App = @import("../../App.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
const Origin = @This();
|
||||
|
||||
rc: usize = 1,
|
||||
arena: Allocator,
|
||||
|
||||
// The key, e.g. lightpanda.io:443
|
||||
key: []const u8,
|
||||
|
||||
// Security token - all contexts in this realm must use the same v8::Value instance
|
||||
// as their security token for V8 to allow cross-context access
|
||||
security_token: v8.Global,
|
||||
|
||||
// Serves two purposes. Like `global_objects`, this is used to free
|
||||
// every Global(Object) we've created during the lifetime of the realm.
|
||||
// More importantly, it serves as an identity map - for a given Zig
|
||||
// instance, we map it to the same Global(Object).
|
||||
// The key is the @intFromPtr of the Zig value
|
||||
identity_map: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Some web APIs have to manage opaque values. Ideally, they use an
|
||||
// js.Object, but the js.Object has no lifetime guarantee beyond the
|
||||
// current call. They can call .persist() on their js.Object to get
|
||||
// a `Global(Object)`. We need to track these to free them.
|
||||
// This used to be a map and acted like identity_map; the key was
|
||||
// the @intFromPtr(js_obj.handle). But v8 can re-use address. Without
|
||||
// a reliable way to know if an object has already been persisted,
|
||||
// we now simply persist every time persist() is called.
|
||||
globals: std.ArrayList(v8.Global) = .empty,
|
||||
|
||||
// Temp variants stored in HashMaps for O(1) early cleanup.
|
||||
// Key is global.data_ptr.
|
||||
temps: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Any type that is stored in the identity_map which has a finalizer declared
|
||||
// will have its finalizer stored here. This is only used when shutting down
|
||||
// if v8 hasn't called the finalizer directly itself.
|
||||
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
|
||||
|
||||
taken_over: std.ArrayList(*Origin),
|
||||
|
||||
pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
|
||||
const arena = try app.arena_pool.acquire();
|
||||
errdefer app.arena_pool.release(arena);
|
||||
|
||||
var hs: js.HandleScope = undefined;
|
||||
hs.init(isolate);
|
||||
defer hs.deinit();
|
||||
|
||||
const owned_key = try arena.dupe(u8, key);
|
||||
const token_local = isolate.initStringHandle(owned_key);
|
||||
var token_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, token_local, &token_global);
|
||||
|
||||
const self = try arena.create(Origin);
|
||||
self.* = .{
|
||||
.rc = 1,
|
||||
.arena = arena,
|
||||
.key = owned_key,
|
||||
.temps = .empty,
|
||||
.globals = .empty,
|
||||
.taken_over = .empty,
|
||||
.security_token = token_global,
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Origin, app: *App) void {
|
||||
for (self.taken_over.items) |o| {
|
||||
o.deinit(app);
|
||||
}
|
||||
|
||||
// Call finalizers before releasing anything
|
||||
{
|
||||
var it = self.finalizer_callbacks.valueIterator();
|
||||
while (it.next()) |finalizer| {
|
||||
finalizer.*.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
v8.v8__Global__Reset(&self.security_token);
|
||||
|
||||
{
|
||||
var it = self.identity_map.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
for (self.globals.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.temps.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
app.arena_pool.release(self.arena);
|
||||
}
|
||||
|
||||
pub fn trackGlobal(self: *Origin, global: v8.Global) !void {
|
||||
return self.globals.append(self.arena, global);
|
||||
}
|
||||
|
||||
pub const IdentityResult = struct {
|
||||
value_ptr: *v8.Global,
|
||||
found_existing: bool,
|
||||
};
|
||||
|
||||
pub fn addIdentity(self: *Origin, ptr: usize) !IdentityResult {
|
||||
const gop = try self.identity_map.getOrPut(self.arena, ptr);
|
||||
return .{
|
||||
.value_ptr = gop.value_ptr,
|
||||
.found_existing = gop.found_existing,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn trackTemp(self: *Origin, global: v8.Global) !void {
|
||||
return self.temps.put(self.arena, global.data_ptr, global);
|
||||
}
|
||||
|
||||
pub fn releaseTemp(self: *Origin, global: v8.Global) void {
|
||||
if (self.temps.fetchRemove(global.data_ptr)) |kv| {
|
||||
var g = kv.value;
|
||||
v8.v8__Global__Reset(&g);
|
||||
}
|
||||
}
|
||||
|
||||
/// Release an item from the identity_map (called after finalizer runs from V8)
|
||||
pub fn release(self: *Origin, item: *anyopaque) void {
|
||||
var global = self.identity_map.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__Reset(&global.value);
|
||||
|
||||
// The item has been finalized, remove it from the finalizer callback so that
|
||||
// we don't try to call it again on shutdown.
|
||||
const kv = self.finalizer_callbacks.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
const fc = kv.value;
|
||||
fc.session.releaseArena(fc.arena);
|
||||
}
|
||||
|
||||
pub fn createFinalizerCallback(
|
||||
self: *Origin,
|
||||
session: *Session,
|
||||
global: v8.Global,
|
||||
ptr: *anyopaque,
|
||||
zig_finalizer: *const fn (ptr: *anyopaque, session: *Session) void,
|
||||
) !*FinalizerCallback {
|
||||
const arena = try session.getArena(.{ .debug = "FinalizerCallback" });
|
||||
errdefer session.releaseArena(arena);
|
||||
const fc = try arena.create(FinalizerCallback);
|
||||
fc.* = .{
|
||||
.arena = arena,
|
||||
.origin = self,
|
||||
.session = session,
|
||||
.ptr = ptr,
|
||||
.global = global,
|
||||
.zig_finalizer = zig_finalizer,
|
||||
};
|
||||
return fc;
|
||||
}
|
||||
|
||||
pub fn takeover(self: *Origin, original: *Origin) !void {
|
||||
const arena = self.arena;
|
||||
|
||||
try self.globals.ensureUnusedCapacity(arena, original.globals.items.len);
|
||||
for (original.globals.items) |obj| {
|
||||
self.globals.appendAssumeCapacity(obj);
|
||||
}
|
||||
original.globals.clearRetainingCapacity();
|
||||
|
||||
{
|
||||
try self.temps.ensureUnusedCapacity(arena, original.temps.count());
|
||||
var it = original.temps.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try self.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
original.temps.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try self.finalizer_callbacks.ensureUnusedCapacity(arena, original.finalizer_callbacks.count());
|
||||
var it = original.finalizer_callbacks.iterator();
|
||||
while (it.next()) |kv| {
|
||||
kv.value_ptr.*.origin = self;
|
||||
try self.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
original.finalizer_callbacks.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try self.identity_map.ensureUnusedCapacity(arena, original.identity_map.count());
|
||||
var it = original.identity_map.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try self.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
original.identity_map.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
try self.taken_over.append(self.arena, original);
|
||||
}
|
||||
|
||||
// A type that has a finalizer can have its finalizer called one of two ways.
|
||||
// The first is from V8 via the WeakCallback we give to weakRef. But that isn't
|
||||
// guaranteed to fire, so we track this in finalizer_callbacks and call them on
|
||||
// origin shutdown.
|
||||
pub const FinalizerCallback = struct {
|
||||
arena: Allocator,
|
||||
origin: *Origin,
|
||||
session: *Session,
|
||||
ptr: *anyopaque,
|
||||
global: v8.Global,
|
||||
zig_finalizer: *const fn (ptr: *anyopaque, session: *Session) void,
|
||||
|
||||
pub fn deinit(self: *FinalizerCallback) void {
|
||||
self.zig_finalizer(self.ptr, self.session);
|
||||
self.session.releaseArena(self.arena);
|
||||
}
|
||||
};
|
||||
@@ -62,22 +62,25 @@ fn _persist(self: *const Promise, comptime is_global: bool) !(if (is_global) Glo
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_promises.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_promises_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -91,5 +94,9 @@ fn G(comptime discriminator: u8) type {
|
||||
.handle = @ptrCast(v8.v8__Global__Get(&self.handle, l.isolate.handle)),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -18,7 +18,9 @@
|
||||
|
||||
const js = @import("js.zig");
|
||||
const v8 = js.v8;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const DOMException = @import("../webapi/DOMException.zig");
|
||||
|
||||
const PromiseResolver = @This();
|
||||
|
||||
@@ -63,6 +65,25 @@ pub fn reject(self: PromiseResolver, comptime source: []const u8, value: anytype
|
||||
};
|
||||
}
|
||||
|
||||
pub const RejectError = union(enum) {
|
||||
generic: []const u8,
|
||||
type_error: []const u8,
|
||||
dom_exception: anyerror,
|
||||
};
|
||||
pub fn rejectError(self: PromiseResolver, comptime source: []const u8, err: RejectError) void {
|
||||
const handle = switch (err) {
|
||||
.type_error => |str| self.local.isolate.createTypeError(str),
|
||||
.generic => |str| self.local.isolate.createError(str),
|
||||
.dom_exception => |exception| {
|
||||
self.reject(source, DOMException.fromError(exception));
|
||||
return;
|
||||
},
|
||||
};
|
||||
self._reject(js.Value{ .handle = handle, .local = self.local }) catch |reject_err| {
|
||||
log.err(.bug, "rejectError", .{ .source = source, .err = reject_err, .persistent = false });
|
||||
};
|
||||
}
|
||||
|
||||
fn _reject(self: PromiseResolver, value: anytype) !void {
|
||||
const local = self.local;
|
||||
const js_val = try local.zigValueToJs(value, .{});
|
||||
@@ -79,7 +100,7 @@ pub fn persist(self: PromiseResolver) !Global {
|
||||
var ctx = self.local.ctx;
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
try ctx.global_promise_resolvers.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
|
||||
@@ -74,9 +74,10 @@ pub fn add(self: *Scheduler, ctx: *anyopaque, cb: Callback, run_in_ms: u32, opts
|
||||
});
|
||||
}
|
||||
|
||||
pub fn run(self: *Scheduler) !?u64 {
|
||||
_ = try self.runQueue(&self.low_priority);
|
||||
return self.runQueue(&self.high_priority);
|
||||
pub fn run(self: *Scheduler) !void {
|
||||
const now = milliTimestamp(.monotonic);
|
||||
try self.runQueue(&self.low_priority, now);
|
||||
try self.runQueue(&self.high_priority, now);
|
||||
}
|
||||
|
||||
pub fn hasReadyTasks(self: *Scheduler) bool {
|
||||
@@ -84,16 +85,23 @@ pub fn hasReadyTasks(self: *Scheduler) bool {
|
||||
return queueuHasReadyTask(&self.low_priority, now) or queueuHasReadyTask(&self.high_priority, now);
|
||||
}
|
||||
|
||||
fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
|
||||
if (queue.count() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn msToNextHigh(self: *Scheduler) ?u64 {
|
||||
const task = self.high_priority.peek() orelse return null;
|
||||
const now = milliTimestamp(.monotonic);
|
||||
if (task.run_at <= now) {
|
||||
return 0;
|
||||
}
|
||||
return @intCast(task.run_at - now);
|
||||
}
|
||||
|
||||
fn runQueue(self: *Scheduler, queue: *Queue, now: u64) !void {
|
||||
if (queue.count() == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
while (queue.peek()) |*task_| {
|
||||
if (task_.run_at > now) {
|
||||
return @intCast(task_.run_at - now);
|
||||
return;
|
||||
}
|
||||
var task = queue.remove();
|
||||
if (comptime IS_DEBUG) {
|
||||
@@ -114,7 +122,7 @@ fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
|
||||
try self.low_priority.add(task);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
fn queueuHasReadyTask(queue: *Queue, now: u64) bool {
|
||||
|
||||
@@ -56,7 +56,7 @@ fn _toSlice(self: String, comptime null_terminate: bool, allocator: Allocator) !
|
||||
|
||||
pub fn toSSO(self: String, comptime global: bool) !(if (global) SSO.Global else SSO) {
|
||||
if (comptime global) {
|
||||
return .{ .str = try self.toSSOWithAlloc(self.local.ctx.arena) };
|
||||
return .{ .str = try self.toSSOWithAlloc(self.local.ctx.origin.arena) };
|
||||
}
|
||||
return self.toSSOWithAlloc(self.local.call_arena);
|
||||
}
|
||||
|
||||
@@ -245,6 +245,46 @@ pub fn toJson(self: Value, allocator: Allocator) ![]u8 {
|
||||
return js.String.toSliceWithAlloc(.{ .local = local, .handle = str_handle }, allocator);
|
||||
}
|
||||
|
||||
// Currently does not support host objects (Blob, File, etc.) or transferables
|
||||
// which require delegate callbacks to be implemented.
|
||||
pub fn structuredClone(self: Value) !Value {
|
||||
const local = self.local;
|
||||
const v8_context = local.handle;
|
||||
const v8_isolate = local.isolate.handle;
|
||||
|
||||
const size, const data = blk: {
|
||||
const serializer = v8.v8__ValueSerializer__New(v8_isolate, null) orelse return error.JsException;
|
||||
defer v8.v8__ValueSerializer__DELETE(serializer);
|
||||
|
||||
var write_result: v8.MaybeBool = undefined;
|
||||
v8.v8__ValueSerializer__WriteHeader(serializer);
|
||||
v8.v8__ValueSerializer__WriteValue(serializer, v8_context, self.handle, &write_result);
|
||||
if (!write_result.has_value or !write_result.value) {
|
||||
return error.JsException;
|
||||
}
|
||||
|
||||
var size: usize = undefined;
|
||||
const data = v8.v8__ValueSerializer__Release(serializer, &size) orelse return error.JsException;
|
||||
break :blk .{ size, data };
|
||||
};
|
||||
|
||||
defer v8.v8__ValueSerializer__FreeBuffer(data);
|
||||
|
||||
const cloned_handle = blk: {
|
||||
const deserializer = v8.v8__ValueDeserializer__New(v8_isolate, data, size, null) orelse return error.JsException;
|
||||
defer v8.v8__ValueDeserializer__DELETE(deserializer);
|
||||
|
||||
var read_header_result: v8.MaybeBool = undefined;
|
||||
v8.v8__ValueDeserializer__ReadHeader(deserializer, v8_context, &read_header_result);
|
||||
if (!read_header_result.has_value or !read_header_result.value) {
|
||||
return error.JsException;
|
||||
}
|
||||
break :blk v8.v8__ValueDeserializer__ReadValue(deserializer, v8_context) orelse return error.JsException;
|
||||
};
|
||||
|
||||
return .{ .local = local, .handle = cloned_handle };
|
||||
}
|
||||
|
||||
pub fn persist(self: Value) !Global {
|
||||
return self._persist(true);
|
||||
}
|
||||
@@ -259,11 +299,11 @@ fn _persist(self: *const Value, comptime is_global: bool) !(if (is_global) Globa
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_values.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_values_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub fn toZig(self: Value, comptime T: type) !T {
|
||||
@@ -310,15 +350,18 @@ pub fn format(self: Value, writer: *std.Io.Writer) !void {
|
||||
return js_str.format(writer);
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -336,5 +379,9 @@ fn G(comptime discriminator: u8) type {
|
||||
pub fn isEqual(self: *const Self, other: Value) bool {
|
||||
return v8.v8__Global__IsEqual(&self.handle, other.handle);
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -21,11 +21,13 @@ const js = @import("js.zig");
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("../../log.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
|
||||
const Caller = @import("Caller.zig");
|
||||
const Context = @import("Context.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
@@ -104,24 +106,24 @@ pub fn Builder(comptime T: type) type {
|
||||
return entries;
|
||||
}
|
||||
|
||||
pub fn finalizer(comptime func: *const fn (self: *T, shutdown: bool, page: *Page) void) Finalizer {
|
||||
pub fn finalizer(comptime func: *const fn (self: *T, shutdown: bool, session: *Session) void) Finalizer {
|
||||
return .{
|
||||
.from_zig = struct {
|
||||
fn wrap(ptr: *anyopaque, page: *Page) void {
|
||||
func(@ptrCast(@alignCast(ptr)), true, page);
|
||||
fn wrap(ptr: *anyopaque, session: *Session) void {
|
||||
func(@ptrCast(@alignCast(ptr)), true, session);
|
||||
}
|
||||
}.wrap,
|
||||
|
||||
.from_v8 = struct {
|
||||
fn wrap(handle: ?*const v8.WeakCallbackInfo) callconv(.c) void {
|
||||
const ptr = v8.v8__WeakCallbackInfo__GetParameter(handle.?).?;
|
||||
const fc: *Context.FinalizerCallback = @ptrCast(@alignCast(ptr));
|
||||
const fc: *Origin.FinalizerCallback = @ptrCast(@alignCast(ptr));
|
||||
|
||||
const ctx = fc.ctx;
|
||||
const origin = fc.origin;
|
||||
const value_ptr = fc.ptr;
|
||||
if (ctx.finalizer_callbacks.contains(@intFromPtr(value_ptr))) {
|
||||
func(@ptrCast(@alignCast(value_ptr)), false, ctx.page);
|
||||
ctx.release(value_ptr);
|
||||
if (origin.finalizer_callbacks.contains(@intFromPtr(value_ptr))) {
|
||||
func(@ptrCast(@alignCast(value_ptr)), false, fc.session);
|
||||
origin.release(value_ptr);
|
||||
} else {
|
||||
// A bit weird, but v8 _requires_ that we release it
|
||||
// If we don't. We'll 100% crash.
|
||||
@@ -413,12 +415,12 @@ pub const Property = struct {
|
||||
};
|
||||
|
||||
const Finalizer = struct {
|
||||
// The finalizer wrapper when called fro Zig. This is only called on
|
||||
// Context.deinit
|
||||
from_zig: *const fn (ctx: *anyopaque, page: *Page) void,
|
||||
// The finalizer wrapper when called from Zig. This is only called on
|
||||
// Origin.deinit
|
||||
from_zig: *const fn (ctx: *anyopaque, session: *Session) void,
|
||||
|
||||
// The finalizer wrapper when called from V8. This may never be called
|
||||
// (hence why we fallback to calling in Context.denit). If it is called,
|
||||
// (hence why we fallback to calling in Origin.deinit). If it is called,
|
||||
// it is only ever called after we SetWeak on the Global.
|
||||
from_v8: *const fn (?*const v8.WeakCallbackInfo) callconv(.c) void,
|
||||
};
|
||||
@@ -723,6 +725,8 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/collections.zig"),
|
||||
@import("../webapi/Console.zig"),
|
||||
@import("../webapi/Crypto.zig"),
|
||||
@import("../webapi/Permissions.zig"),
|
||||
@import("../webapi/StorageManager.zig"),
|
||||
@import("../webapi/CSS.zig"),
|
||||
@import("../webapi/css/CSSRule.zig"),
|
||||
@import("../webapi/css/CSSRuleList.zig"),
|
||||
@@ -730,6 +734,7 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/css/CSSStyleRule.zig"),
|
||||
@import("../webapi/css/CSSStyleSheet.zig"),
|
||||
@import("../webapi/css/CSSStyleProperties.zig"),
|
||||
@import("../webapi/css/FontFace.zig"),
|
||||
@import("../webapi/css/FontFaceSet.zig"),
|
||||
@import("../webapi/css/MediaQueryList.zig"),
|
||||
@import("../webapi/css/StyleSheetList.zig"),
|
||||
@@ -845,6 +850,7 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/event/FocusEvent.zig"),
|
||||
@import("../webapi/event/WheelEvent.zig"),
|
||||
@import("../webapi/event/TextEvent.zig"),
|
||||
@import("../webapi/event/InputEvent.zig"),
|
||||
@import("../webapi/event/PromiseRejectionEvent.zig"),
|
||||
@import("../webapi/MessageChannel.zig"),
|
||||
@import("../webapi/MessagePort.zig"),
|
||||
@@ -882,6 +888,7 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/IdleDeadline.zig"),
|
||||
@import("../webapi/Blob.zig"),
|
||||
@import("../webapi/File.zig"),
|
||||
@import("../webapi/FileList.zig"),
|
||||
@import("../webapi/FileReader.zig"),
|
||||
@import("../webapi/Screen.zig"),
|
||||
@import("../webapi/VisualViewport.zig"),
|
||||
|
||||
@@ -24,6 +24,7 @@ const string = @import("../../string.zig");
|
||||
pub const Env = @import("Env.zig");
|
||||
pub const bridge = @import("bridge.zig");
|
||||
pub const Caller = @import("Caller.zig");
|
||||
pub const Origin = @import("Origin.zig");
|
||||
pub const Context = @import("Context.zig");
|
||||
pub const Local = @import("Local.zig");
|
||||
pub const Inspector = @import("Inspector.zig");
|
||||
@@ -161,7 +162,7 @@ pub fn ArrayBufferRef(comptime kind: ArrayType) type {
|
||||
var ctx = self.local.ctx;
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
try ctx.global_values.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ const TreeWalker = @import("webapi/TreeWalker.zig");
|
||||
const CData = @import("webapi/CData.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Node = @import("webapi/Node.zig");
|
||||
const isAllWhitespace = @import("../string.zig").isAllWhitespace;
|
||||
|
||||
pub const Opts = struct {
|
||||
// Options for future customization (e.g., dialect)
|
||||
@@ -46,13 +47,6 @@ const State = struct {
|
||||
last_char_was_newline: bool = true,
|
||||
};
|
||||
|
||||
fn isBlock(tag: Element.Tag) bool {
|
||||
return switch (tag) {
|
||||
.p, .div, .section, .article, .main, .header, .footer, .nav, .aside, .h1, .h2, .h3, .h4, .h5, .h6, .ul, .ol, .blockquote, .pre, .table, .hr => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
fn shouldAddSpacing(tag: Element.Tag) bool {
|
||||
return switch (tag) {
|
||||
.p, .h1, .h2, .h3, .h4, .h5, .h6, .blockquote, .pre, .table => true,
|
||||
@@ -99,26 +93,18 @@ fn isSignificantText(node: *Node) bool {
|
||||
}
|
||||
|
||||
fn isVisibleElement(el: *Element) bool {
|
||||
return switch (el.getTag()) {
|
||||
.script, .style, .noscript, .template, .head, .meta, .link, .title, .svg => false,
|
||||
else => true,
|
||||
};
|
||||
const tag = el.getTag();
|
||||
return !tag.isMetadata() and tag != .svg;
|
||||
}
|
||||
|
||||
fn getAnchorLabel(el: *Element) ?[]const u8 {
|
||||
return el.getAttributeSafe(comptime .wrap("aria-label")) orelse el.getAttributeSafe(comptime .wrap("title"));
|
||||
}
|
||||
|
||||
fn isAllWhitespace(text: []const u8) bool {
|
||||
return for (text) |c| {
|
||||
if (!std.ascii.isWhitespace(c)) break false;
|
||||
} else true;
|
||||
}
|
||||
|
||||
fn hasBlockDescendant(root: *Node) bool {
|
||||
var tw = TreeWalker.FullExcludeSelf.Elements.init(root, .{});
|
||||
while (tw.next()) |el| {
|
||||
if (isBlock(el.getTag())) return true;
|
||||
if (el.getTag().isBlock()) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -138,53 +124,49 @@ fn hasVisibleContent(root: *Node) bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
fn ensureNewline(state: *State, writer: *std.Io.Writer) !void {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
state.last_char_was_newline = true;
|
||||
}
|
||||
}
|
||||
const Context = struct {
|
||||
state: State,
|
||||
writer: *std.Io.Writer,
|
||||
page: *Page,
|
||||
|
||||
pub fn dump(node: *Node, opts: Opts, writer: *std.Io.Writer, page: *Page) !void {
|
||||
_ = opts;
|
||||
var state = State{};
|
||||
try render(node, &state, writer, page);
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
fn ensureNewline(self: *Context) !void {
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte('\n');
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render(node: *Node, state: *State, writer: *std.Io.Writer, page: *Page) error{WriteFailed}!void {
|
||||
fn render(self: *Context, node: *Node) error{WriteFailed}!void {
|
||||
switch (node._type) {
|
||||
.document, .document_fragment => {
|
||||
try renderChildren(node, state, writer, page);
|
||||
try self.renderChildren(node);
|
||||
},
|
||||
.element => |el| {
|
||||
try renderElement(el, state, writer, page);
|
||||
try self.renderElement(el);
|
||||
},
|
||||
.cdata => |cd| {
|
||||
if (node.is(Node.CData.Text)) |_| {
|
||||
var text = cd.getData().str();
|
||||
if (state.pre_node) |pre| {
|
||||
if (self.state.pre_node) |pre| {
|
||||
if (node.parentNode() == pre and node.nextSibling() == null) {
|
||||
text = std.mem.trimRight(u8, text, " \t\r\n");
|
||||
}
|
||||
}
|
||||
try renderText(text, state, writer);
|
||||
try self.renderText(text);
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn renderChildren(parent: *Node, state: *State, writer: *std.Io.Writer, page: *Page) !void {
|
||||
fn renderChildren(self: *Context, parent: *Node) !void {
|
||||
var it = parent.childrenIterator();
|
||||
while (it.next()) |child| {
|
||||
try render(child, state, writer, page);
|
||||
try self.render(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Page) !void {
|
||||
fn renderElement(self: *Context, el: *Element) !void {
|
||||
const tag = el.getTag();
|
||||
|
||||
if (!isVisibleElement(el)) return;
|
||||
@@ -192,116 +174,116 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
|
||||
// --- Opening Tag Logic ---
|
||||
|
||||
// Ensure block elements start on a new line (double newline for paragraphs etc)
|
||||
if (isBlock(tag) and !state.in_table) {
|
||||
try ensureNewline(state, writer);
|
||||
if (tag.isBlock() and !self.state.in_table) {
|
||||
try self.ensureNewline();
|
||||
if (shouldAddSpacing(tag)) {
|
||||
try writer.writeByte('\n');
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
} else if (tag == .li or tag == .tr) {
|
||||
try ensureNewline(state, writer);
|
||||
try self.ensureNewline();
|
||||
}
|
||||
|
||||
// Prefixes
|
||||
switch (tag) {
|
||||
.h1 => try writer.writeAll("# "),
|
||||
.h2 => try writer.writeAll("## "),
|
||||
.h3 => try writer.writeAll("### "),
|
||||
.h4 => try writer.writeAll("#### "),
|
||||
.h5 => try writer.writeAll("##### "),
|
||||
.h6 => try writer.writeAll("###### "),
|
||||
.h1 => try self.writer.writeAll("# "),
|
||||
.h2 => try self.writer.writeAll("## "),
|
||||
.h3 => try self.writer.writeAll("### "),
|
||||
.h4 => try self.writer.writeAll("#### "),
|
||||
.h5 => try self.writer.writeAll("##### "),
|
||||
.h6 => try self.writer.writeAll("###### "),
|
||||
.ul => {
|
||||
if (state.list_depth < state.list_stack.len) {
|
||||
state.list_stack[state.list_depth] = .{ .type = .unordered, .index = 0 };
|
||||
state.list_depth += 1;
|
||||
if (self.state.list_depth < self.state.list_stack.len) {
|
||||
self.state.list_stack[self.state.list_depth] = .{ .type = .unordered, .index = 0 };
|
||||
self.state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.ol => {
|
||||
if (state.list_depth < state.list_stack.len) {
|
||||
state.list_stack[state.list_depth] = .{ .type = .ordered, .index = 1 };
|
||||
state.list_depth += 1;
|
||||
if (self.state.list_depth < self.state.list_stack.len) {
|
||||
self.state.list_stack[self.state.list_depth] = .{ .type = .ordered, .index = 1 };
|
||||
self.state.list_depth += 1;
|
||||
}
|
||||
},
|
||||
.li => {
|
||||
const indent = if (state.list_depth > 0) state.list_depth - 1 else 0;
|
||||
for (0..indent) |_| try writer.writeAll(" ");
|
||||
const indent = if (self.state.list_depth > 0) self.state.list_depth - 1 else 0;
|
||||
for (0..indent) |_| try self.writer.writeAll(" ");
|
||||
|
||||
if (state.list_depth > 0 and state.list_stack[state.list_depth - 1].type == .ordered) {
|
||||
const current_list = &state.list_stack[state.list_depth - 1];
|
||||
try writer.print("{d}. ", .{current_list.index});
|
||||
if (self.state.list_depth > 0 and self.state.list_stack[self.state.list_depth - 1].type == .ordered) {
|
||||
const current_list = &self.state.list_stack[self.state.list_depth - 1];
|
||||
try self.writer.print("{d}. ", .{current_list.index});
|
||||
current_list.index += 1;
|
||||
} else {
|
||||
try writer.writeAll("- ");
|
||||
try self.writer.writeAll("- ");
|
||||
}
|
||||
state.last_char_was_newline = false;
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.table => {
|
||||
state.in_table = true;
|
||||
state.table_row_index = 0;
|
||||
state.table_col_count = 0;
|
||||
self.state.in_table = true;
|
||||
self.state.table_row_index = 0;
|
||||
self.state.table_col_count = 0;
|
||||
},
|
||||
.tr => {
|
||||
state.table_col_count = 0;
|
||||
try writer.writeByte('|');
|
||||
self.state.table_col_count = 0;
|
||||
try self.writer.writeByte('|');
|
||||
},
|
||||
.td, .th => {
|
||||
// Note: leading pipe handled by previous cell closing or tr opening
|
||||
state.last_char_was_newline = false;
|
||||
try writer.writeByte(' ');
|
||||
self.state.last_char_was_newline = false;
|
||||
try self.writer.writeByte(' ');
|
||||
},
|
||||
.blockquote => {
|
||||
try writer.writeAll("> ");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("> ");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.pre => {
|
||||
try writer.writeAll("```\n");
|
||||
state.pre_node = el.asNode();
|
||||
state.last_char_was_newline = true;
|
||||
try self.writer.writeAll("```\n");
|
||||
self.state.pre_node = el.asNode();
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (state.pre_node == null) {
|
||||
try writer.writeByte('`');
|
||||
state.in_code = true;
|
||||
state.last_char_was_newline = false;
|
||||
if (self.state.pre_node == null) {
|
||||
try self.writer.writeByte('`');
|
||||
self.state.in_code = true;
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try writer.writeAll("**");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("**");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try writer.writeAll("*");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("*");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try writer.writeAll("~~");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("~~");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.hr => {
|
||||
try writer.writeAll("---\n");
|
||||
state.last_char_was_newline = true;
|
||||
try self.writer.writeAll("---\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
return;
|
||||
},
|
||||
.br => {
|
||||
if (state.in_table) {
|
||||
try writer.writeByte(' ');
|
||||
if (self.state.in_table) {
|
||||
try self.writer.writeByte(' ');
|
||||
} else {
|
||||
try writer.writeByte('\n');
|
||||
state.last_char_was_newline = true;
|
||||
try self.writer.writeByte('\n');
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
},
|
||||
.img => {
|
||||
try writer.writeAll(";
|
||||
try self.writer.writeAll("](");
|
||||
if (el.getAttributeSafe(comptime .wrap("src"))) |src| {
|
||||
const absolute_src = URL.resolve(page.call_arena, page.base(), src, .{ .encode = true }) catch src;
|
||||
try writer.writeAll(absolute_src);
|
||||
const absolute_src = URL.resolve(self.page.call_arena, self.page.base(), src, .{ .encode = true }) catch src;
|
||||
try self.writer.writeAll(absolute_src);
|
||||
}
|
||||
try writer.writeAll(")");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll(")");
|
||||
self.state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.anchor => {
|
||||
@@ -312,57 +294,57 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
|
||||
if (!has_content and label == null and href_raw == null) return;
|
||||
|
||||
const has_block = hasBlockDescendant(el.asNode());
|
||||
const href = if (href_raw) |h| URL.resolve(page.call_arena, page.base(), h, .{ .encode = true }) catch h else null;
|
||||
const href = if (href_raw) |h| URL.resolve(self.page.call_arena, self.page.base(), h, .{ .encode = true }) catch h else null;
|
||||
|
||||
if (has_block) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
try self.renderChildren(el.asNode());
|
||||
if (href) |h| {
|
||||
if (!state.last_char_was_newline) try writer.writeByte('\n');
|
||||
try writer.writeAll("([](");
|
||||
try writer.writeAll(h);
|
||||
try writer.writeAll("))\n");
|
||||
state.last_char_was_newline = true;
|
||||
if (!self.state.last_char_was_newline) try self.writer.writeByte('\n');
|
||||
try self.writer.writeAll("([](");
|
||||
try self.writer.writeAll(h);
|
||||
try self.writer.writeAll("))\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (isStandaloneAnchor(el)) {
|
||||
if (!state.last_char_was_newline) try writer.writeByte('\n');
|
||||
try writer.writeByte('[');
|
||||
if (!self.state.last_char_was_newline) try self.writer.writeByte('\n');
|
||||
try self.writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
try self.renderChildren(el.asNode());
|
||||
} else {
|
||||
try writer.writeAll(label orelse "");
|
||||
try self.writer.writeAll(label orelse "");
|
||||
}
|
||||
try writer.writeAll("](");
|
||||
try self.writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try writer.writeAll(h);
|
||||
try self.writer.writeAll(h);
|
||||
}
|
||||
try writer.writeAll(")\n");
|
||||
state.last_char_was_newline = true;
|
||||
try self.writer.writeAll(")\n");
|
||||
self.state.last_char_was_newline = true;
|
||||
return;
|
||||
}
|
||||
|
||||
try writer.writeByte('[');
|
||||
try self.writer.writeByte('[');
|
||||
if (has_content) {
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
try self.renderChildren(el.asNode());
|
||||
} else {
|
||||
try writer.writeAll(label orelse "");
|
||||
try self.writer.writeAll(label orelse "");
|
||||
}
|
||||
try writer.writeAll("](");
|
||||
try self.writer.writeAll("](");
|
||||
if (href) |h| {
|
||||
try writer.writeAll(h);
|
||||
try self.writer.writeAll(h);
|
||||
}
|
||||
try writer.writeByte(')');
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeByte(')');
|
||||
self.state.last_char_was_newline = false;
|
||||
return;
|
||||
},
|
||||
.input => {
|
||||
const type_attr = el.getAttributeSafe(comptime .wrap("type")) orelse return;
|
||||
if (std.ascii.eqlIgnoreCase(type_attr, "checkbox")) {
|
||||
const checked = el.getAttributeSafe(comptime .wrap("checked")) != null;
|
||||
try writer.writeAll(if (checked) "[x] " else "[ ] ");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll(if (checked) "[x] " else "[ ] ");
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
return;
|
||||
},
|
||||
@@ -370,85 +352,85 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
|
||||
}
|
||||
|
||||
// --- Render Children ---
|
||||
try renderChildren(el.asNode(), state, writer, page);
|
||||
try self.renderChildren(el.asNode());
|
||||
|
||||
// --- Closing Tag Logic ---
|
||||
|
||||
// Suffixes
|
||||
switch (tag) {
|
||||
.pre => {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
try writer.writeAll("```\n");
|
||||
state.pre_node = null;
|
||||
state.last_char_was_newline = true;
|
||||
try self.writer.writeAll("```\n");
|
||||
self.state.pre_node = null;
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.code => {
|
||||
if (state.pre_node == null) {
|
||||
try writer.writeByte('`');
|
||||
state.in_code = false;
|
||||
state.last_char_was_newline = false;
|
||||
if (self.state.pre_node == null) {
|
||||
try self.writer.writeByte('`');
|
||||
self.state.in_code = false;
|
||||
self.state.last_char_was_newline = false;
|
||||
}
|
||||
},
|
||||
.b, .strong => {
|
||||
try writer.writeAll("**");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("**");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.i, .em => {
|
||||
try writer.writeAll("*");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("*");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.s, .del => {
|
||||
try writer.writeAll("~~");
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll("~~");
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
.blockquote => {},
|
||||
.ul, .ol => {
|
||||
if (state.list_depth > 0) state.list_depth -= 1;
|
||||
if (self.state.list_depth > 0) self.state.list_depth -= 1;
|
||||
},
|
||||
.table => {
|
||||
state.in_table = false;
|
||||
self.state.in_table = false;
|
||||
},
|
||||
.tr => {
|
||||
try writer.writeByte('\n');
|
||||
if (state.table_row_index == 0) {
|
||||
try writer.writeByte('|');
|
||||
for (0..state.table_col_count) |_| {
|
||||
try writer.writeAll("---|");
|
||||
try self.writer.writeByte('\n');
|
||||
if (self.state.table_row_index == 0) {
|
||||
try self.writer.writeByte('|');
|
||||
for (0..self.state.table_col_count) |_| {
|
||||
try self.writer.writeAll("---|");
|
||||
}
|
||||
try writer.writeByte('\n');
|
||||
try self.writer.writeByte('\n');
|
||||
}
|
||||
state.table_row_index += 1;
|
||||
state.last_char_was_newline = true;
|
||||
self.state.table_row_index += 1;
|
||||
self.state.last_char_was_newline = true;
|
||||
},
|
||||
.td, .th => {
|
||||
try writer.writeAll(" |");
|
||||
state.table_col_count += 1;
|
||||
state.last_char_was_newline = false;
|
||||
try self.writer.writeAll(" |");
|
||||
self.state.table_col_count += 1;
|
||||
self.state.last_char_was_newline = false;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
// Post-block newlines
|
||||
if (isBlock(tag) and !state.in_table) {
|
||||
try ensureNewline(state, writer);
|
||||
if (tag.isBlock() and !self.state.in_table) {
|
||||
try self.ensureNewline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn renderText(text: []const u8, state: *State, writer: *std.Io.Writer) !void {
|
||||
fn renderText(self: *Context, text: []const u8) !void {
|
||||
if (text.len == 0) return;
|
||||
|
||||
if (state.pre_node) |_| {
|
||||
try writer.writeAll(text);
|
||||
state.last_char_was_newline = text[text.len - 1] == '\n';
|
||||
if (self.state.pre_node) |_| {
|
||||
try self.writer.writeAll(text);
|
||||
self.state.last_char_was_newline = text[text.len - 1] == '\n';
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for pure whitespace
|
||||
if (isAllWhitespace(text)) {
|
||||
if (!state.last_char_was_newline) {
|
||||
try writer.writeByte(' ');
|
||||
if (!self.state.last_char_was_newline) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -457,31 +439,45 @@ fn renderText(text: []const u8, state: *State, writer: *std.Io.Writer) !void {
|
||||
var it = std.mem.tokenizeAny(u8, text, " \t\n\r");
|
||||
var first = true;
|
||||
while (it.next()) |word| {
|
||||
if (!first or (!state.last_char_was_newline and std.ascii.isWhitespace(text[0]))) {
|
||||
try writer.writeByte(' ');
|
||||
if (!first or (!self.state.last_char_was_newline and std.ascii.isWhitespace(text[0]))) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
|
||||
try escapeMarkdown(writer, word);
|
||||
state.last_char_was_newline = false;
|
||||
try self.escape(word);
|
||||
self.state.last_char_was_newline = false;
|
||||
first = false;
|
||||
}
|
||||
|
||||
// Handle trailing whitespace from the original text
|
||||
if (!first and !state.last_char_was_newline and std.ascii.isWhitespace(text[text.len - 1])) {
|
||||
try writer.writeByte(' ');
|
||||
if (!first and !self.state.last_char_was_newline and std.ascii.isWhitespace(text[text.len - 1])) {
|
||||
try self.writer.writeByte(' ');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn escapeMarkdown(writer: *std.Io.Writer, text: []const u8) !void {
|
||||
fn escape(self: *Context, text: []const u8) !void {
|
||||
for (text) |c| {
|
||||
switch (c) {
|
||||
'\\', '`', '*', '_', '{', '}', '[', ']', '(', ')', '#', '+', '-', '!', '|' => {
|
||||
try writer.writeByte('\\');
|
||||
try writer.writeByte(c);
|
||||
try self.writer.writeByte('\\');
|
||||
try self.writer.writeByte(c);
|
||||
},
|
||||
else => try writer.writeByte(c),
|
||||
else => try self.writer.writeByte(c),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub fn dump(node: *Node, opts: Opts, writer: *std.Io.Writer, page: *Page) !void {
|
||||
_ = opts;
|
||||
var ctx: Context = .{
|
||||
.state = .{},
|
||||
.writer = writer,
|
||||
.page = page,
|
||||
};
|
||||
try ctx.render(node);
|
||||
if (!ctx.state.last_char_was_newline) {
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
}
|
||||
|
||||
fn testMarkdownHTML(html: []const u8, expected: []const u8) !void {
|
||||
|
||||
@@ -23,6 +23,9 @@ const h5e = @import("html5ever.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Node = @import("../webapi/Node.zig");
|
||||
const Element = @import("../webapi/Element.zig");
|
||||
|
||||
pub const AttributeIterator = h5e.AttributeIterator;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
|
||||
489
src/browser/structured_data.zig
Normal file
489
src/browser/structured_data.zig
Normal file
@@ -0,0 +1,489 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("Page.zig");
|
||||
const URL = @import("URL.zig");
|
||||
const TreeWalker = @import("webapi/TreeWalker.zig");
|
||||
const Element = @import("webapi/Element.zig");
|
||||
const Node = @import("webapi/Node.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// Key-value pair for structured data properties.
|
||||
pub const Property = struct {
|
||||
key: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
pub const AlternateLink = struct {
|
||||
href: []const u8,
|
||||
hreflang: ?[]const u8,
|
||||
type: ?[]const u8,
|
||||
title: ?[]const u8,
|
||||
};
|
||||
|
||||
pub const StructuredData = struct {
|
||||
json_ld: []const []const u8,
|
||||
open_graph: []const Property,
|
||||
twitter_card: []const Property,
|
||||
meta: []const Property,
|
||||
links: []const Property,
|
||||
alternate: []const AlternateLink,
|
||||
|
||||
pub fn jsonStringify(self: *const StructuredData, jw: anytype) !void {
|
||||
try jw.beginObject();
|
||||
|
||||
try jw.objectField("jsonLd");
|
||||
try jw.write(self.json_ld);
|
||||
|
||||
try jw.objectField("openGraph");
|
||||
try writeProperties(jw, self.open_graph);
|
||||
|
||||
try jw.objectField("twitterCard");
|
||||
try writeProperties(jw, self.twitter_card);
|
||||
|
||||
try jw.objectField("meta");
|
||||
try writeProperties(jw, self.meta);
|
||||
|
||||
try jw.objectField("links");
|
||||
try writeProperties(jw, self.links);
|
||||
|
||||
if (self.alternate.len > 0) {
|
||||
try jw.objectField("alternate");
|
||||
try jw.beginArray();
|
||||
for (self.alternate) |alt| {
|
||||
try jw.beginObject();
|
||||
try jw.objectField("href");
|
||||
try jw.write(alt.href);
|
||||
if (alt.hreflang) |v| {
|
||||
try jw.objectField("hreflang");
|
||||
try jw.write(v);
|
||||
}
|
||||
if (alt.type) |v| {
|
||||
try jw.objectField("type");
|
||||
try jw.write(v);
|
||||
}
|
||||
if (alt.title) |v| {
|
||||
try jw.objectField("title");
|
||||
try jw.write(v);
|
||||
}
|
||||
try jw.endObject();
|
||||
}
|
||||
try jw.endArray();
|
||||
}
|
||||
|
||||
try jw.endObject();
|
||||
}
|
||||
};
|
||||
|
||||
/// Serializes properties as a JSON object. When a key appears multiple times
|
||||
/// (e.g. multiple og:image tags), values are grouped into an array.
|
||||
/// Alternatives considered: always-array values (verbose), or an array of
|
||||
/// {key, value} pairs (preserves order but less ergonomic for consumers).
|
||||
fn writeProperties(jw: anytype, properties: []const Property) !void {
|
||||
try jw.beginObject();
|
||||
for (properties, 0..) |prop, i| {
|
||||
// Skip keys already written by an earlier occurrence.
|
||||
var already_written = false;
|
||||
for (properties[0..i]) |prev| {
|
||||
if (std.mem.eql(u8, prev.key, prop.key)) {
|
||||
already_written = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (already_written) continue;
|
||||
|
||||
// Count total occurrences to decide string vs array.
|
||||
var count: usize = 0;
|
||||
for (properties) |p| {
|
||||
if (std.mem.eql(u8, p.key, prop.key)) count += 1;
|
||||
}
|
||||
|
||||
try jw.objectField(prop.key);
|
||||
if (count == 1) {
|
||||
try jw.write(prop.value);
|
||||
} else {
|
||||
try jw.beginArray();
|
||||
for (properties) |p| {
|
||||
if (std.mem.eql(u8, p.key, prop.key)) {
|
||||
try jw.write(p.value);
|
||||
}
|
||||
}
|
||||
try jw.endArray();
|
||||
}
|
||||
}
|
||||
try jw.endObject();
|
||||
}
|
||||
|
||||
/// Extract all structured data from the page.
|
||||
pub fn collectStructuredData(
|
||||
root: *Node,
|
||||
arena: Allocator,
|
||||
page: *Page,
|
||||
) !StructuredData {
|
||||
var json_ld: std.ArrayList([]const u8) = .empty;
|
||||
var open_graph: std.ArrayList(Property) = .empty;
|
||||
var twitter_card: std.ArrayList(Property) = .empty;
|
||||
var meta: std.ArrayList(Property) = .empty;
|
||||
var links: std.ArrayList(Property) = .empty;
|
||||
var alternate: std.ArrayList(AlternateLink) = .empty;
|
||||
|
||||
// Extract language from the root <html> element.
|
||||
if (root.is(Element)) |root_el| {
|
||||
if (root_el.getAttributeSafe(comptime .wrap("lang"))) |lang| {
|
||||
try meta.append(arena, .{ .key = "language", .value = lang });
|
||||
}
|
||||
} else {
|
||||
// Root is document — check documentElement.
|
||||
var children = root.childrenIterator();
|
||||
while (children.next()) |child| {
|
||||
const el = child.is(Element) orelse continue;
|
||||
if (el.getTag() == .html) {
|
||||
if (el.getAttributeSafe(comptime .wrap("lang"))) |lang| {
|
||||
try meta.append(arena, .{ .key = "language", .value = lang });
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var tw = TreeWalker.Full.init(root, .{});
|
||||
while (tw.next()) |node| {
|
||||
const el = node.is(Element) orelse continue;
|
||||
|
||||
switch (el.getTag()) {
|
||||
.script => {
|
||||
try collectJsonLd(el, arena, &json_ld);
|
||||
tw.skipChildren();
|
||||
},
|
||||
.meta => collectMeta(el, &open_graph, &twitter_card, &meta, arena) catch {},
|
||||
.title => try collectTitle(node, arena, &meta),
|
||||
.link => try collectLink(el, arena, page, &links, &alternate),
|
||||
// Skip body subtree for non-JSON-LD — all other metadata is in <head>.
|
||||
// JSON-LD can appear in <body> so we don't skip the whole body.
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
.json_ld = json_ld.items,
|
||||
.open_graph = open_graph.items,
|
||||
.twitter_card = twitter_card.items,
|
||||
.meta = meta.items,
|
||||
.links = links.items,
|
||||
.alternate = alternate.items,
|
||||
};
|
||||
}
|
||||
|
||||
fn collectJsonLd(
|
||||
el: *Element,
|
||||
arena: Allocator,
|
||||
json_ld: *std.ArrayList([]const u8),
|
||||
) !void {
|
||||
const type_attr = el.getAttributeSafe(comptime .wrap("type")) orelse return;
|
||||
if (!std.ascii.eqlIgnoreCase(type_attr, "application/ld+json")) return;
|
||||
|
||||
var buf: std.Io.Writer.Allocating = .init(arena);
|
||||
try el.asNode().getTextContent(&buf.writer);
|
||||
const text = buf.written();
|
||||
if (text.len > 0) {
|
||||
try json_ld.append(arena, std.mem.trim(u8, text, &std.ascii.whitespace));
|
||||
}
|
||||
}
|
||||
|
||||
fn collectMeta(
|
||||
el: *Element,
|
||||
open_graph: *std.ArrayList(Property),
|
||||
twitter_card: *std.ArrayList(Property),
|
||||
meta: *std.ArrayList(Property),
|
||||
arena: Allocator,
|
||||
) !void {
|
||||
// charset: <meta charset="..."> (no content attribute needed).
|
||||
if (el.getAttributeSafe(comptime .wrap("charset"))) |charset| {
|
||||
try meta.append(arena, .{ .key = "charset", .value = charset });
|
||||
}
|
||||
|
||||
const content = el.getAttributeSafe(comptime .wrap("content")) orelse return;
|
||||
|
||||
// Open Graph: <meta property="og:...">
|
||||
if (el.getAttributeSafe(comptime .wrap("property"))) |property| {
|
||||
if (std.mem.startsWith(u8, property, "og:")) {
|
||||
try open_graph.append(arena, .{ .key = property[3..], .value = content });
|
||||
return;
|
||||
}
|
||||
// Article, profile, etc. are OG sub-namespaces.
|
||||
if (std.mem.startsWith(u8, property, "article:") or
|
||||
std.mem.startsWith(u8, property, "profile:") or
|
||||
std.mem.startsWith(u8, property, "book:") or
|
||||
std.mem.startsWith(u8, property, "music:") or
|
||||
std.mem.startsWith(u8, property, "video:"))
|
||||
{
|
||||
try open_graph.append(arena, .{ .key = property, .value = content });
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Twitter Cards: <meta name="twitter:...">
|
||||
if (el.getAttributeSafe(comptime .wrap("name"))) |name| {
|
||||
if (std.mem.startsWith(u8, name, "twitter:")) {
|
||||
try twitter_card.append(arena, .{ .key = name[8..], .value = content });
|
||||
return;
|
||||
}
|
||||
|
||||
// Standard meta tags by name.
|
||||
const known_names = [_][]const u8{
|
||||
"description", "author", "keywords", "robots",
|
||||
"viewport", "generator", "theme-color",
|
||||
};
|
||||
for (known_names) |known| {
|
||||
if (std.ascii.eqlIgnoreCase(name, known)) {
|
||||
try meta.append(arena, .{ .key = known, .value = content });
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// http-equiv (e.g. Content-Type, refresh)
|
||||
if (el.getAttributeSafe(comptime .wrap("http-equiv"))) |http_equiv| {
|
||||
try meta.append(arena, .{ .key = http_equiv, .value = content });
|
||||
}
|
||||
}
|
||||
|
||||
fn collectTitle(
|
||||
node: *Node,
|
||||
arena: Allocator,
|
||||
meta: *std.ArrayList(Property),
|
||||
) !void {
|
||||
var buf: std.Io.Writer.Allocating = .init(arena);
|
||||
try node.getTextContent(&buf.writer);
|
||||
const text = std.mem.trim(u8, buf.written(), &std.ascii.whitespace);
|
||||
if (text.len > 0) {
|
||||
try meta.append(arena, .{ .key = "title", .value = text });
|
||||
}
|
||||
}
|
||||
|
||||
fn collectLink(
|
||||
el: *Element,
|
||||
arena: Allocator,
|
||||
page: *Page,
|
||||
links: *std.ArrayList(Property),
|
||||
alternate: *std.ArrayList(AlternateLink),
|
||||
) !void {
|
||||
const rel = el.getAttributeSafe(comptime .wrap("rel")) orelse return;
|
||||
const raw_href = el.getAttributeSafe(comptime .wrap("href")) orelse return;
|
||||
const href = URL.resolve(arena, page.base(), raw_href, .{ .encode = true }) catch raw_href;
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(rel, "alternate")) {
|
||||
try alternate.append(arena, .{
|
||||
.href = href,
|
||||
.hreflang = el.getAttributeSafe(comptime .wrap("hreflang")),
|
||||
.type = el.getAttributeSafe(comptime .wrap("type")),
|
||||
.title = el.getAttributeSafe(comptime .wrap("title")),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const relevant_rels = [_][]const u8{
|
||||
"canonical", "icon", "manifest", "shortcut icon",
|
||||
"apple-touch-icon", "search", "author", "license",
|
||||
"dns-prefetch", "preconnect",
|
||||
};
|
||||
for (relevant_rels) |known| {
|
||||
if (std.ascii.eqlIgnoreCase(rel, known)) {
|
||||
try links.append(arena, .{ .key = known, .value = href });
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- Tests ---
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
|
||||
fn testStructuredData(html: []const u8) !StructuredData {
|
||||
const page = try testing.test_session.createPage();
|
||||
defer testing.test_session.removePage();
|
||||
|
||||
const doc = page.window._document;
|
||||
const div = try doc.createElement("div", null, page);
|
||||
try page.parseHtmlAsChildren(div.asNode(), html);
|
||||
|
||||
return collectStructuredData(div.asNode(), page.call_arena, page);
|
||||
}
|
||||
|
||||
fn findProperty(props: []const Property, key: []const u8) ?[]const u8 {
|
||||
for (props) |p| {
|
||||
if (std.mem.eql(u8, p.key, key)) return p.value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
test "structured_data: json-ld" {
|
||||
const data = try testStructuredData(
|
||||
\\<script type="application/ld+json">
|
||||
\\{"@context":"https://schema.org","@type":"Article","headline":"Test"}
|
||||
\\</script>
|
||||
);
|
||||
try testing.expectEqual(1, data.json_ld.len);
|
||||
try testing.expect(std.mem.indexOf(u8, data.json_ld[0], "Article") != null);
|
||||
}
|
||||
|
||||
test "structured_data: multiple json-ld" {
|
||||
const data = try testStructuredData(
|
||||
\\<script type="application/ld+json">{"@type":"Organization"}</script>
|
||||
\\<script type="application/ld+json">{"@type":"BreadcrumbList"}</script>
|
||||
\\<script type="text/javascript">var x = 1;</script>
|
||||
);
|
||||
try testing.expectEqual(2, data.json_ld.len);
|
||||
}
|
||||
|
||||
test "structured_data: open graph" {
|
||||
const data = try testStructuredData(
|
||||
\\<meta property="og:title" content="My Page">
|
||||
\\<meta property="og:description" content="A description">
|
||||
\\<meta property="og:image" content="https://example.com/img.jpg">
|
||||
\\<meta property="og:url" content="https://example.com">
|
||||
\\<meta property="og:type" content="article">
|
||||
\\<meta property="article:published_time" content="2026-03-10">
|
||||
);
|
||||
try testing.expectEqual(6, data.open_graph.len);
|
||||
try testing.expectEqual("My Page", findProperty(data.open_graph, "title").?);
|
||||
try testing.expectEqual("article", findProperty(data.open_graph, "type").?);
|
||||
try testing.expectEqual("2026-03-10", findProperty(data.open_graph, "article:published_time").?);
|
||||
}
|
||||
|
||||
test "structured_data: open graph duplicate keys" {
|
||||
const data = try testStructuredData(
|
||||
\\<meta property="og:title" content="My Page">
|
||||
\\<meta property="og:image" content="https://example.com/img1.jpg">
|
||||
\\<meta property="og:image" content="https://example.com/img2.jpg">
|
||||
\\<meta property="og:image" content="https://example.com/img3.jpg">
|
||||
);
|
||||
// Duplicate keys are preserved as separate Property entries.
|
||||
try testing.expectEqual(4, data.open_graph.len);
|
||||
|
||||
// Verify serialization groups duplicates into arrays.
|
||||
const json = try std.json.Stringify.valueAlloc(testing.allocator, data, .{});
|
||||
defer testing.allocator.free(json);
|
||||
|
||||
const parsed = try std.json.parseFromSlice(std.json.Value, testing.allocator, json, .{});
|
||||
defer parsed.deinit();
|
||||
const og = parsed.value.object.get("openGraph").?.object;
|
||||
// "title" appears once → string.
|
||||
switch (og.get("title").?) {
|
||||
.string => {},
|
||||
else => return error.TestUnexpectedResult,
|
||||
}
|
||||
// "image" appears 3 times → array.
|
||||
switch (og.get("image").?) {
|
||||
.array => |arr| try testing.expectEqual(3, arr.items.len),
|
||||
else => return error.TestUnexpectedResult,
|
||||
}
|
||||
}
|
||||
|
||||
test "structured_data: twitter card" {
|
||||
const data = try testStructuredData(
|
||||
\\<meta name="twitter:card" content="summary_large_image">
|
||||
\\<meta name="twitter:site" content="@example">
|
||||
\\<meta name="twitter:title" content="My Page">
|
||||
);
|
||||
try testing.expectEqual(3, data.twitter_card.len);
|
||||
try testing.expectEqual("summary_large_image", findProperty(data.twitter_card, "card").?);
|
||||
try testing.expectEqual("@example", findProperty(data.twitter_card, "site").?);
|
||||
}
|
||||
|
||||
test "structured_data: meta tags" {
|
||||
const data = try testStructuredData(
|
||||
\\<title>Page Title</title>
|
||||
\\<meta name="description" content="A test page">
|
||||
\\<meta name="author" content="Test Author">
|
||||
\\<meta name="keywords" content="test, example">
|
||||
\\<meta name="robots" content="index, follow">
|
||||
);
|
||||
try testing.expectEqual("Page Title", findProperty(data.meta, "title").?);
|
||||
try testing.expectEqual("A test page", findProperty(data.meta, "description").?);
|
||||
try testing.expectEqual("Test Author", findProperty(data.meta, "author").?);
|
||||
try testing.expectEqual("test, example", findProperty(data.meta, "keywords").?);
|
||||
try testing.expectEqual("index, follow", findProperty(data.meta, "robots").?);
|
||||
}
|
||||
|
||||
test "structured_data: link elements" {
|
||||
const data = try testStructuredData(
|
||||
\\<link rel="canonical" href="https://example.com/page">
|
||||
\\<link rel="icon" href="/favicon.ico">
|
||||
\\<link rel="manifest" href="/manifest.json">
|
||||
\\<link rel="stylesheet" href="/style.css">
|
||||
);
|
||||
try testing.expectEqual(3, data.links.len);
|
||||
try testing.expectEqual("https://example.com/page", findProperty(data.links, "canonical").?);
|
||||
// stylesheet should be filtered out
|
||||
try testing.expectEqual(null, findProperty(data.links, "stylesheet"));
|
||||
}
|
||||
|
||||
test "structured_data: alternate links" {
|
||||
const data = try testStructuredData(
|
||||
\\<link rel="alternate" href="https://example.com/fr" hreflang="fr" title="French">
|
||||
\\<link rel="alternate" href="https://example.com/de" hreflang="de">
|
||||
);
|
||||
try testing.expectEqual(2, data.alternate.len);
|
||||
try testing.expectEqual("fr", data.alternate[0].hreflang.?);
|
||||
try testing.expectEqual("French", data.alternate[0].title.?);
|
||||
try testing.expectEqual("de", data.alternate[1].hreflang.?);
|
||||
try testing.expectEqual(null, data.alternate[1].title);
|
||||
}
|
||||
|
||||
test "structured_data: non-metadata elements ignored" {
|
||||
const data = try testStructuredData(
|
||||
\\<div>Just text</div>
|
||||
\\<p>More text</p>
|
||||
\\<a href="/link">Link</a>
|
||||
);
|
||||
try testing.expectEqual(0, data.json_ld.len);
|
||||
try testing.expectEqual(0, data.open_graph.len);
|
||||
try testing.expectEqual(0, data.twitter_card.len);
|
||||
try testing.expectEqual(0, data.meta.len);
|
||||
try testing.expectEqual(0, data.links.len);
|
||||
}
|
||||
|
||||
test "structured_data: charset and http-equiv" {
|
||||
const data = try testStructuredData(
|
||||
\\<meta charset="utf-8">
|
||||
\\<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||
);
|
||||
try testing.expectEqual("utf-8", findProperty(data.meta, "charset").?);
|
||||
try testing.expectEqual("text/html; charset=utf-8", findProperty(data.meta, "Content-Type").?);
|
||||
}
|
||||
|
||||
test "structured_data: mixed content" {
|
||||
const data = try testStructuredData(
|
||||
\\<title>My Site</title>
|
||||
\\<meta property="og:title" content="OG Title">
|
||||
\\<meta name="twitter:card" content="summary">
|
||||
\\<meta name="description" content="A page">
|
||||
\\<link rel="canonical" href="https://example.com">
|
||||
\\<script type="application/ld+json">{"@type":"WebSite"}</script>
|
||||
);
|
||||
try testing.expectEqual(1, data.json_ld.len);
|
||||
try testing.expectEqual(1, data.open_graph.len);
|
||||
try testing.expectEqual(1, data.twitter_card.len);
|
||||
try testing.expectEqual("My Site", findProperty(data.meta, "title").?);
|
||||
try testing.expectEqual("A page", findProperty(data.meta, "description").?);
|
||||
try testing.expectEqual(1, data.links.len);
|
||||
}
|
||||
@@ -98,6 +98,64 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=mime_parsing>
|
||||
// MIME types are lowercased
|
||||
{
|
||||
const blob = new Blob([], { type: "TEXT/HTML" });
|
||||
testing.expectEqual("text/html", blob.type);
|
||||
}
|
||||
|
||||
{
|
||||
const blob = new Blob([], { type: "Application/JSON" });
|
||||
testing.expectEqual("application/json", blob.type);
|
||||
}
|
||||
|
||||
// MIME with parameters - lowercased
|
||||
{
|
||||
const blob = new Blob([], { type: "text/html; charset=UTF-8" });
|
||||
testing.expectEqual("text/html; charset=utf-8", blob.type);
|
||||
}
|
||||
|
||||
// Any ASCII string is accepted and lowercased (no MIME structure validation)
|
||||
{
|
||||
const blob = new Blob([], { type: "invalid" });
|
||||
testing.expectEqual("invalid", blob.type);
|
||||
}
|
||||
|
||||
{
|
||||
const blob = new Blob([], { type: "/" });
|
||||
testing.expectEqual("/", blob.type);
|
||||
}
|
||||
|
||||
// Non-ASCII characters cause empty string (chars outside U+0020-U+007E)
|
||||
{
|
||||
const blob = new Blob([], { type: "ý/x" });
|
||||
testing.expectEqual("", blob.type);
|
||||
}
|
||||
|
||||
{
|
||||
const blob = new Blob([], { type: "text/plàin" });
|
||||
testing.expectEqual("", blob.type);
|
||||
}
|
||||
|
||||
// Control characters cause empty string
|
||||
{
|
||||
const blob = new Blob([], { type: "text/html\x00" });
|
||||
testing.expectEqual("", blob.type);
|
||||
}
|
||||
|
||||
// Empty type stays empty
|
||||
{
|
||||
const blob = new Blob([]);
|
||||
testing.expectEqual("", blob.type);
|
||||
}
|
||||
|
||||
{
|
||||
const blob = new Blob([], { type: "" });
|
||||
testing.expectEqual("", blob.type);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=slice>
|
||||
{
|
||||
const parts = ["la", "symphonie", "des", "éclairs"];
|
||||
|
||||
@@ -89,6 +89,41 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CanvasRenderingContext2D#getImageData">
|
||||
{
|
||||
const element = document.createElement("canvas");
|
||||
element.width = 100;
|
||||
element.height = 50;
|
||||
const ctx = element.getContext("2d");
|
||||
|
||||
const imageData = ctx.getImageData(0, 0, 10, 20);
|
||||
testing.expectEqual(true, imageData instanceof ImageData);
|
||||
testing.expectEqual(imageData.width, 10);
|
||||
testing.expectEqual(imageData.height, 20);
|
||||
testing.expectEqual(imageData.data.length, 10 * 20 * 4);
|
||||
testing.expectEqual(true, imageData.data instanceof Uint8ClampedArray);
|
||||
|
||||
// Undrawn canvas should return transparent black pixels.
|
||||
testing.expectEqual(imageData.data[0], 0);
|
||||
testing.expectEqual(imageData.data[1], 0);
|
||||
testing.expectEqual(imageData.data[2], 0);
|
||||
testing.expectEqual(imageData.data[3], 0);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CanvasRenderingContext2D#getImageData invalid">
|
||||
{
|
||||
const element = document.createElement("canvas");
|
||||
const ctx = element.getContext("2d");
|
||||
|
||||
// Zero or negative width/height should throw IndexSizeError.
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 0, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, 0));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, -5, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, -5));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
<script id="getter">
|
||||
{
|
||||
|
||||
@@ -62,3 +62,26 @@
|
||||
testing.expectEqual(offscreen.height, 96);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=OffscreenCanvasRenderingContext2D#getImageData>
|
||||
{
|
||||
const canvas = new OffscreenCanvas(100, 50);
|
||||
const ctx = canvas.getContext("2d");
|
||||
|
||||
const imageData = ctx.getImageData(0, 0, 10, 20);
|
||||
testing.expectEqual(true, imageData instanceof ImageData);
|
||||
testing.expectEqual(imageData.width, 10);
|
||||
testing.expectEqual(imageData.height, 20);
|
||||
testing.expectEqual(imageData.data.length, 10 * 20 * 4);
|
||||
|
||||
// Undrawn canvas should return transparent black pixels.
|
||||
testing.expectEqual(imageData.data[0], 0);
|
||||
testing.expectEqual(imageData.data[1], 0);
|
||||
testing.expectEqual(imageData.data[2], 0);
|
||||
testing.expectEqual(imageData.data[3], 0);
|
||||
|
||||
// Zero or negative dimensions should throw.
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 0, 10));
|
||||
testing.expectError('Index or size', () => ctx.getImageData(0, 0, 10, -5));
|
||||
}
|
||||
</script>
|
||||
|
||||
63
src/browser/tests/css/font_face.html
Normal file
63
src/browser/tests/css/font_face.html
Normal file
@@ -0,0 +1,63 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id="constructor_basic">
|
||||
{
|
||||
const face = new FontFace("TestFont", "url(test.woff)");
|
||||
testing.expectTrue(face instanceof FontFace);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="constructor_name">
|
||||
{
|
||||
testing.expectEqual('FontFace', FontFace.name);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="family_property">
|
||||
{
|
||||
const face = new FontFace("MyFont", "url(font.woff2)");
|
||||
testing.expectEqual("MyFont", face.family);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="status_is_loaded">
|
||||
{
|
||||
const face = new FontFace("F", "url(f.woff)");
|
||||
testing.expectEqual("loaded", face.status);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="loaded_is_promise">
|
||||
{
|
||||
const face = new FontFace("F", "url(f.woff)");
|
||||
testing.expectTrue(face.loaded instanceof Promise);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="load_returns_promise">
|
||||
{
|
||||
const face = new FontFace("F", "url(f.woff)");
|
||||
testing.expectTrue(face.load() instanceof Promise);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="default_descriptors">
|
||||
{
|
||||
const face = new FontFace("F", "url(f.woff)");
|
||||
testing.expectEqual("normal", face.style);
|
||||
testing.expectEqual("normal", face.weight);
|
||||
testing.expectEqual("normal", face.stretch);
|
||||
testing.expectEqual("normal", face.variant);
|
||||
testing.expectEqual("normal", face.featureSettings);
|
||||
testing.expectEqual("auto", face.display);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="document_fonts_add">
|
||||
{
|
||||
const face = new FontFace("AddedFont", "url(added.woff)");
|
||||
const result = document.fonts.add(face);
|
||||
testing.expectTrue(result === document.fonts);
|
||||
}
|
||||
</script>
|
||||
@@ -56,3 +56,25 @@
|
||||
testing.expectEqual('FontFaceSet', document.fonts.constructor.name);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="document_fonts_addEventListener">
|
||||
{
|
||||
let loading = false;
|
||||
document.fonts.addEventListener('loading', function() {
|
||||
loading = true;
|
||||
});
|
||||
|
||||
let loadingdone = false;
|
||||
document.fonts.addEventListener('loadingdone', function() {
|
||||
loadingdone = true;
|
||||
});
|
||||
|
||||
document.fonts.load("italic bold 16px Roboto");
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(true, loading);
|
||||
testing.expectEqual(true, loadingdone);
|
||||
});
|
||||
testing.expectEqual(true, true);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -275,3 +275,147 @@
|
||||
testing.expectEqual('red', div.style.getPropertyValue('color'));
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CSSStyleDeclaration_normalize_zero_to_0px">
|
||||
{
|
||||
// Per CSSOM spec, unitless zero in length properties should serialize as "0px"
|
||||
const div = document.createElement('div');
|
||||
|
||||
div.style.width = '0';
|
||||
testing.expectEqual('0px', div.style.width);
|
||||
|
||||
div.style.margin = '0';
|
||||
testing.expectEqual('0px', div.style.margin);
|
||||
|
||||
div.style.padding = '0';
|
||||
testing.expectEqual('0px', div.style.padding);
|
||||
|
||||
div.style.top = '0';
|
||||
testing.expectEqual('0px', div.style.top);
|
||||
|
||||
// Scroll properties
|
||||
div.style.scrollMarginTop = '0';
|
||||
testing.expectEqual('0px', div.style.scrollMarginTop);
|
||||
|
||||
div.style.scrollPaddingBottom = '0';
|
||||
testing.expectEqual('0px', div.style.scrollPaddingBottom);
|
||||
|
||||
// Multi-column
|
||||
div.style.columnWidth = '0';
|
||||
testing.expectEqual('0px', div.style.columnWidth);
|
||||
|
||||
div.style.columnRuleWidth = '0';
|
||||
testing.expectEqual('0px', div.style.columnRuleWidth);
|
||||
|
||||
// Outline shorthand
|
||||
div.style.outline = '0';
|
||||
testing.expectEqual('0px', div.style.outline);
|
||||
|
||||
// Shapes
|
||||
div.style.shapeMargin = '0';
|
||||
testing.expectEqual('0px', div.style.shapeMargin);
|
||||
|
||||
// Non-length properties should not be affected
|
||||
div.style.opacity = '0';
|
||||
testing.expectEqual('0', div.style.opacity);
|
||||
|
||||
div.style.zIndex = '0';
|
||||
testing.expectEqual('0', div.style.zIndex);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CSSStyleDeclaration_normalize_first_baseline">
|
||||
{
|
||||
// "first baseline" should serialize canonically as "baseline"
|
||||
const div = document.createElement('div');
|
||||
|
||||
div.style.alignItems = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.alignItems);
|
||||
|
||||
div.style.alignContent = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.alignContent);
|
||||
|
||||
div.style.alignSelf = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.alignSelf);
|
||||
|
||||
div.style.justifySelf = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.justifySelf);
|
||||
|
||||
// "last baseline" should remain unchanged
|
||||
div.style.alignItems = 'last baseline';
|
||||
testing.expectEqual('last baseline', div.style.alignItems);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CSSStyleDeclaration_normalize_duplicate_values">
|
||||
{
|
||||
// For 2-value shorthand properties, "X X" should collapse to "X"
|
||||
const div = document.createElement('div');
|
||||
|
||||
div.style.placeContent = 'center center';
|
||||
testing.expectEqual('center', div.style.placeContent);
|
||||
|
||||
div.style.placeContent = 'start start';
|
||||
testing.expectEqual('start', div.style.placeContent);
|
||||
|
||||
div.style.gap = '10px 10px';
|
||||
testing.expectEqual('10px', div.style.gap);
|
||||
|
||||
// Different values should not collapse
|
||||
div.style.placeContent = 'center start';
|
||||
testing.expectEqual('center start', div.style.placeContent);
|
||||
|
||||
div.style.gap = '10px 20px';
|
||||
testing.expectEqual('10px 20px', div.style.gap);
|
||||
|
||||
// New shorthands
|
||||
div.style.overflow = 'hidden hidden';
|
||||
testing.expectEqual('hidden', div.style.overflow);
|
||||
|
||||
div.style.scrollSnapAlign = 'start start';
|
||||
testing.expectEqual('start', div.style.scrollSnapAlign);
|
||||
|
||||
div.style.overscrollBehavior = 'auto auto';
|
||||
testing.expectEqual('auto', div.style.overscrollBehavior);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="CSSStyleDeclaration_normalize_anchor_size">
|
||||
{
|
||||
// anchor-size() should serialize with dashed ident (anchor name) before size keyword
|
||||
const div = document.createElement('div');
|
||||
|
||||
// Already canonical order - should stay the same
|
||||
div.style.width = 'anchor-size(--foo width)';
|
||||
testing.expectEqual('anchor-size(--foo width)', div.style.width);
|
||||
|
||||
// Non-canonical order - should be reordered
|
||||
div.style.width = 'anchor-size(width --foo)';
|
||||
testing.expectEqual('anchor-size(--foo width)', div.style.width);
|
||||
|
||||
// With fallback value
|
||||
div.style.width = 'anchor-size(height --bar, 100px)';
|
||||
testing.expectEqual('anchor-size(--bar height, 100px)', div.style.width);
|
||||
|
||||
// Different size keywords
|
||||
div.style.width = 'anchor-size(block --baz)';
|
||||
testing.expectEqual('anchor-size(--baz block)', div.style.width);
|
||||
|
||||
div.style.width = 'anchor-size(inline --qux)';
|
||||
testing.expectEqual('anchor-size(--qux inline)', div.style.width);
|
||||
|
||||
div.style.width = 'anchor-size(self-block --test)';
|
||||
testing.expectEqual('anchor-size(--test self-block)', div.style.width);
|
||||
|
||||
div.style.width = 'anchor-size(self-inline --test)';
|
||||
testing.expectEqual('anchor-size(--test self-inline)', div.style.width);
|
||||
|
||||
// Without anchor name (implicit default anchor)
|
||||
div.style.width = 'anchor-size(width)';
|
||||
testing.expectEqual('anchor-size(width)', div.style.width);
|
||||
|
||||
// Nested anchor-size in fallback
|
||||
div.style.width = 'anchor-size(width --foo, anchor-size(height --bar))';
|
||||
testing.expectEqual('anchor-size(--foo width, anchor-size(--bar height))', div.style.width);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -72,3 +72,59 @@
|
||||
testing.expectEqual(2, calls);
|
||||
}
|
||||
</script>
|
||||
|
||||
<div id=fragment_clone_container></div>
|
||||
|
||||
<script id=clone_fragment>
|
||||
{
|
||||
let calls = 0;
|
||||
class MyFragmentCloneElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
calls += 1;
|
||||
$('#fragment_clone_container').appendChild(this);
|
||||
}
|
||||
}
|
||||
customElements.define('my-fragment-clone-element', MyFragmentCloneElement);
|
||||
|
||||
// Create a DocumentFragment with a custom element
|
||||
const fragment = document.createDocumentFragment();
|
||||
const customEl = document.createElement('my-fragment-clone-element');
|
||||
fragment.appendChild(customEl);
|
||||
|
||||
// Clone the fragment - this should trigger the crash
|
||||
// because the constructor will attach the element during cloning
|
||||
const clonedFragment = fragment.cloneNode(true);
|
||||
testing.expectEqual(2, calls);
|
||||
}
|
||||
</script>
|
||||
|
||||
<div id=range_clone_container></div>
|
||||
|
||||
<script id=clone_range>
|
||||
{
|
||||
let calls = 0;
|
||||
class MyRangeCloneElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
calls += 1;
|
||||
$('#range_clone_container').appendChild(this);
|
||||
}
|
||||
}
|
||||
customElements.define('my-range-clone-element', MyRangeCloneElement);
|
||||
|
||||
// Create a container with a custom element
|
||||
const container = document.createElement('div');
|
||||
const customEl = document.createElement('my-range-clone-element');
|
||||
container.appendChild(customEl);
|
||||
|
||||
// Create a range that includes the custom element
|
||||
const range = document.createRange();
|
||||
range.selectNodeContents(container);
|
||||
|
||||
// Clone the range contents - this should trigger the crash
|
||||
// because the constructor will attach the element during cloning
|
||||
const clonedContents = range.cloneContents();
|
||||
testing.expectEqual(2, calls);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
<!DOCTYPE html>
|
||||
<head>
|
||||
<script src="../testing.js"></script>
|
||||
<script>
|
||||
// Test that document.open/write/close throw InvalidStateError during custom element
|
||||
// reactions when the element is parsed from HTML
|
||||
|
||||
window.constructorOpenException = null;
|
||||
window.constructorWriteException = null;
|
||||
window.constructorCloseException = null;
|
||||
window.constructorCalled = false;
|
||||
|
||||
class ThrowTestElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
window.constructorCalled = true;
|
||||
|
||||
// Try document.open on the same document during constructor - should throw
|
||||
try {
|
||||
document.open();
|
||||
} catch (e) {
|
||||
window.constructorOpenException = e;
|
||||
}
|
||||
|
||||
// Try document.write on the same document during constructor - should throw
|
||||
try {
|
||||
document.write('<b>test</b>');
|
||||
} catch (e) {
|
||||
window.constructorWriteException = e;
|
||||
}
|
||||
|
||||
// Try document.close on the same document during constructor - should throw
|
||||
try {
|
||||
document.close();
|
||||
} catch (e) {
|
||||
window.constructorCloseException = e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
customElements.define('throw-test-element', ThrowTestElement);
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<!-- This element will be parsed from HTML, triggering the constructor -->
|
||||
<throw-test-element id="test-element"></throw-test-element>
|
||||
|
||||
<script id="verify_throws">
|
||||
{
|
||||
// Verify the constructor was called
|
||||
testing.expectEqual(true, window.constructorCalled);
|
||||
|
||||
// Verify document.open threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorOpenException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorOpenException.name);
|
||||
|
||||
// Verify document.write threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorWriteException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorWriteException.name);
|
||||
|
||||
// Verify document.close threw InvalidStateError
|
||||
testing.expectEqual(true, window.constructorCloseException !== null);
|
||||
testing.expectEqual('InvalidStateError', window.constructorCloseException.name);
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
@@ -24,11 +24,10 @@
|
||||
|
||||
<script id=byId name="test1">
|
||||
testing.expectEqual(1, document.querySelector.length);
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelector(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => document.querySelector(''));
|
||||
|
||||
testing.expectEqual('test1', document.querySelector('#byId').getAttribute('name'));
|
||||
|
||||
@@ -34,11 +34,10 @@
|
||||
</script>
|
||||
|
||||
<script id=script1 name="test1">
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelectorAll(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => document.querySelectorAll(''));
|
||||
</script>
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@
|
||||
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(3, err.code);
|
||||
testing.expectEqual('Hierarchy Error', err.message);
|
||||
testing.expectEqual('HierarchyRequestError', err.name);
|
||||
testing.expectEqual(true, err instanceof DOMException);
|
||||
testing.expectEqual(true, err instanceof Error);
|
||||
}, () => link.appendChild(content));
|
||||
|
||||
@@ -4,9 +4,17 @@
|
||||
|
||||
<script id=basic>
|
||||
{
|
||||
{
|
||||
const parser = new DOMParser();
|
||||
testing.expectEqual('object', typeof parser);
|
||||
testing.expectEqual('function', typeof parser.parseFromString);
|
||||
}
|
||||
|
||||
{
|
||||
const parser = new DOMParser();
|
||||
let d = parser.parseFromString('', 'text/xml');
|
||||
testing.expectEqual('<parsererror>error</parsererror>', new XMLSerializer().serializeToString(d));
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -389,3 +397,25 @@
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=getElementsByTagName-xml>
|
||||
{
|
||||
const parser = new DOMParser();
|
||||
const doc = parser.parseFromString('<layout><row><col>A</col><col>B</col></row></layout>', 'text/xml');
|
||||
|
||||
// Test getElementsByTagName on document
|
||||
const rows = doc.getElementsByTagName('row');
|
||||
testing.expectEqual(1, rows.length);
|
||||
|
||||
// Test getElementsByTagName on element
|
||||
const row = rows[0];
|
||||
const cols = row.getElementsByTagName('col');
|
||||
testing.expectEqual(2, cols.length);
|
||||
testing.expectEqual('A', cols[0].textContent);
|
||||
testing.expectEqual('B', cols[1].textContent);
|
||||
|
||||
// Test getElementsByTagName('*') on element
|
||||
const allElements = row.getElementsByTagName('*');
|
||||
testing.expectEqual(2, allElements.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -36,7 +36,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => el1.removeAttributeNode(script_id_node));
|
||||
|
||||
testing.expectEqual(an1, el1.removeAttributeNode(an1));
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
testing.expectEqual('', $('#a0').href);
|
||||
|
||||
testing.expectEqual(testing.BASE_URL + 'element/anchor1.html', $('#a1').href);
|
||||
testing.expectEqual(testing.ORIGIN + 'hello/world/anchor2.html', $('#a2').href);
|
||||
testing.expectEqual(testing.ORIGIN + '/hello/world/anchor2.html', $('#a2').href);
|
||||
testing.expectEqual('https://www.openmymind.net/Elixirs-With-Statement/', $('#a3').href);
|
||||
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/foo', $('#link').href);
|
||||
|
||||
@@ -23,6 +23,22 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="action">
|
||||
{
|
||||
const form = document.createElement('form')
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/form.html', form.action)
|
||||
|
||||
form.action = 'hello';
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/hello', form.action)
|
||||
|
||||
form.action = '/hello';
|
||||
testing.expectEqual(testing.ORIGIN + '/hello', form.action)
|
||||
|
||||
form.action = 'https://lightpanda.io/hello';
|
||||
testing.expectEqual('https://lightpanda.io/hello', form.action)
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test fixtures for form.method -->
|
||||
<form id="form_get" method="get"></form>
|
||||
<form id="form_post" method="post"></form>
|
||||
@@ -327,3 +343,123 @@
|
||||
testing.expectEqual('', form.elements['choice'].value)
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() fires the submit event (unlike submit()) -->
|
||||
<form id="test_form2" action="/should-not-navigate2" method="get">
|
||||
<input name="q" value="test2">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_fires_submit_event">
|
||||
{
|
||||
const form = $('#test_form2');
|
||||
let submitFired = false;
|
||||
|
||||
form.addEventListener('submit', (e) => {
|
||||
e.preventDefault();
|
||||
submitFired = true;
|
||||
});
|
||||
|
||||
form.requestSubmit();
|
||||
|
||||
testing.expectEqual(true, submitFired);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with preventDefault stops navigation -->
|
||||
<form id="test_form3" action="/should-not-navigate3" method="get">
|
||||
<input name="q" value="test3">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_respects_preventDefault">
|
||||
{
|
||||
const form = $('#test_form3');
|
||||
|
||||
form.addEventListener('submit', (e) => {
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
form.requestSubmit();
|
||||
|
||||
// Form submission was prevented, so no navigation should be scheduled
|
||||
testing.expectEqual(true, true);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with non-submit-button submitter throws TypeError -->
|
||||
<form id="test_form_rs1" action="/should-not-navigate4" method="get">
|
||||
<input id="rs1_text" type="text" name="q" value="test">
|
||||
<input id="rs1_submit" type="submit" value="Go">
|
||||
<input id="rs1_image" type="image" src="x.png">
|
||||
<button id="rs1_btn_submit" type="submit">Submit</button>
|
||||
<button id="rs1_btn_reset" type="reset">Reset</button>
|
||||
<button id="rs1_btn_button" type="button">Button</button>
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_rejects_non_submit_button">
|
||||
{
|
||||
const form = $('#test_form_rs1');
|
||||
form.addEventListener('submit', (e) => e.preventDefault());
|
||||
|
||||
// A text input is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_text'));
|
||||
});
|
||||
|
||||
// A reset button is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_btn_reset'));
|
||||
});
|
||||
|
||||
// A <button type="button"> is not a submit button — should throw TypeError
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit($('#rs1_btn_button'));
|
||||
});
|
||||
|
||||
// A <div> is not a submit button — should throw TypeError
|
||||
const div = document.createElement('div');
|
||||
form.appendChild(div);
|
||||
testing.expectError('TypeError', () => {
|
||||
form.requestSubmit(div);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() accepts valid submit buttons -->
|
||||
<script id="requestSubmit_accepts_submit_buttons">
|
||||
{
|
||||
const form = $('#test_form_rs1');
|
||||
let submitCount = 0;
|
||||
form.addEventListener('submit', (e) => { e.preventDefault(); submitCount++; });
|
||||
|
||||
// <input type="submit"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_submit'));
|
||||
testing.expectEqual(1, submitCount);
|
||||
|
||||
// <input type="image"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_image'));
|
||||
testing.expectEqual(2, submitCount);
|
||||
|
||||
// <button type="submit"> is a valid submitter
|
||||
form.requestSubmit($('#rs1_btn_submit'));
|
||||
testing.expectEqual(3, submitCount);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test: requestSubmit() with submitter not owned by form throws NotFoundError -->
|
||||
<form id="test_form_rs2" action="/should-not-navigate5" method="get">
|
||||
<input type="text" name="q" value="test">
|
||||
</form>
|
||||
<form id="test_form_rs3">
|
||||
<input id="rs3_submit" type="submit" value="Other Submit">
|
||||
</form>
|
||||
|
||||
<script id="requestSubmit_rejects_wrong_form_submitter">
|
||||
{
|
||||
const form = $('#test_form_rs2');
|
||||
|
||||
// Submit button belongs to a different form — should throw NotFoundError
|
||||
testing.expectError('NotFoundError', () => {
|
||||
form.requestSubmit($('#rs3_submit'));
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
testing.expectEqual('test.png', img.getAttribute('src'));
|
||||
|
||||
img.src = '/absolute/path.png';
|
||||
testing.expectEqual(testing.ORIGIN + 'absolute/path.png', img.src);
|
||||
testing.expectEqual(testing.ORIGIN + '/absolute/path.png', img.src);
|
||||
testing.expectEqual('/absolute/path.png', img.getAttribute('src'));
|
||||
|
||||
img.src = 'https://example.com/image.png';
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
testing.expectEqual('https://lightpanda.io/opensource-browser/15', l2.href);
|
||||
|
||||
l2.href = '/over/9000';
|
||||
testing.expectEqual(testing.ORIGIN + 'over/9000', l2.href);
|
||||
testing.expectEqual(testing.ORIGIN + '/over/9000', l2.href);
|
||||
|
||||
l2.crossOrigin = 'nope';
|
||||
testing.expectEqual('anonymous', l2.crossOrigin);
|
||||
@@ -84,3 +84,24 @@
|
||||
testing.eventually(() => testing.expectEqual(true, result));
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="refs">
|
||||
{
|
||||
const rels = ['stylesheet', 'preload', 'modulepreload'];
|
||||
const results = rels.map(() => false);
|
||||
rels.forEach((rel, i) => {
|
||||
let link = document.createElement('link')
|
||||
link.rel = rel;
|
||||
link.href = '/nope';
|
||||
link.onload = () => results[i] = true;
|
||||
document.documentElement.appendChild(link);
|
||||
});
|
||||
|
||||
|
||||
testing.eventually(() => {
|
||||
results.forEach((r) => {
|
||||
testing.expectEqual(true, r);
|
||||
});
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
61
src/browser/tests/element/html/script/async_text.html
Normal file
61
src/browser/tests/element/html/script/async_text.html
Normal file
@@ -0,0 +1,61 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../../../testing.js"></script>
|
||||
|
||||
<script id=force_async>
|
||||
{
|
||||
// Dynamically created scripts have async=true by default
|
||||
let s = document.createElement('script');
|
||||
testing.expectEqual(true, s.async);
|
||||
|
||||
// Setting async=false clears the force async flag and removes attribute
|
||||
s.async = false;
|
||||
testing.expectEqual(false, s.async);
|
||||
testing.expectEqual(false, s.hasAttribute('async'));
|
||||
|
||||
// Setting async=true adds the attribute
|
||||
s.async = true;
|
||||
testing.expectEqual(true, s.async);
|
||||
testing.expectEqual(true, s.hasAttribute('async'));
|
||||
}
|
||||
</script>
|
||||
|
||||
<script></script>
|
||||
<script id=empty>
|
||||
{
|
||||
// Empty parser-inserted script should have async=true (force async retained)
|
||||
let scripts = document.getElementsByTagName('script');
|
||||
let emptyScript = scripts[scripts.length - 2];
|
||||
testing.expectEqual(true, emptyScript.async);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=text_content>
|
||||
{
|
||||
let s = document.createElement('script');
|
||||
s.appendChild(document.createComment('COMMENT'));
|
||||
s.appendChild(document.createTextNode(' TEXT '));
|
||||
s.appendChild(document.createProcessingInstruction('P', 'I'));
|
||||
let a = s.appendChild(document.createElement('a'));
|
||||
a.appendChild(document.createTextNode('ELEMENT'));
|
||||
|
||||
// script.text should return only direct Text node children
|
||||
testing.expectEqual(' TEXT ', s.text);
|
||||
// script.textContent should return all descendant text
|
||||
testing.expectEqual(' TEXT ELEMENT', s.textContent);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=lazy_inline>
|
||||
{
|
||||
// Empty script in DOM, then append text - should execute
|
||||
window.lazyScriptRan = false;
|
||||
let s = document.createElement('script');
|
||||
document.head.appendChild(s);
|
||||
// Script is in DOM but empty, so not yet executed
|
||||
testing.expectEqual(false, window.lazyScriptRan);
|
||||
// Append text node with code
|
||||
s.appendChild(document.createTextNode('window.lazyScriptRan = true;'));
|
||||
// Now it should have executed
|
||||
testing.expectEqual(true, window.lazyScriptRan);
|
||||
}
|
||||
</script>
|
||||
54
src/browser/tests/element/html/script/dynamic_inline.html
Normal file
54
src/browser/tests/element/html/script/dynamic_inline.html
Normal file
@@ -0,0 +1,54 @@
|
||||
<!DOCTYPE html>
|
||||
<head></head>
|
||||
<script src="../../../testing.js"></script>
|
||||
|
||||
<script id=textContent_inline>
|
||||
window.inline_executed = false;
|
||||
const s1 = document.createElement('script');
|
||||
s1.textContent = 'window.inline_executed = true;';
|
||||
document.head.appendChild(s1);
|
||||
testing.expectTrue(window.inline_executed);
|
||||
</script>
|
||||
|
||||
<script id=text_property_inline>
|
||||
window.text_executed = false;
|
||||
const s2 = document.createElement('script');
|
||||
s2.text = 'window.text_executed = true;';
|
||||
document.head.appendChild(s2);
|
||||
testing.expectTrue(window.text_executed);
|
||||
</script>
|
||||
|
||||
<script id=innerHTML_inline>
|
||||
window.innerHTML_executed = false;
|
||||
const s3 = document.createElement('script');
|
||||
s3.innerHTML = 'window.innerHTML_executed = true;';
|
||||
document.head.appendChild(s3);
|
||||
testing.expectTrue(window.innerHTML_executed);
|
||||
</script>
|
||||
|
||||
<script id=no_double_execute_inline>
|
||||
window.inline_counter = 0;
|
||||
const s4 = document.createElement('script');
|
||||
s4.textContent = 'window.inline_counter++;';
|
||||
document.head.appendChild(s4);
|
||||
document.head.appendChild(s4);
|
||||
testing.expectEqual(1, window.inline_counter);
|
||||
</script>
|
||||
|
||||
<script id=empty_script_no_execute>
|
||||
window.empty_ran = false;
|
||||
const s5 = document.createElement('script');
|
||||
document.head.appendChild(s5);
|
||||
testing.expectFalse(window.empty_ran);
|
||||
</script>
|
||||
|
||||
<script id=module_inline>
|
||||
window.module_executed = false;
|
||||
const s6 = document.createElement('script');
|
||||
s6.type = 'module';
|
||||
s6.textContent = 'window.module_executed = true;';
|
||||
document.head.appendChild(s6);
|
||||
testing.eventually(() => {
|
||||
testing.expectTrue(window.module_executed);
|
||||
});
|
||||
</script>
|
||||
@@ -66,11 +66,10 @@
|
||||
{
|
||||
const container = $('#test-container');
|
||||
|
||||
testing.expectError("SyntaxError: Syntax Error", () => container.matches(''));
|
||||
testing.expectError("SyntaxError", () => container.matches(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => container.matches(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -81,6 +81,17 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="is_empty">
|
||||
{
|
||||
// Empty :is() and :where() are valid per spec and match nothing
|
||||
const isEmptyResult = document.querySelectorAll(':is()');
|
||||
testing.expectEqual(0, isEmptyResult.length);
|
||||
|
||||
const whereEmptyResult = document.querySelectorAll(':where()');
|
||||
testing.expectEqual(0, whereEmptyResult.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
<div id=escaped class=":popover-open"></div>
|
||||
<script id="escaped">
|
||||
{
|
||||
|
||||
@@ -12,11 +12,10 @@
|
||||
const p1 = $('#p1');
|
||||
testing.expectEqual(null, p1.querySelector('#p1'));
|
||||
|
||||
testing.expectError("SyntaxError: Syntax Error", () => p1.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => p1.querySelector(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => p1.querySelector(''));
|
||||
|
||||
testing.expectEqual($('#c2'), p1.querySelector('#c2'));
|
||||
|
||||
@@ -24,11 +24,10 @@
|
||||
<script id=errors>
|
||||
{
|
||||
const root = $('#root');
|
||||
testing.expectError("SyntaxError: Syntax Error", () => root.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => root.querySelectorAll(''));
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(12, err.code);
|
||||
testing.expectEqual("SyntaxError", err.name);
|
||||
testing.expectEqual("Syntax Error", err.message);
|
||||
}, () => root.querySelectorAll(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -12,8 +12,6 @@
|
||||
// Empty functional pseudo-classes should error
|
||||
testing.expectError("Error: InvalidPseudoClass", () => container.querySelector(':has()'));
|
||||
testing.expectError("Error: InvalidPseudoClass", () => container.querySelector(':not()'));
|
||||
testing.expectError("Error: InvalidPseudoClass", () => container.querySelector(':is()'));
|
||||
testing.expectError("Error: InvalidPseudoClass", () => container.querySelector(':where()'));
|
||||
testing.expectError("Error: InvalidPseudoClass", () => container.querySelector(':lang()'));
|
||||
}
|
||||
</script>
|
||||
@@ -45,8 +43,8 @@
|
||||
const container = $('#container');
|
||||
|
||||
// Empty selectors
|
||||
testing.expectError("SyntaxError: Syntax Error", () => container.querySelector(''));
|
||||
testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll(''));
|
||||
testing.expectError("SyntaxError", () => container.querySelector(''));
|
||||
testing.expectError("SyntaxError", () => document.querySelectorAll(''));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
38
src/browser/tests/event/report_error.html
Normal file
38
src/browser/tests/event/report_error.html
Normal file
@@ -0,0 +1,38 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=onerrorFiveArguments>
|
||||
let called = false;
|
||||
let argCount = 0;
|
||||
window.onerror = function() {
|
||||
called = true;
|
||||
argCount = arguments.length;
|
||||
return true; // suppress default
|
||||
};
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual(true, called);
|
||||
testing.expectEqual(5, argCount);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
|
||||
<script id=onerrorCalledBeforeEventListener>
|
||||
let callOrder = [];
|
||||
window.onerror = function() { callOrder.push('onerror'); return true; };
|
||||
window.addEventListener('error', function() { callOrder.push('listener'); });
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual('onerror', callOrder[0]);
|
||||
testing.expectEqual('listener', callOrder[1]);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
|
||||
<script id=onerrorReturnTrueSuppresses>
|
||||
let listenerCalled = false;
|
||||
window.onerror = function() { return true; };
|
||||
window.addEventListener('error', function(e) {
|
||||
// listener still fires even when onerror returns true
|
||||
listenerCalled = true;
|
||||
});
|
||||
try { undefinedVariable; } catch(e) { window.reportError(e); }
|
||||
testing.expectEqual(true, listenerCalled);
|
||||
window.onerror = null;
|
||||
</script>
|
||||
@@ -7,54 +7,69 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<iframe id=f1 onload="frame1Onload" src="support/sub 1.html"></iframe>
|
||||
<iframe id=f0></iframe>
|
||||
<iframe id=f1 onload="frame1Onload()" src="support/sub 1.html"></iframe>
|
||||
<iframe id=f2 src="support/sub2.html"></iframe>
|
||||
|
||||
<script id=empty>
|
||||
{
|
||||
const blank = document.createElement('iframe');
|
||||
testing.expectEqual(null, blank.contentDocument);
|
||||
document.documentElement.appendChild(blank);
|
||||
testing.expectEqual('<html><head></head><body></body></html>', blank.contentDocument.documentElement.outerHTML);
|
||||
|
||||
const f0 = $('#f0')
|
||||
testing.expectEqual('<html><head></head><body></body></html>', f0.contentDocument.documentElement.outerHTML);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="basic">
|
||||
// reload it
|
||||
$('#f2').src = 'support/sub2.html';
|
||||
testing.expectEqual(true, true);
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(undefined, window[10]);
|
||||
|
||||
testing.expectEqual(window, window[0].top);
|
||||
testing.expectEqual(window, window[0].parent);
|
||||
testing.expectEqual(false, window === window[0]);
|
||||
testing.expectEqual(undefined, window[20]);
|
||||
|
||||
testing.expectEqual(window, window[1].top);
|
||||
testing.expectEqual(window, window[1].parent);
|
||||
testing.expectEqual(false, window === window[1]);
|
||||
testing.expectEqual(false, window[0] === window[1]);
|
||||
|
||||
testing.expectEqual(window, window[2].top);
|
||||
testing.expectEqual(window, window[2].parent);
|
||||
testing.expectEqual(false, window === window[2]);
|
||||
testing.expectEqual(false, window[1] === window[2]);
|
||||
|
||||
testing.expectEqual(0, $('#f1').childNodes.length);
|
||||
|
||||
testing.expectEqual(testing.BASE_URL + 'frames/support/sub%201.html', $('#f1').src);
|
||||
testing.expectEqual(window[0], $('#f1').contentWindow);
|
||||
testing.expectEqual(window[1], $('#f2').contentWindow);
|
||||
testing.expectEqual(window[1], $('#f1').contentWindow);
|
||||
testing.expectEqual(window[2], $('#f2').contentWindow);
|
||||
|
||||
testing.expectEqual(window[0].document, $('#f1').contentDocument);
|
||||
testing.expectEqual(window[1].document, $('#f2').contentDocument);
|
||||
testing.expectEqual(window[1].document, $('#f1').contentDocument);
|
||||
testing.expectEqual(window[2].document, $('#f2').contentDocument);
|
||||
|
||||
// sibling frames share the same top
|
||||
testing.expectEqual(window[0].top, window[1].top);
|
||||
testing.expectEqual(window[1].top, window[2].top);
|
||||
|
||||
// child frames have no sub-frames
|
||||
testing.expectEqual(0, window[0].length);
|
||||
testing.expectEqual(0, window[1].length);
|
||||
testing.expectEqual(0, window[2].length);
|
||||
|
||||
// self and window are self-referential on child frames
|
||||
testing.expectEqual(window[0], window[0].self);
|
||||
testing.expectEqual(window[0], window[0].window);
|
||||
testing.expectEqual(window[1], window[1].self);
|
||||
testing.expectEqual(window[1], window[1].window);
|
||||
testing.expectEqual(window[2], window[2].self);
|
||||
|
||||
// child frame's top.parent is itself (root has no parent)
|
||||
testing.expectEqual(window, window[0].top.parent);
|
||||
|
||||
// Todo: Context security tokens
|
||||
// testing.expectEqual(true, window.sub1_loaded);
|
||||
// testing.expectEqual(true, window.sub2_loaded);
|
||||
// testing.expectEqual(1, window.sub1_count);
|
||||
// testing.expectEqual(2, window.sub2_count);
|
||||
// Cross-frame property access
|
||||
testing.expectEqual(true, window.sub1_loaded);
|
||||
testing.expectEqual(true, window.sub2_loaded);
|
||||
testing.expectEqual(1, window.sub1_count);
|
||||
// depends on how far the initial load got before it was cancelled.
|
||||
testing.expectEqual(true, window.sub2_count == 1 || window.sub2_count == 2);
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -62,6 +77,7 @@
|
||||
{
|
||||
let f3_load_event = false;
|
||||
let f3 = document.createElement('iframe');
|
||||
f3.id = 'f3';
|
||||
f3.addEventListener('load', () => {
|
||||
f3_load_event = true;
|
||||
});
|
||||
@@ -75,9 +91,10 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=onload>
|
||||
<script id=about_blank>
|
||||
{
|
||||
let f4 = document.createElement('iframe');
|
||||
f4.id = 'f4';
|
||||
f4.src = "about:blank";
|
||||
document.documentElement.appendChild(f4);
|
||||
|
||||
@@ -87,8 +104,43 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=count>
|
||||
<script id=about_blank_renavigate>
|
||||
{
|
||||
let f5 = document.createElement('iframe');
|
||||
f5.id = 'f5';
|
||||
f5.src = "support/page.html";
|
||||
document.documentElement.appendChild(f5);
|
||||
f5.src = "about:blank";
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(4, window.length);
|
||||
testing.expectEqual("<html><head></head><body></body></html>", f5.contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=link_click>
|
||||
testing.async(async (restore) => {
|
||||
await new Promise((resolve) => {
|
||||
let count = 0;
|
||||
let f6 = document.createElement('iframe');
|
||||
f6.id = 'f6';
|
||||
f6.addEventListener('load', () => {
|
||||
if (++count == 2) {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
f6.contentDocument.querySelector('#link').click();
|
||||
});
|
||||
f6.src = "support/with_link.html";
|
||||
document.documentElement.appendChild(f6);
|
||||
});
|
||||
restore();
|
||||
testing.expectEqual("<html><head></head><body>It was clicked!\n</body></html>", f6.contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=count>
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(8, window.length);
|
||||
});
|
||||
</script>
|
||||
|
||||
25
src/browser/tests/frames/post_message.html
Normal file
25
src/browser/tests/frames/post_message.html
Normal file
@@ -0,0 +1,25 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<iframe id="receiver"></iframe>
|
||||
|
||||
<script id="messages">
|
||||
{
|
||||
let reply = null;
|
||||
window.addEventListener('message', (e) => {
|
||||
console.warn('reply')
|
||||
reply = e.data;
|
||||
});
|
||||
|
||||
const iframe = $('#receiver');
|
||||
iframe.src = 'support/message_receiver.html';
|
||||
iframe.addEventListener('load', () => {
|
||||
iframe.contentWindow.postMessage('ping', '*');
|
||||
});
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('pong', reply.data);
|
||||
testing.expectEqual(testing.ORIGIN, reply.origin);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
2
src/browser/tests/frames/support/after_link.html
Normal file
2
src/browser/tests/frames/support/after_link.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<!DOCTYPE html>
|
||||
It was clicked!
|
||||
9
src/browser/tests/frames/support/message_receiver.html
Normal file
9
src/browser/tests/frames/support/message_receiver.html
Normal file
@@ -0,0 +1,9 @@
|
||||
<!DOCTYPE html>
|
||||
<script>
|
||||
window.addEventListener('message', (e) => {
|
||||
console.warn('Frame Message', e.data);
|
||||
if (e.data === 'ping') {
|
||||
window.top.postMessage({data: 'pong', origin: e.origin}, '*');
|
||||
}
|
||||
});
|
||||
</script>
|
||||
2
src/browser/tests/frames/support/page.html
Normal file
2
src/browser/tests/frames/support/page.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<!DOCTYPE html>
|
||||
a-page
|
||||
2
src/browser/tests/frames/support/with_link.html
Normal file
2
src/browser/tests/frames/support/with_link.html
Normal file
@@ -0,0 +1,2 @@
|
||||
<!DOCTYPE html>
|
||||
<a href="support/after_link.html" id=link>a link</a>
|
||||
42
src/browser/tests/frames/target.html
Normal file
42
src/browser/tests/frames/target.html
Normal file
@@ -0,0 +1,42 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<iframe name=f1 id=frame1></iframe>
|
||||
<a id=l1 target=f1 href=support/page.html></a>
|
||||
<script id=anchor>
|
||||
$('#l1').click();
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', $('#frame1').contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=form>
|
||||
{
|
||||
let frame2 = document.createElement('iframe');
|
||||
frame2.name = 'frame2';
|
||||
document.documentElement.appendChild(frame2);
|
||||
|
||||
let form = document.createElement('form');
|
||||
form.target = 'frame2';
|
||||
form.action = 'support/page.html';
|
||||
form.submit();
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', frame2.contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<iframe name=frame3 id=f3></iframe>
|
||||
<form target="_top" action="support/page.html">
|
||||
<input type=submit id=submit1 formtarget="frame3">
|
||||
</form>
|
||||
|
||||
<script id=formtarget>
|
||||
{
|
||||
$('#submit1').click();
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('<html><head></head><body>a-page\n</body></html>', $('#f3').contentDocument.documentElement.outerHTML);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
@@ -2,37 +2,17 @@
|
||||
<script src="testing.js"></script>
|
||||
|
||||
<script id=history>
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'manual';
|
||||
testing.expectEqual('manual', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'auto';
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
testing.expectEqual(null, history.state)
|
||||
|
||||
history.pushState({ testInProgress: true }, null, 'http://127.0.0.1:9582/src/browser/tests/history_after_nav.skip.html');
|
||||
testing.expectEqual({ testInProgress: true }, history.state);
|
||||
|
||||
history.pushState({ testInProgress: false }, null, 'http://127.0.0.1:9582/xhr/json');
|
||||
history.replaceState({ "new": "field", testComplete: true }, null);
|
||||
|
||||
let state = { "new": "field", testComplete: true };
|
||||
testing.expectEqual(state, history.state);
|
||||
|
||||
let popstateEventFired = false;
|
||||
let popstateEventState = null;
|
||||
|
||||
window.addEventListener('popstate', (event) => {
|
||||
popstateEventFired = true;
|
||||
popstateEventState = event.state;
|
||||
});
|
||||
|
||||
// This test is a bit wonky. But it's trying to test navigation, which is
|
||||
// something we can't do in the main page (we can't navigate away from this
|
||||
// page and still assertOk in the test runner).
|
||||
// If support/history.html has a failed assertion, it'll log the error and
|
||||
// stop the script. If it succeeds, it'll set support_history_completed
|
||||
// which we can use here to assume everything passed.
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(true, popstateEventFired);
|
||||
testing.expectEqual({testInProgress: true }, popstateEventState);
|
||||
})
|
||||
|
||||
history.back();
|
||||
testing.expectEqual(true, window.support_history_completed);
|
||||
testing.expectEqual(true, window.support_history_popstateEventFired);
|
||||
testing.expectEqual({testInProgress: true }, window.support_history_popstateEventState);
|
||||
});
|
||||
</script>
|
||||
|
||||
<iframe id=frame src="support/history.html"></iframe>
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
</html>
|
||||
|
||||
<script src="../testing.js"></script>
|
||||
<applet></applet>
|
||||
|
||||
<script id=document>
|
||||
testing.expectEqual('HTMLDocument', document.__proto__.constructor.name);
|
||||
@@ -23,7 +24,7 @@
|
||||
testing.expectEqual(2, document.scripts.length);
|
||||
testing.expectEqual(0, document.forms.length);
|
||||
testing.expectEqual(1, document.links.length);
|
||||
testing.expectEqual(0, document.applets.length);
|
||||
testing.expectEqual(0, document.applets.length); // deprecated, always returns 0
|
||||
testing.expectEqual(0, document.anchors.length);
|
||||
testing.expectEqual(7, document.all.length);
|
||||
testing.expectEqual('document', document.currentScript.id);
|
||||
|
||||
14
src/browser/tests/mcp_actions.html
Normal file
14
src/browser/tests/mcp_actions.html
Normal file
@@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<button id="btn" onclick="window.clicked = true;">Click Me</button>
|
||||
<input id="inp" oninput="window.inputVal = this.value" onchange="window.changed = true;">
|
||||
<select id="sel" onchange="window.selChanged = this.value">
|
||||
<option value="opt1">Option 1</option>
|
||||
<option value="opt2">Option 2</option>
|
||||
</select>
|
||||
<div id="scrollbox" style="width: 100px; height: 100px; overflow: scroll;" onscroll="window.scrolled = true;">
|
||||
<div style="height: 500px;">Long content</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -27,3 +27,44 @@
|
||||
testing.expectEqual(false, navigator.javaEnabled());
|
||||
testing.expectEqual(false, navigator.webdriver);
|
||||
</script>
|
||||
|
||||
<script id=permission_query>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.permissions.query({ name: 'notifications' });
|
||||
testing.expectTrue(p instanceof Promise);
|
||||
const status = await p;
|
||||
restore();
|
||||
testing.expectEqual('prompt', status.state);
|
||||
testing.expectEqual('notifications', status.name);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=storage_estimate>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.storage.estimate();
|
||||
testing.expectTrue(p instanceof Promise);
|
||||
|
||||
const estimate = await p;
|
||||
restore();
|
||||
testing.expectEqual(0, estimate.usage);
|
||||
testing.expectEqual(1024 * 1024 * 1024, estimate.quota);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=deviceMemory>
|
||||
testing.expectEqual(8, navigator.deviceMemory);
|
||||
</script>
|
||||
|
||||
<script id=getBattery>
|
||||
testing.async(async (restore) => {
|
||||
const p = navigator.getBattery();
|
||||
try {
|
||||
await p;
|
||||
testing.fail('getBattery should reject');
|
||||
} catch (err) {
|
||||
restore();
|
||||
testing.expectEqual('NotSupportedError', err.name);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -203,3 +203,39 @@
|
||||
testing.expectEqual(true, response.body !== null);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=fetch_blob_url>
|
||||
testing.async(async (restore) => {
|
||||
// Create a blob and get its URL
|
||||
const blob = new Blob(['Hello from blob!'], { type: 'text/plain' });
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
|
||||
const response = await fetch(blobUrl);
|
||||
restore();
|
||||
|
||||
testing.expectEqual(200, response.status);
|
||||
testing.expectEqual(true, response.ok);
|
||||
testing.expectEqual(blobUrl, response.url);
|
||||
testing.expectEqual('text/plain', response.headers.get('Content-Type'));
|
||||
|
||||
const text = await response.text();
|
||||
testing.expectEqual('Hello from blob!', text);
|
||||
|
||||
// Clean up
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=abort>
|
||||
testing.async(async (restore) => {
|
||||
const controller = new AbortController();
|
||||
controller.abort();
|
||||
try {
|
||||
await fetch('http://127.0.0.1:9582/xhr', { signal: controller.signal });
|
||||
testain.fail('fetch should have been aborted');
|
||||
} catch (e) {
|
||||
restore();
|
||||
testing.expectEqual("AbortError", e.name);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -137,3 +137,79 @@
|
||||
testing.expectEqual('PROPFIND', req.method);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=body_methods>
|
||||
testing.async(async () => {
|
||||
const req = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: 'Hello, World!',
|
||||
headers: { 'Content-Type': 'text/plain' }
|
||||
});
|
||||
|
||||
const text = await req.text();
|
||||
testing.expectEqual('Hello, World!', text);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const req = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: '{"name": "test"}',
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
|
||||
const json = await req.json();
|
||||
testing.expectEqual('test', json.name);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const req = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: 'binary data',
|
||||
headers: { 'Content-Type': 'application/octet-stream' }
|
||||
});
|
||||
|
||||
const buffer = await req.arrayBuffer();
|
||||
testing.expectEqual(true, buffer instanceof ArrayBuffer);
|
||||
testing.expectEqual(11, buffer.byteLength);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const req = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: 'blob content',
|
||||
headers: { 'Content-Type': 'text/plain' }
|
||||
});
|
||||
|
||||
const blob = await req.blob();
|
||||
testing.expectEqual(true, blob instanceof Blob);
|
||||
testing.expectEqual(12, blob.size);
|
||||
testing.expectEqual('text/plain', blob.type);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const req = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: 'bytes'
|
||||
});
|
||||
|
||||
const bytes = await req.bytes();
|
||||
testing.expectEqual(true, bytes instanceof Uint8Array);
|
||||
testing.expectEqual(5, bytes.length);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=clone>
|
||||
{
|
||||
const req1 = new Request('https://example.com/api', {
|
||||
method: 'POST',
|
||||
body: 'test body',
|
||||
headers: { 'X-Custom': 'value' }
|
||||
});
|
||||
|
||||
const req2 = req1.clone();
|
||||
|
||||
testing.expectEqual(req1.url, req2.url);
|
||||
testing.expectEqual(req1.method, req2.method);
|
||||
testing.expectEqual('value', req2.headers.get('X-Custom'));
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -2,13 +2,16 @@
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=response>
|
||||
// let response = new Response("Hello, World!");
|
||||
// testing.expectEqual(200, response.status);
|
||||
// testing.expectEqual("", response.statusText);
|
||||
// testing.expectEqual(true, response.ok);
|
||||
// testing.expectEqual("", response.url);
|
||||
// testing.expectEqual(false, response.redirected);
|
||||
{
|
||||
let response = new Response("Hello, World!");
|
||||
testing.expectEqual(200, response.status);
|
||||
testing.expectEqual("", response.statusText);
|
||||
testing.expectEqual(true, response.ok);
|
||||
testing.expectEqual("", response.url);
|
||||
testing.expectEqual(false, response.redirected);
|
||||
}
|
||||
|
||||
{
|
||||
let response2 = new Response("Error occurred", {
|
||||
status: 404,
|
||||
statusText: "Not Found",
|
||||
@@ -18,35 +21,94 @@
|
||||
"Cache-Control": "no-cache"
|
||||
}
|
||||
});
|
||||
testing.expectEqual(true, true);
|
||||
// testing.expectEqual(404, response2.status);
|
||||
// testing.expectEqual("Not Found", response2.statusText);
|
||||
// testing.expectEqual(false, response2.ok);
|
||||
// testing.expectEqual("text/plain", response2.headers);
|
||||
// testing.expectEqual("test-value", response2.headers.get("X-Custom"));
|
||||
testing.expectEqual(404, response2.status);
|
||||
testing.expectEqual("Not Found", response2.statusText);
|
||||
testing.expectEqual(false, response2.ok);
|
||||
testing.expectEqual("test-value", response2.headers.get("X-Custom"));
|
||||
testing.expectEqual("no-cache", response2.headers.get("cache-control"));
|
||||
}
|
||||
|
||||
// let response3 = new Response("Created", { status: 201, statusText: "Created" });
|
||||
// testing.expectEqual("basic", response3.type);
|
||||
// testing.expectEqual(201, response3.status);
|
||||
// testing.expectEqual("Created", response3.statusText);
|
||||
// testing.expectEqual(true, response3.ok);
|
||||
{
|
||||
let response3 = new Response("Created", { status: 201, statusText: "Created" });
|
||||
testing.expectEqual("basic", response3.type);
|
||||
testing.expectEqual(201, response3.status);
|
||||
testing.expectEqual("Created", response3.statusText);
|
||||
testing.expectEqual(true, response3.ok);
|
||||
}
|
||||
|
||||
// let nullResponse = new Response(null);
|
||||
// testing.expectEqual(200, nullResponse.status);
|
||||
// testing.expectEqual("", nullResponse.statusText);
|
||||
{
|
||||
let nullResponse = new Response(null);
|
||||
testing.expectEqual(200, nullResponse.status);
|
||||
testing.expectEqual("", nullResponse.statusText);
|
||||
}
|
||||
|
||||
// let emptyResponse = new Response("");
|
||||
// testing.expectEqual(200, emptyResponse.status);
|
||||
{
|
||||
let emptyResponse = new Response("");
|
||||
testing.expectEqual(200, emptyResponse.status);
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- <script id=json>
|
||||
<script id=body_methods>
|
||||
testing.async(async () => {
|
||||
const json = await new Promise((resolve) => {
|
||||
let response = new Response('[]');
|
||||
response.json().then(resolve)
|
||||
const response = new Response('Hello, World!');
|
||||
const text = await response.text();
|
||||
testing.expectEqual('Hello, World!', text);
|
||||
});
|
||||
testing.expectEqual([], json);
|
||||
|
||||
testing.async(async () => {
|
||||
const response = new Response('{"name": "test"}');
|
||||
const json = await response.json();
|
||||
testing.expectEqual('test', json.name);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const response = new Response('binary data');
|
||||
const buffer = await response.arrayBuffer();
|
||||
testing.expectEqual(true, buffer instanceof ArrayBuffer);
|
||||
testing.expectEqual(11, buffer.byteLength);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const response = new Response('blob content', {
|
||||
headers: { 'Content-Type': 'text/plain' }
|
||||
});
|
||||
const blob = await response.blob();
|
||||
testing.expectEqual(true, blob instanceof Blob);
|
||||
testing.expectEqual(12, blob.size);
|
||||
testing.expectEqual('text/plain', blob.type);
|
||||
});
|
||||
|
||||
testing.async(async () => {
|
||||
const response = new Response('bytes');
|
||||
const bytes = await response.bytes();
|
||||
testing.expectEqual(true, bytes instanceof Uint8Array);
|
||||
testing.expectEqual(5, bytes.length);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=clone>
|
||||
{
|
||||
const response1 = new Response('test body', {
|
||||
status: 201,
|
||||
statusText: 'Created',
|
||||
headers: { 'X-Custom': 'value' }
|
||||
});
|
||||
|
||||
const response2 = response1.clone();
|
||||
|
||||
testing.expectEqual(response1.status, response2.status);
|
||||
testing.expectEqual(response1.statusText, response2.statusText);
|
||||
testing.expectEqual('value', response2.headers.get('X-Custom'));
|
||||
}
|
||||
|
||||
testing.async(async () => {
|
||||
const response1 = new Response('cloned body');
|
||||
const response2 = response1.clone();
|
||||
|
||||
const text1 = await response1.text();
|
||||
const text2 = await response2.text();
|
||||
|
||||
testing.expectEqual('cloned body', text1);
|
||||
testing.expectEqual('cloned body', text2);
|
||||
});
|
||||
</script>
|
||||
-->
|
||||
|
||||
@@ -283,3 +283,26 @@
|
||||
testing.expectEqual(XMLHttpRequest.UNSENT, req.readyState);
|
||||
});
|
||||
</script>
|
||||
|
||||
<script id=xhr_blob_url>
|
||||
testing.async(async (restore) => {
|
||||
// Create a blob and get its URL
|
||||
const blob = new Blob(['Hello from blob!'], { type: 'text/plain' });
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
|
||||
const req = new XMLHttpRequest();
|
||||
await new Promise((resolve) => {
|
||||
req.onload = resolve;
|
||||
req.open('GET', blobUrl);
|
||||
req.send();
|
||||
});
|
||||
|
||||
restore();
|
||||
testing.expectEqual(200, req.status);
|
||||
testing.expectEqual('Hello from blob!', req.responseText);
|
||||
testing.expectEqual(blobUrl, req.responseURL);
|
||||
|
||||
// Clean up
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
});
|
||||
</script>
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => d1.insertBefore(document.createElement('div'), d2));
|
||||
|
||||
let c1 = document.createElement('div');
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(8, err.code);
|
||||
testing.expectEqual("NotFoundError", err.name);
|
||||
testing.expectEqual("Not Found", err.message);
|
||||
}, () => $('#d1').removeChild($('#p1')));
|
||||
|
||||
const p1 = $('#p1');
|
||||
|
||||
@@ -25,7 +25,6 @@
|
||||
testing.withError((err) => {
|
||||
testing.expectEqual(3, err.code);
|
||||
testing.expectEqual("HierarchyRequestError", err.name);
|
||||
testing.expectEqual("Hierarchy Error", err.message);
|
||||
}, () => d1.replaceChild(c4, c3));
|
||||
|
||||
testing.expectEqual(c2, d1.replaceChild(c4, c2));
|
||||
|
||||
41
src/browser/tests/page/blob.html
Normal file
41
src/browser/tests/page/blob.html
Normal file
@@ -0,0 +1,41 @@
|
||||
<!DOCTYPE html>
|
||||
<body></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id="basic_blob_navigation">
|
||||
{
|
||||
const html = '<html><head></head><body><div id="test">Hello Blob</div></body></html>';
|
||||
const blob = new Blob([html], { type: 'text/html' });
|
||||
const blob_url = URL.createObjectURL(blob);
|
||||
|
||||
const iframe = document.createElement('iframe');
|
||||
document.body.appendChild(iframe);
|
||||
iframe.src = blob_url;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('Hello Blob', iframe.contentDocument.getElementById('test').textContent);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="multiple_blobs">
|
||||
{
|
||||
const blob1 = new Blob(['<html><body>First</body></html>'], { type: 'text/html' });
|
||||
const blob2 = new Blob(['<html><body>Second</body></html>'], { type: 'text/html' });
|
||||
const url1 = URL.createObjectURL(blob1);
|
||||
const url2 = URL.createObjectURL(blob2);
|
||||
|
||||
const iframe1 = document.createElement('iframe');
|
||||
document.body.appendChild(iframe1);
|
||||
iframe1.src = url1;
|
||||
|
||||
const iframe2 = document.createElement('iframe');
|
||||
document.body.appendChild(iframe2);
|
||||
iframe2.src = url2;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual('First', iframe1.contentDocument.body.textContent);
|
||||
testing.expectEqual('Second', iframe2.contentDocument.body.textContent);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
@@ -451,12 +451,12 @@
|
||||
const p1 = $('#p1');
|
||||
|
||||
// Test setStart with offset beyond node length
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setStart(p1, 999);
|
||||
});
|
||||
|
||||
// Test with negative offset (wraps to large u32)
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setStart(p1.firstChild, -1);
|
||||
});
|
||||
}
|
||||
@@ -468,12 +468,12 @@
|
||||
const p1 = $('#p1');
|
||||
|
||||
// Test setEnd with offset beyond node length
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setEnd(p1, 999);
|
||||
});
|
||||
|
||||
// Test with text node
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.setEnd(p1.firstChild, 9999);
|
||||
});
|
||||
}
|
||||
@@ -525,11 +525,11 @@
|
||||
range.setEnd(p1, 1);
|
||||
|
||||
// Test comparePoint with invalid offset
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.comparePoint(p1, 20);
|
||||
});
|
||||
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.comparePoint(p1.firstChild, -1);
|
||||
});
|
||||
}
|
||||
@@ -650,11 +650,11 @@
|
||||
range.setEnd(p1, 1);
|
||||
|
||||
// Invalid offset should throw IndexSizeError
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.isPointInRange(p1, 999);
|
||||
});
|
||||
|
||||
testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
|
||||
testing.expectError('IndexSizeError:', () => {
|
||||
range.isPointInRange(p1.firstChild, 9999);
|
||||
});
|
||||
}
|
||||
@@ -854,11 +854,11 @@
|
||||
range2.setStart(p, 0);
|
||||
|
||||
// Invalid how parameter should throw NotSupportedError
|
||||
testing.expectError('NotSupportedError: Not Supported', () => {
|
||||
testing.expectError('NotSupportedError:', () => {
|
||||
range1.compareBoundaryPoints(4, range2);
|
||||
});
|
||||
|
||||
testing.expectError('NotSupportedError: Not Supported', () => {
|
||||
testing.expectError('NotSupportedError:', () => {
|
||||
range1.compareBoundaryPoints(99, range2);
|
||||
});
|
||||
}
|
||||
@@ -883,7 +883,7 @@
|
||||
range2.setEnd(foreignP, 1);
|
||||
|
||||
// Comparing ranges in different documents should throw WrongDocumentError
|
||||
testing.expectError('WrongDocumentError: wrong_document_error', () => {
|
||||
testing.expectError('WrongDocumentError:', () => {
|
||||
range1.compareBoundaryPoints(Range.START_TO_START, range2);
|
||||
});
|
||||
}
|
||||
@@ -1022,3 +1022,50 @@
|
||||
testing.expectEqual('Stnd', div.textContent);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=getBoundingClientRect_collapsed>
|
||||
{
|
||||
const range = new Range();
|
||||
const rect = range.getBoundingClientRect();
|
||||
testing.expectTrue(rect instanceof DOMRect);
|
||||
testing.expectEqual(0, rect.x);
|
||||
testing.expectEqual(0, rect.y);
|
||||
testing.expectEqual(0, rect.width);
|
||||
testing.expectEqual(0, rect.height);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=getBoundingClientRect_element>
|
||||
{
|
||||
const range = new Range();
|
||||
const p = document.getElementById('p1');
|
||||
range.selectNodeContents(p);
|
||||
const rect = range.getBoundingClientRect();
|
||||
testing.expectTrue(rect instanceof DOMRect);
|
||||
// Non-collapsed range delegates to the container element
|
||||
const elemRect = p.getBoundingClientRect();
|
||||
testing.expectEqual(elemRect.x, rect.x);
|
||||
testing.expectEqual(elemRect.y, rect.y);
|
||||
testing.expectEqual(elemRect.width, rect.width);
|
||||
testing.expectEqual(elemRect.height, rect.height);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=getClientRects_collapsed>
|
||||
{
|
||||
const range = new Range();
|
||||
const rects = range.getClientRects();
|
||||
testing.expectEqual(0, rects.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=getClientRects_element>
|
||||
{
|
||||
const range = new Range();
|
||||
const p = document.getElementById('p1');
|
||||
range.selectNodeContents(p);
|
||||
const rects = range.getClientRects();
|
||||
const elemRects = p.getClientRects();
|
||||
testing.expectEqual(elemRects.length, rects.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
315
src/browser/tests/range_mutations.html
Normal file
315
src/browser/tests/range_mutations.html
Normal file
@@ -0,0 +1,315 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="testing.js"></script>
|
||||
|
||||
<script id=insertData_adjusts_range_offsets>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
// range covers "cde"
|
||||
|
||||
// Insert "XX" at offset 1 (before range start)
|
||||
text.insertData(1, 'XX');
|
||||
// "aXXbcdef" — range should shift right by 2
|
||||
testing.expectEqual(4, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_at_range_start>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert at exactly the start offset — should not shift start
|
||||
text.insertData(2, 'YY');
|
||||
// "abYYcdef" — start stays at 2, end shifts by 2
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_inside_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert inside the range
|
||||
text.insertData(3, 'Z');
|
||||
// "abcZdef" — start unchanged, end shifts by 1
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(6, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_after_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert after range end — no change
|
||||
text.insertData(5, 'ZZ');
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(5, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_before_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 3);
|
||||
range.setEnd(text, 5);
|
||||
// range covers "de"
|
||||
|
||||
// Delete "ab" (offset 0, count 2) — before range
|
||||
text.deleteData(0, 2);
|
||||
// "cdef" — range shifts left by 2
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_overlapping_range_start>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Delete from offset 1, count 2 — overlaps range start
|
||||
text.deleteData(1, 2);
|
||||
// "adef" — start clamped to offset(1), end adjusted
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_inside_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 1);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Delete inside range: offset 2, count 2
|
||||
text.deleteData(2, 2);
|
||||
// "abef" — start unchanged, end shifts by -2
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=replaceData_adjusts_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Replace "cd" (offset 2, count 2) with "XXXX" (4 chars)
|
||||
text.replaceData(2, 2, 'XXXX');
|
||||
// "abXXXXef" — start clamped to 2, end adjusted by (4-2)=+2
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=splitText_moves_range_to_new_node>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 4);
|
||||
range.setEnd(text, 6);
|
||||
// range covers "ef"
|
||||
|
||||
const newText = text.splitText(3);
|
||||
// text = "abc", newText = "def"
|
||||
// Range was at (text, 4)-(text, 6), with offset > 3:
|
||||
// start moves to (newText, 4-3=1), end moves to (newText, 6-3=3)
|
||||
testing.expectEqual(newText, range.startContainer);
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(newText, range.endContainer);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=splitText_range_at_split_point>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 0);
|
||||
range.setEnd(text, 3);
|
||||
// range covers "abc"
|
||||
|
||||
const newText = text.splitText(3);
|
||||
// text = "abc", newText = "def"
|
||||
// Range end is at exactly the split offset — should stay on original node
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(text, range.endContainer);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=appendChild_does_not_affect_range>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 0);
|
||||
range.setEnd(div, 2);
|
||||
|
||||
// Appending should not affect range offsets (spec: no update for append)
|
||||
const p3 = document.createElement('p');
|
||||
div.appendChild(p3);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(2, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertBefore_shifts_range_offsets>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 1);
|
||||
range.setEnd(div, 2);
|
||||
|
||||
// Insert before p1 (index 0) — range offsets > 0 should increment
|
||||
const span = document.createElement('span');
|
||||
div.insertBefore(span, p1);
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=removeChild_shifts_range_offsets>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
const p3 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
div.appendChild(p3);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 1);
|
||||
range.setEnd(div, 3);
|
||||
|
||||
// Remove p1 (index 0) — offsets > 0 should decrement
|
||||
div.removeChild(p1);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(2, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=removeChild_moves_range_from_descendant>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p = document.createElement('p');
|
||||
const text = document.createTextNode('hello');
|
||||
p.appendChild(text);
|
||||
div.appendChild(p);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 4);
|
||||
|
||||
// Remove p (which contains text) — range should move to (div, index_of_p)
|
||||
div.removeChild(p);
|
||||
testing.expectEqual(div, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(div, range.endContainer);
|
||||
testing.expectEqual(0, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=multiple_ranges_updated>
|
||||
{
|
||||
const text = document.createTextNode('abcdefgh');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range1 = document.createRange();
|
||||
range1.setStart(text, 1);
|
||||
range1.setEnd(text, 3);
|
||||
|
||||
const range2 = document.createRange();
|
||||
range2.setStart(text, 5);
|
||||
range2.setEnd(text, 7);
|
||||
|
||||
// Insert at offset 0 — both ranges should shift
|
||||
text.insertData(0, 'XX');
|
||||
testing.expectEqual(3, range1.startOffset);
|
||||
testing.expectEqual(5, range1.endOffset);
|
||||
testing.expectEqual(7, range2.startOffset);
|
||||
testing.expectEqual(9, range2.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=data_setter_updates_ranges>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Setting data replaces all content — range collapses to offset 0
|
||||
text.data = 'new content';
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(text, range.endContainer);
|
||||
testing.expectEqual(0, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
@@ -5,7 +5,7 @@
|
||||
<div id="host2"></div>
|
||||
<div id="host3"></div>
|
||||
|
||||
<!-- <script id="attachShadow_open">
|
||||
<script id="attachShadow_open">
|
||||
{
|
||||
const host = $('#host1');
|
||||
const shadow = host.attachShadow({ mode: 'open' });
|
||||
@@ -140,7 +140,7 @@
|
||||
shadow.replaceChildren('New content');
|
||||
testing.expectEqual('New content', shadow.innerHTML);
|
||||
}
|
||||
</script> -->
|
||||
</script>
|
||||
|
||||
<script id="getElementById">
|
||||
{
|
||||
@@ -154,3 +154,16 @@
|
||||
testing.expectEqual(null, shadow.getElementById('nonexistent'));
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
<script id=adoptedStyleSheets>
|
||||
{
|
||||
const host = document.createElement('div');
|
||||
const shadow = host.attachShadow({ mode: 'open' });
|
||||
|
||||
const acss = shadow.adoptedStyleSheets;
|
||||
testing.expectEqual(0, acss.length);
|
||||
acss.push(new CSSStyleSheet());
|
||||
testing.expectEqual(1, acss.length);
|
||||
}
|
||||
</script>
|
||||
|
||||
33
src/browser/tests/support/history.html
Normal file
33
src/browser/tests/support/history.html
Normal file
@@ -0,0 +1,33 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="../testing.js"></script>
|
||||
<script id=history>
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'manual';
|
||||
testing.expectEqual('manual', history.scrollRestoration);
|
||||
|
||||
history.scrollRestoration = 'auto';
|
||||
testing.expectEqual('auto', history.scrollRestoration);
|
||||
testing.expectEqual(null, history.state)
|
||||
|
||||
history.pushState({ testInProgress: true }, null, testing.BASE_URL + 'history_after_nav.skip.html');
|
||||
testing.expectEqual({ testInProgress: true }, history.state);
|
||||
|
||||
history.pushState({ testInProgress: false }, null, testing.ORIGIN + '/xhr/json');
|
||||
history.replaceState({ "new": "field", testComplete: true }, null);
|
||||
|
||||
let state = { "new": "field", testComplete: true };
|
||||
testing.expectEqual(state, history.state);
|
||||
|
||||
let popstateEventFired = false;
|
||||
let popstateEventState = null;
|
||||
|
||||
window.top.support_history_completed = true;
|
||||
window.addEventListener('popstate', (event) => {
|
||||
window.top.window.support_history_popstateEventFired = true;
|
||||
window.top.window.support_history_popstateEventState = event.state;
|
||||
});
|
||||
|
||||
history.back();
|
||||
</script>
|
||||
|
||||
@@ -99,8 +99,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
// our test runner sets this to true
|
||||
const IS_TEST_RUNNER = window._lightpanda_skip_auto_assert === true;
|
||||
const IS_TEST_RUNNER = window.navigator.userAgent.startsWith("Lightpanda/");
|
||||
|
||||
window.testing = {
|
||||
fail: fail,
|
||||
@@ -114,17 +113,17 @@
|
||||
eventually: eventually,
|
||||
IS_TEST_RUNNER: IS_TEST_RUNNER,
|
||||
HOST: '127.0.0.1',
|
||||
ORIGIN: 'http://127.0.0.1:9582/',
|
||||
ORIGIN: 'http://127.0.0.1:9582',
|
||||
BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/',
|
||||
};
|
||||
|
||||
if (!IS_TEST_RUNNER) {
|
||||
if (IS_TEST_RUNNER === false) {
|
||||
// The page is running in a different browser. Probably a developer making sure
|
||||
// a test is correct. There are a few tweaks we need to do to make this a
|
||||
// seemless, namely around adapting paths/urls.
|
||||
console.warn(`The page is not being executed in the test runner, certain behavior has been adjusted`);
|
||||
window.testing.HOST = location.hostname;
|
||||
window.testing.ORIGIN = location.origin + '/';
|
||||
window.testing.ORIGIN = location.origin;
|
||||
window.testing.BASE_URL = location.origin + '/src/browser/tests/';
|
||||
window.addEventListener('load', testing.assertOk);
|
||||
}
|
||||
|
||||
@@ -218,6 +218,106 @@
|
||||
testing.expectEqual('', url.password);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.username = 'newuser';
|
||||
testing.expectEqual('newuser', url.username);
|
||||
testing.expectEqual('https://newuser@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://olduser@example.com/path');
|
||||
url.username = 'newuser';
|
||||
testing.expectEqual('newuser', url.username);
|
||||
testing.expectEqual('https://newuser@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://olduser:pass@example.com/path');
|
||||
url.username = 'newuser';
|
||||
testing.expectEqual('newuser', url.username);
|
||||
testing.expectEqual('pass', url.password);
|
||||
testing.expectEqual('https://newuser:pass@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://user@example.com/path');
|
||||
url.password = 'secret';
|
||||
testing.expectEqual('user', url.username);
|
||||
testing.expectEqual('secret', url.password);
|
||||
testing.expectEqual('https://user:secret@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://user:oldpass@example.com/path');
|
||||
url.password = 'newpass';
|
||||
testing.expectEqual('user', url.username);
|
||||
testing.expectEqual('newpass', url.password);
|
||||
testing.expectEqual('https://user:newpass@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://user:pass@example.com/path');
|
||||
url.username = '';
|
||||
url.password = '';
|
||||
testing.expectEqual('', url.username);
|
||||
testing.expectEqual('', url.password);
|
||||
testing.expectEqual('https://example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.username = 'user@domain';
|
||||
testing.expectEqual('user%40domain', url.username);
|
||||
testing.expectEqual('https://user%40domain@example.com/path', url.href);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.username = 'user:name';
|
||||
testing.expectEqual('user%3Aname', url.username);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.password = 'pass@word';
|
||||
testing.expectEqual('pass%40word', url.password);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.password = 'pass:word';
|
||||
testing.expectEqual('pass%3Aword', url.password);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.username = 'user/name';
|
||||
testing.expectEqual('user%2Fname', url.username);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com/path');
|
||||
url.password = 'pass?word';
|
||||
testing.expectEqual('pass%3Fword', url.password);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://user%40domain:pass%3Aword@example.com/path');
|
||||
testing.expectEqual('user%40domain', url.username);
|
||||
testing.expectEqual('pass%3Aword', url.password);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('https://example.com:8080/path?a=b#hash');
|
||||
url.username = 'user';
|
||||
url.password = 'pass';
|
||||
testing.expectEqual('https://user:pass@example.com:8080/path?a=b#hash', url.href);
|
||||
testing.expectEqual('8080', url.port);
|
||||
testing.expectEqual('?a=b', url.search);
|
||||
testing.expectEqual('#hash', url.hash);
|
||||
}
|
||||
|
||||
{
|
||||
const url = new URL('http://user:pass@example.com:8080/path?query=1#hash');
|
||||
testing.expectEqual('http:', url.protocol);
|
||||
@@ -437,9 +537,9 @@
|
||||
{
|
||||
const url = new URL('https://example.com:8080/path');
|
||||
url.host = 'newhost.com';
|
||||
testing.expectEqual('https://newhost.com/path', url.href);
|
||||
testing.expectEqual('https://newhost.com:8080/path', url.href);
|
||||
testing.expectEqual('newhost.com', url.hostname);
|
||||
testing.expectEqual('', url.port);
|
||||
testing.expectEqual('8080', url.port);
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload=func1></body>
|
||||
<body onload="func1(event)"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad1>
|
||||
@@ -14,4 +14,3 @@
|
||||
testing.expectEqual(1, called);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload="loaded()"></body>
|
||||
<body onload="loadEvent = event"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad2>
|
||||
let called = 0;
|
||||
function loaded(e) {
|
||||
called += 1;
|
||||
}
|
||||
// Per spec, the handler is compiled as: function(event) { loadEvent = event }
|
||||
// Verify: handler fires, "event" parameter is a proper Event, and handler is a function.
|
||||
let loadEvent = null;
|
||||
|
||||
testing.eventually(() => {
|
||||
testing.expectEqual(1, called);
|
||||
testing.expectEqual("function", typeof document.body.onload);
|
||||
testing.expectTrue(loadEvent instanceof Event);
|
||||
testing.expectEqual("load", loadEvent.type);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
28
src/browser/tests/window/body_onload3.html
Normal file
28
src/browser/tests/window/body_onload3.html
Normal file
@@ -0,0 +1,28 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload="called++"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad3>
|
||||
// Per spec, the handler is compiled as: function(event) { called++ }
|
||||
// Verify: handler fires exactly once, and body.onload reflects to window.onload.
|
||||
let called = 0;
|
||||
|
||||
testing.eventually(() => {
|
||||
// The attribute handler should have fired exactly once.
|
||||
testing.expectEqual(1, called);
|
||||
|
||||
// body.onload is a Window-reflecting handler per spec.
|
||||
testing.expectEqual("function", typeof document.body.onload);
|
||||
testing.expectEqual(document.body.onload, window.onload);
|
||||
|
||||
// Setting body.onload via property replaces the attribute handler.
|
||||
let propertyCalled = false;
|
||||
document.body.onload = function() { propertyCalled = true; };
|
||||
testing.expectEqual(document.body.onload, window.onload);
|
||||
|
||||
// Setting onload to null removes the handler.
|
||||
document.body.onload = null;
|
||||
testing.expectEqual(null, document.body.onload);
|
||||
testing.expectEqual(null, window.onload);
|
||||
});
|
||||
</script>
|
||||
@@ -82,7 +82,7 @@
|
||||
testing.expectEqual('ceil', atob('Y2VpbA')); // 6 chars, len%4==2, needs '=='
|
||||
|
||||
// length % 4 == 1 must still throw
|
||||
testing.expectError('Error: InvalidCharacterError', () => {
|
||||
testing.expectError('InvalidCharacterError', () => {
|
||||
atob('Y');
|
||||
});
|
||||
</script>
|
||||
@@ -125,6 +125,143 @@
|
||||
testing.expectEqual(screen, window.screen);
|
||||
</script>
|
||||
|
||||
<script id=structuredClone>
|
||||
// Basic types
|
||||
testing.expectEqual(42, structuredClone(42));
|
||||
testing.expectEqual('hello', structuredClone('hello'));
|
||||
testing.expectEqual(true, structuredClone(true));
|
||||
testing.expectEqual(null, structuredClone(null));
|
||||
testing.expectEqual(undefined, structuredClone(undefined));
|
||||
|
||||
// Objects and arrays (these work with JSON too, but verify they're cloned)
|
||||
const obj = { a: 1, b: { c: 2 } };
|
||||
const clonedObj = structuredClone(obj);
|
||||
testing.expectEqual(1, clonedObj.a);
|
||||
testing.expectEqual(2, clonedObj.b.c);
|
||||
clonedObj.b.c = 999;
|
||||
testing.expectEqual(2, obj.b.c); // original unchanged
|
||||
|
||||
const arr = [1, [2, 3]];
|
||||
const clonedArr = structuredClone(arr);
|
||||
testing.expectEqual(1, clonedArr[0]);
|
||||
testing.expectEqual(2, clonedArr[1][0]);
|
||||
clonedArr[1][0] = 999;
|
||||
testing.expectEqual(2, arr[1][0]); // original unchanged
|
||||
|
||||
// Date - JSON would stringify to ISO string
|
||||
const date = new Date('2024-01-15T12:30:00Z');
|
||||
const clonedDate = structuredClone(date);
|
||||
testing.expectEqual(true, clonedDate instanceof Date);
|
||||
testing.expectEqual(date.getTime(), clonedDate.getTime());
|
||||
testing.expectEqual(date.toISOString(), clonedDate.toISOString());
|
||||
|
||||
// RegExp - JSON would stringify to {}
|
||||
const regex = /test\d+/gi;
|
||||
const clonedRegex = structuredClone(regex);
|
||||
testing.expectEqual(true, clonedRegex instanceof RegExp);
|
||||
testing.expectEqual(regex.source, clonedRegex.source);
|
||||
testing.expectEqual(regex.flags, clonedRegex.flags);
|
||||
testing.expectEqual(true, clonedRegex.test('test123'));
|
||||
|
||||
// Map - JSON can't handle
|
||||
const map = new Map([['a', 1], ['b', 2]]);
|
||||
const clonedMap = structuredClone(map);
|
||||
testing.expectEqual(true, clonedMap instanceof Map);
|
||||
testing.expectEqual(2, clonedMap.size);
|
||||
testing.expectEqual(1, clonedMap.get('a'));
|
||||
testing.expectEqual(2, clonedMap.get('b'));
|
||||
|
||||
// Set - JSON can't handle
|
||||
const set = new Set([1, 2, 3]);
|
||||
const clonedSet = structuredClone(set);
|
||||
testing.expectEqual(true, clonedSet instanceof Set);
|
||||
testing.expectEqual(3, clonedSet.size);
|
||||
testing.expectEqual(true, clonedSet.has(1));
|
||||
testing.expectEqual(true, clonedSet.has(2));
|
||||
testing.expectEqual(true, clonedSet.has(3));
|
||||
|
||||
// ArrayBuffer
|
||||
const buffer = new ArrayBuffer(8);
|
||||
const view = new Uint8Array(buffer);
|
||||
view[0] = 42;
|
||||
view[7] = 99;
|
||||
const clonedBuffer = structuredClone(buffer);
|
||||
testing.expectEqual(true, clonedBuffer instanceof ArrayBuffer);
|
||||
testing.expectEqual(8, clonedBuffer.byteLength);
|
||||
const clonedView = new Uint8Array(clonedBuffer);
|
||||
testing.expectEqual(42, clonedView[0]);
|
||||
testing.expectEqual(99, clonedView[7]);
|
||||
|
||||
// TypedArray
|
||||
const typedArr = new Uint32Array([100, 200, 300]);
|
||||
const clonedTypedArr = structuredClone(typedArr);
|
||||
testing.expectEqual(true, clonedTypedArr instanceof Uint32Array);
|
||||
testing.expectEqual(3, clonedTypedArr.length);
|
||||
testing.expectEqual(100, clonedTypedArr[0]);
|
||||
testing.expectEqual(200, clonedTypedArr[1]);
|
||||
testing.expectEqual(300, clonedTypedArr[2]);
|
||||
|
||||
// Special number values - JSON can't preserve these
|
||||
testing.expectEqual(true, Number.isNaN(structuredClone(NaN)));
|
||||
testing.expectEqual(Infinity, structuredClone(Infinity));
|
||||
testing.expectEqual(-Infinity, structuredClone(-Infinity));
|
||||
|
||||
// Object with undefined value - JSON would omit it
|
||||
const objWithUndef = { a: 1, b: undefined, c: 3 };
|
||||
const clonedObjWithUndef = structuredClone(objWithUndef);
|
||||
testing.expectEqual(1, clonedObjWithUndef.a);
|
||||
testing.expectEqual(undefined, clonedObjWithUndef.b);
|
||||
testing.expectEqual(true, 'b' in clonedObjWithUndef);
|
||||
testing.expectEqual(3, clonedObjWithUndef.c);
|
||||
|
||||
// Error objects
|
||||
const error = new Error('test error');
|
||||
const clonedError = structuredClone(error);
|
||||
testing.expectEqual(true, clonedError instanceof Error);
|
||||
testing.expectEqual('test error', clonedError.message);
|
||||
|
||||
// TypeError
|
||||
const typeError = new TypeError('type error');
|
||||
const clonedTypeError = structuredClone(typeError);
|
||||
testing.expectEqual(true, clonedTypeError instanceof TypeError);
|
||||
testing.expectEqual('type error', clonedTypeError.message);
|
||||
|
||||
// BigInt
|
||||
const bigInt = BigInt('9007199254740993');
|
||||
const clonedBigInt = structuredClone(bigInt);
|
||||
testing.expectEqual(bigInt, clonedBigInt);
|
||||
|
||||
// Circular references ARE supported by structuredClone (unlike JSON)
|
||||
const circular = { a: 1 };
|
||||
circular.self = circular;
|
||||
const clonedCircular = structuredClone(circular);
|
||||
testing.expectEqual(1, clonedCircular.a);
|
||||
testing.expectEqual(clonedCircular, clonedCircular.self); // circular ref preserved
|
||||
|
||||
// Functions cannot be cloned - should throw
|
||||
{
|
||||
let threw = false;
|
||||
try {
|
||||
structuredClone(() => {});
|
||||
} catch (err) {
|
||||
threw = true;
|
||||
// Just verify an error was thrown - V8's message format may vary
|
||||
}
|
||||
testing.expectEqual(true, threw);
|
||||
}
|
||||
|
||||
// Symbols cannot be cloned - should throw
|
||||
{
|
||||
let threw = false;
|
||||
try {
|
||||
structuredClone(Symbol('test'));
|
||||
} catch (err) {
|
||||
threw = true;
|
||||
}
|
||||
testing.expectEqual(true, threw);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=unhandled_rejection>
|
||||
{
|
||||
let unhandledCalled = 0;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user