From bf6dbedbe43c4706c1053e72d99a55f39134e864 Mon Sep 17 00:00:00 2001 From: Salman Muin Kayser Chishti <13schishti@gmail.com> Date: Sat, 14 Mar 2026 09:11:46 +0000 Subject: [PATCH 01/17] Upgrade GitHub Actions for Node 24 compatibility Signed-off-by: Salman Muin Kayser Chishti <13schishti@gmail.com> --- .github/actions/install/action.yml | 2 +- .github/workflows/build.yml | 6 +++--- .github/workflows/e2e-integration-test.yml | 8 ++++---- .github/workflows/e2e-test.yml | 24 +++++++++++----------- .github/workflows/wpt.yml | 12 +++++------ .github/workflows/zig-test.yml | 8 ++++---- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/.github/actions/install/action.yml b/.github/actions/install/action.yml index 4c0a28f9..ac054c8f 100644 --- a/.github/actions/install/action.yml +++ b/.github/actions/install/action.yml @@ -46,7 +46,7 @@ runs: - name: Cache v8 id: cache-v8 - uses: actions/cache@v4 + uses: actions/cache@v5 env: cache-name: cache-v8 with: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d2bcde3b..7cb213a9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -72,7 +72,7 @@ jobs: timeout-minutes: 20 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. @@ -116,7 +116,7 @@ jobs: timeout-minutes: 20 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. @@ -158,7 +158,7 @@ jobs: timeout-minutes: 20 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. 
diff --git a/.github/workflows/e2e-integration-test.yml b/.github/workflows/e2e-integration-test.yml index 1a0217bb..dff1fb59 100644 --- a/.github/workflows/e2e-integration-test.yml +++ b/.github/workflows/e2e-integration-test.yml @@ -20,7 +20,7 @@ jobs: if: github.event.pull_request.draft == false steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. @@ -32,7 +32,7 @@ jobs: run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: lightpanda-build-release path: | @@ -47,7 +47,7 @@ jobs: timeout-minutes: 15 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: repository: 'lightpanda-io/demo' fetch-depth: 0 @@ -55,7 +55,7 @@ jobs: - run: npm install - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml index 675dd36b..d0e945df 100644 --- a/.github/workflows/e2e-test.yml +++ b/.github/workflows/e2e-test.yml @@ -61,7 +61,7 @@ jobs: run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: lightpanda-build-release path: | @@ -76,7 +76,7 @@ jobs: timeout-minutes: 15 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: repository: 'lightpanda-io/demo' fetch-depth: 0 @@ -84,7 +84,7 @@ jobs: - run: npm install - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release @@ -126,7 +126,7 @@ jobs: timeout-minutes: 15 steps: - - uses: 
actions/checkout@v4 + - uses: actions/checkout@v6 with: repository: 'lightpanda-io/demo' fetch-depth: 0 @@ -134,7 +134,7 @@ jobs: - run: npm install - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release @@ -189,7 +189,7 @@ jobs: timeout-minutes: 5 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: repository: 'lightpanda-io/demo' fetch-depth: 0 @@ -197,7 +197,7 @@ jobs: - run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release @@ -239,7 +239,7 @@ jobs: timeout-minutes: 15 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: repository: 'lightpanda-io/demo' fetch-depth: 0 @@ -247,7 +247,7 @@ jobs: - run: npm install - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release @@ -333,7 +333,7 @@ jobs: echo "${{github.sha}}" > commit.txt - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: bench-results path: | @@ -361,7 +361,7 @@ jobs: steps: - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: bench-results @@ -379,7 +379,7 @@ jobs: steps: - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release diff --git a/.github/workflows/wpt.yml b/.github/workflows/wpt.yml index 71d485d0..ef887612 100644 --- a/.github/workflows/wpt.yml +++ b/.github/workflows/wpt.yml @@ -35,7 +35,7 @@ jobs: run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }}) - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: 
lightpanda-build-release path: | @@ -59,7 +59,7 @@ jobs: CGO_ENABLED=0 go build - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: wptrunner path: | @@ -91,14 +91,14 @@ jobs: run: ./wpt manifest - name: download lightpanda release - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: lightpanda-build-release - run: chmod a+x ./lightpanda - name: download wptrunner - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: wptrunner @@ -116,7 +116,7 @@ jobs: echo "${{github.sha}}" > commit.txt - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: wpt-results path: | @@ -139,7 +139,7 @@ jobs: steps: - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: wpt-results diff --git a/.github/workflows/zig-test.yml b/.github/workflows/zig-test.yml index db2f362d..ca967c3a 100644 --- a/.github/workflows/zig-test.yml +++ b/.github/workflows/zig-test.yml @@ -44,7 +44,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 # fetch submodules recusively, to get zig-js-runtime submodules also. 
@@ -83,7 +83,7 @@ jobs: echo "${{github.sha}}" > commit.txt - name: upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: bench-results path: | @@ -109,7 +109,7 @@ jobs: steps: - name: download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v8 with: name: bench-results From f754773bf6ae3d7627bd9683bba1e108447599d6 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Sat, 14 Mar 2026 09:13:50 +0800 Subject: [PATCH 02/17] window.postMessage across frames Depends on https://github.com/lightpanda-io/zig-v8-fork/pull/160 Improves postMessage support, specifically for use across frames. This commit also addresses a few other issues (identified while implementing this). 1 - Since macrotasks can schedule more macrotasks, we need to check the time-to- next microtask after all microtasks have completed. 2 - frame's onload callback is triggered from the frame's context, but has to execute on the parents contet. --- build.zig.zon | 4 +- src/browser/Browser.zig | 15 ++++-- src/browser/Page.zig | 12 +++-- src/browser/Session.zig | 32 ++++++------ src/browser/js/Context.zig | 4 ++ src/browser/js/Env.zig | 19 ++++--- src/browser/js/Scheduler.zig | 28 ++++++---- src/browser/tests/element/html/anchor.html | 2 +- src/browser/tests/element/html/form.html | 2 +- src/browser/tests/element/html/image.html | 2 +- src/browser/tests/element/html/link.html | 2 +- src/browser/tests/frames/post_message.html | 25 +++++++++ .../frames/support/message_receiver.html | 9 ++++ src/browser/tests/testing.js | 4 +- src/browser/webapi/Window.zig | 51 +++++++++++++------ 15 files changed, 145 insertions(+), 66 deletions(-) create mode 100644 src/browser/tests/frames/post_message.html create mode 100644 src/browser/tests/frames/support/message_receiver.html diff --git a/build.zig.zon b/build.zig.zon index 9a28408b..33fc0a09 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -5,8 +5,8 @@ .minimum_zig_version = "0.15.2", .dependencies = 
.{ .v8 = .{ - .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.3.tar.gz", - .hash = "v8-0.0.0-xddH6yx3BAAGD9jSoq_ttt_bk9MectTU44s_HZxxE5LD", + .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/34cb5f50227047c6cc1b2af73dad958c267f0a83.tar.gz", + .hash = "v8-0.0.0-xddH6_F3BAAiFvKY6R1H-gkuQlk19BkDQ0--uZuTrSup", }, // .v8 = .{ .path = "../zig-v8-fork" }, .brotli = .{ diff --git a/src/browser/Browser.zig b/src/browser/Browser.zig index 8f8c4aa2..50a7c037 100644 --- a/src/browser/Browser.zig +++ b/src/browser/Browser.zig @@ -91,25 +91,32 @@ pub fn runMicrotasks(self: *Browser) void { self.env.runMicrotasks(); } -pub fn runMacrotasks(self: *Browser) !?u64 { +pub fn runMacrotasks(self: *Browser) !void { const env = &self.env; - const time_to_next = try self.env.runMacrotasks(); + try self.env.runMacrotasks(); env.pumpMessageLoop(); // either of the above could have queued more microtasks env.runMicrotasks(); - - return time_to_next; } pub fn hasBackgroundTasks(self: *Browser) bool { return self.env.hasBackgroundTasks(); } + pub fn waitForBackgroundTasks(self: *Browser) void { self.env.waitForBackgroundTasks(); } +pub fn msToNextMacrotask(self: *Browser) ?u64 { + return self.env.msToNextMacrotask(); +} + +pub fn msTo(self: *Browser) bool { + return self.env.hasBackgroundTasks(); +} + pub fn runIdleTasks(self: *const Browser) void { self.env.runIdleTasks(); } diff --git a/src/browser/Page.zig b/src/browser/Page.zig index cb62cb31..b290b862 100644 --- a/src/browser/Page.zig +++ b/src/browser/Page.zig @@ -709,11 +709,14 @@ pub fn scriptsCompletedLoading(self: *Page) void { } pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void { - blk: { - var ls: JS.Local.Scope = undefined; - self.js.localScope(&ls); - defer ls.deinit(); + var ls: JS.Local.Scope = undefined; + self.js.localScope(&ls); + defer ls.deinit(); + const entered = self.js.enter(&ls.handle_scope); + defer entered.exit(); + + blk: { const event = 
Event.initTrusted(comptime .wrap("load"), .{}, self) catch |err| { log.err(.page, "iframe event init", .{ .err = err, .url = iframe._src }); break :blk; @@ -722,6 +725,7 @@ pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void { log.warn(.js, "iframe onload", .{ .err = err, .url = iframe._src }); }; } + self.pendingLoadCompleted(); } diff --git a/src/browser/Session.zig b/src/browser/Session.zig index fea56a87..4f605ec0 100644 --- a/src/browser/Session.zig +++ b/src/browser/Session.zig @@ -401,7 +401,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult { // scheduler.run could trigger new http transfers, so do not // store http_client.active BEFORE this call and then use // it AFTER. - const ms_to_next_task = try browser.runMacrotasks(); + try browser.runMacrotasks(); // Each call to this runs scheduled load events. try page.dispatchLoad(); @@ -423,16 +423,16 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult { std.debug.assert(http_client.intercepted == 0); } - var ms: u64 = ms_to_next_task orelse blk: { - if (wait_ms - ms_remaining < 100) { - if (comptime builtin.is_test) { - return .done; - } - // Look, we want to exit ASAP, but we don't want - // to exit so fast that we've run none of the - // background jobs. - break :blk 50; - } + var ms = blk: { + // if (wait_ms - ms_remaining < 100) { + // if (comptime builtin.is_test) { + // return .done; + // } + // // Look, we want to exit ASAP, but we don't want + // // to exit so fast that we've run none of the + // // background jobs. + // break :blk 50; + // } if (browser.hasBackgroundTasks()) { // _we_ have nothing to run, but v8 is working on @@ -441,9 +441,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult { break :blk 20; } - // No http transfers, no cdp extra socket, no - // scheduled tasks, we're done. 
- return .done; + break :blk browser.msToNextMacrotask() orelse return .done; }; if (ms > ms_remaining) { @@ -470,9 +468,9 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult { // We're here because we either have active HTTP // connections, or exit_when_done == false (aka, there's // an cdp_socket registered with the http client). - // We should continue to run lowPriority tasks, so we - // minimize how long we'll poll for network I/O. - var ms_to_wait = @min(200, ms_to_next_task orelse 200); + // We should continue to run tasks, so we minimize how long + // we'll poll for network I/O. + var ms_to_wait = @min(200, browser.msToNextMacrotask() orelse 200); if (ms_to_wait > 10 and browser.hasBackgroundTasks()) { // if we have background tasks, we don't want to wait too // long for a message from the client. We want to go back diff --git a/src/browser/js/Context.zig b/src/browser/js/Context.zig index 5c58c5cb..a972b6c7 100644 --- a/src/browser/js/Context.zig +++ b/src/browser/js/Context.zig @@ -252,6 +252,10 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type return l.toLocal(global); } +pub fn getIncumbent(self: *Context) *Page { + return fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).page; +} + pub fn stringToPersistedFunction( self: *Context, function_body: []const u8, diff --git a/src/browser/js/Env.zig b/src/browser/js/Env.zig index ba2e3e5a..1ac9e6b3 100644 --- a/src/browser/js/Env.zig +++ b/src/browser/js/Env.zig @@ -382,8 +382,7 @@ pub fn runMicrotasks(self: *Env) void { } } -pub fn runMacrotasks(self: *Env) !?u64 { - var ms_to_next_task: ?u64 = null; +pub fn runMacrotasks(self: *Env) !void { for (self.contexts[0..self.context_count]) |ctx| { if (comptime builtin.is_test == false) { // I hate this comptime check as much as you do. 
But we have tests @@ -398,13 +397,17 @@ pub fn runMacrotasks(self: *Env) !?u64 { var hs: js.HandleScope = undefined; const entered = ctx.enter(&hs); defer entered.exit(); - - const ms = (try ctx.scheduler.run()) orelse continue; - if (ms_to_next_task == null or ms < ms_to_next_task.?) { - ms_to_next_task = ms; - } + try ctx.scheduler.run(); } - return ms_to_next_task; +} + +pub fn msToNextMacrotask(self: *Env) ?u64 { + var next_task: u64 = std.math.maxInt(u64); + for (self.contexts[0..self.context_count]) |ctx| { + const candidate = ctx.scheduler.msToNextHigh() orelse continue; + next_task = @min(candidate, next_task); + } + return if (next_task == std.math.maxInt(u64)) null else next_task; } pub fn pumpMessageLoop(self: *const Env) void { diff --git a/src/browser/js/Scheduler.zig b/src/browser/js/Scheduler.zig index e667a872..322351f3 100644 --- a/src/browser/js/Scheduler.zig +++ b/src/browser/js/Scheduler.zig @@ -74,9 +74,10 @@ pub fn add(self: *Scheduler, ctx: *anyopaque, cb: Callback, run_in_ms: u32, opts }); } -pub fn run(self: *Scheduler) !?u64 { - _ = try self.runQueue(&self.low_priority); - return self.runQueue(&self.high_priority); +pub fn run(self: *Scheduler) !void { + const now = milliTimestamp(.monotonic); + try self.runQueue(&self.low_priority, now); + try self.runQueue(&self.high_priority, now); } pub fn hasReadyTasks(self: *Scheduler) bool { @@ -84,16 +85,23 @@ pub fn hasReadyTasks(self: *Scheduler) bool { return queueuHasReadyTask(&self.low_priority, now) or queueuHasReadyTask(&self.high_priority, now); } -fn runQueue(self: *Scheduler, queue: *Queue) !?u64 { - if (queue.count() == 0) { - return null; - } - +pub fn msToNextHigh(self: *Scheduler) ?u64 { + const task = self.high_priority.peek() orelse return null; const now = milliTimestamp(.monotonic); + if (task.run_at <= now) { + return 0; + } + return @intCast(task.run_at - now); +} + +fn runQueue(self: *Scheduler, queue: *Queue, now: u64) !void { + if (queue.count() == 0) { + return; + } while 
(queue.peek()) |*task_| { if (task_.run_at > now) { - return @intCast(task_.run_at - now); + return; } var task = queue.remove(); if (comptime IS_DEBUG) { @@ -114,7 +122,7 @@ fn runQueue(self: *Scheduler, queue: *Queue) !?u64 { try self.low_priority.add(task); } } - return null; + return; } fn queueuHasReadyTask(queue: *Queue, now: u64) bool { diff --git a/src/browser/tests/element/html/anchor.html b/src/browser/tests/element/html/anchor.html index 0522163f..74bf486c 100644 --- a/src/browser/tests/element/html/anchor.html +++ b/src/browser/tests/element/html/anchor.html @@ -12,7 +12,7 @@ testing.expectEqual('', $('#a0').href); testing.expectEqual(testing.BASE_URL + 'element/anchor1.html', $('#a1').href); - testing.expectEqual(testing.ORIGIN + 'hello/world/anchor2.html', $('#a2').href); + testing.expectEqual(testing.ORIGIN + '/hello/world/anchor2.html', $('#a2').href); testing.expectEqual('https://www.openmymind.net/Elixirs-With-Statement/', $('#a3').href); testing.expectEqual(testing.BASE_URL + 'element/html/foo', $('#link').href); diff --git a/src/browser/tests/element/html/form.html b/src/browser/tests/element/html/form.html index f62cb221..17743135 100644 --- a/src/browser/tests/element/html/form.html +++ b/src/browser/tests/element/html/form.html @@ -32,7 +32,7 @@ testing.expectEqual(testing.BASE_URL + 'element/html/hello', form.action) form.action = '/hello'; - testing.expectEqual(testing.ORIGIN + 'hello', form.action) + testing.expectEqual(testing.ORIGIN + '/hello', form.action) form.action = 'https://lightpanda.io/hello'; testing.expectEqual('https://lightpanda.io/hello', form.action) diff --git a/src/browser/tests/element/html/image.html b/src/browser/tests/element/html/image.html index 92cd947d..baa09918 100644 --- a/src/browser/tests/element/html/image.html +++ b/src/browser/tests/element/html/image.html @@ -37,7 +37,7 @@ testing.expectEqual('test.png', img.getAttribute('src')); img.src = '/absolute/path.png'; - testing.expectEqual(testing.ORIGIN + 
'absolute/path.png', img.src); + testing.expectEqual(testing.ORIGIN + '/absolute/path.png', img.src); testing.expectEqual('/absolute/path.png', img.getAttribute('src')); img.src = 'https://example.com/image.png'; diff --git a/src/browser/tests/element/html/link.html b/src/browser/tests/element/html/link.html index bed5e6ab..4d967e37 100644 --- a/src/browser/tests/element/html/link.html +++ b/src/browser/tests/element/html/link.html @@ -8,7 +8,7 @@ testing.expectEqual('https://lightpanda.io/opensource-browser/15', l2.href); l2.href = '/over/9000'; - testing.expectEqual(testing.ORIGIN + 'over/9000', l2.href); + testing.expectEqual(testing.ORIGIN + '/over/9000', l2.href); l2.crossOrigin = 'nope'; testing.expectEqual('anonymous', l2.crossOrigin); diff --git a/src/browser/tests/frames/post_message.html b/src/browser/tests/frames/post_message.html new file mode 100644 index 00000000..6d206b74 --- /dev/null +++ b/src/browser/tests/frames/post_message.html @@ -0,0 +1,25 @@ + + + + + + diff --git a/src/browser/tests/frames/support/message_receiver.html b/src/browser/tests/frames/support/message_receiver.html new file mode 100644 index 00000000..55612a7c --- /dev/null +++ b/src/browser/tests/frames/support/message_receiver.html @@ -0,0 +1,9 @@ + + diff --git a/src/browser/tests/testing.js b/src/browser/tests/testing.js index 987ba042..01bb19db 100644 --- a/src/browser/tests/testing.js +++ b/src/browser/tests/testing.js @@ -114,7 +114,7 @@ eventually: eventually, IS_TEST_RUNNER: IS_TEST_RUNNER, HOST: '127.0.0.1', - ORIGIN: 'http://127.0.0.1:9582/', + ORIGIN: 'http://127.0.0.1:9582', BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/', }; @@ -124,7 +124,7 @@ // seemless, namely around adapting paths/urls. 
console.warn(`The page is not being executed in the test runner, certain behavior has been adjusted`); window.testing.HOST = location.hostname; - window.testing.ORIGIN = location.origin + '/'; + window.testing.ORIGIN = location.origin; window.testing.BASE_URL = location.origin + '/src/browser/tests/'; window.addEventListener('load', testing.assertOk); } diff --git a/src/browser/webapi/Window.zig b/src/browser/webapi/Window.zig index 0f288398..099cad65 100644 --- a/src/browser/webapi/Window.zig +++ b/src/browser/webapi/Window.zig @@ -66,6 +66,7 @@ _on_load: ?js.Function.Global = null, _on_pageshow: ?js.Function.Global = null, _on_popstate: ?js.Function.Global = null, _on_error: ?js.Function.Global = null, +_on_message: ?js.Function.Global = null, _on_unhandled_rejection: ?js.Function.Global = null, // TODO: invoke on error _location: *Location, _timer_id: u30 = 0, @@ -208,6 +209,14 @@ pub fn setOnError(self: *Window, setter: ?FunctionSetter) void { self._on_error = getFunctionFromSetter(setter); } +pub fn getOnMessage(self: *const Window) ?js.Function.Global { + return self._on_message; +} + +pub fn setOnMessage(self: *Window, setter: ?FunctionSetter) void { + self._on_message = getFunctionFromSetter(setter); +} + pub fn getOnUnhandledRejection(self: *const Window) ?js.Function.Global { return self._on_unhandled_rejection; } @@ -369,19 +378,26 @@ pub fn postMessage(self: *Window, message: js.Value.Temp, target_origin: ?[]cons // In a full implementation, we would validate the origin _ = target_origin; - // postMessage queues a task (not a microtask), so use the scheduler - const arena = try page.getArena(.{ .debug = "Window.schedule" }); - errdefer page.releaseArena(arena); + // self = the window that will get the message + // page = the context calling postMessage + const target_page = self._page; + const source_window = target_page.js.getIncumbent().window; - const origin = try self._location.getOrigin(page); + const arena = try target_page.getArena(.{ .debug = 
"Window.postMessage" }); + errdefer target_page.releaseArena(arena); + + // Origin should be the source window's origin (where the message came from) + const origin = try source_window._location.getOrigin(page); const callback = try arena.create(PostMessageCallback); callback.* = .{ - .page = page, .arena = arena, .message = message, + .page = target_page, + .source = source_window, .origin = try arena.dupe(u8, origin), }; - try page.js.scheduler.add(callback, PostMessageCallback.run, 0, .{ + + try target_page.js.scheduler.add(callback, PostMessageCallback.run, 0, .{ .name = "postMessage", .low_priority = false, .finalizer = PostMessageCallback.cancelled, @@ -702,6 +718,7 @@ const ScheduleCallback = struct { const PostMessageCallback = struct { page: *Page, + source: *Window, arena: Allocator, origin: []const u8, message: js.Value.Temp, @@ -712,7 +729,7 @@ const PostMessageCallback = struct { fn cancelled(ctx: *anyopaque) void { const self: *PostMessageCallback = @ptrCast(@alignCast(ctx)); - self.page.releaseArena(self.arena); + self.deinit(); } fn run(ctx: *anyopaque) !?u32 { @@ -722,14 +739,17 @@ const PostMessageCallback = struct { const page = self.page; const window = page.window; - const event = (try MessageEvent.initTrusted(comptime .wrap("message"), .{ - .data = self.message, - .origin = self.origin, - .source = window, - .bubbles = false, - .cancelable = false, - }, page)).asEvent(); - try page._event_manager.dispatch(window.asEventTarget(), event); + const event_target = window.asEventTarget(); + if (page._event_manager.hasDirectListeners(event_target, "message", window._on_message)) { + const event = (try MessageEvent.initTrusted(comptime .wrap("message"), .{ + .data = self.message, + .origin = self.origin, + .source = self.source, + .bubbles = false, + .cancelable = false, + }, page)).asEvent(); + try page._event_manager.dispatchDirect(event_target, event, window._on_message, .{ .context = "window.postMessage" }); + } return null; } @@ -783,6 +803,7 @@ 
pub const JsApi = struct { pub const onpageshow = bridge.accessor(Window.getOnPageShow, Window.setOnPageShow, .{}); pub const onpopstate = bridge.accessor(Window.getOnPopState, Window.setOnPopState, .{}); pub const onerror = bridge.accessor(Window.getOnError, Window.setOnError, .{}); + pub const onmessage = bridge.accessor(Window.getOnMessage, Window.setOnMessage, .{}); pub const onunhandledrejection = bridge.accessor(Window.getOnUnhandledRejection, Window.setOnUnhandledRejection, .{}); pub const fetch = bridge.function(Window.fetch, .{}); pub const queueMicrotask = bridge.function(Window.queueMicrotask, .{}); From 3dcdaa0a9bf7c4a138f70dab4672a48d79276c5a Mon Sep 17 00:00:00 2001 From: Matt Van Horn <455140+mvanhorn@users.noreply.github.com> Date: Sat, 14 Mar 2026 14:15:40 -0700 Subject: [PATCH 03/17] Implement charset detection from first 1024 bytes of HTML Per the HTML spec, browsers should detect charset from tags in the first 1024 bytes of a document when the HTTP Content-Type header doesn't specify one. Adds Mime.prescanCharset() which scans for: - - Integrates into the page loading flow to set the detected charset on the Mime when no explicit HTTP charset was provided. Fixes #531 --- src/browser/Mime.zig | 160 +++++++++++++++++++++++++++++++++++++++++++ src/browser/Page.zig | 14 +++- 2 files changed, 173 insertions(+), 1 deletion(-) diff --git a/src/browser/Mime.zig b/src/browser/Mime.zig index 43ca3632..beef2177 100644 --- a/src/browser/Mime.zig +++ b/src/browser/Mime.zig @@ -168,6 +168,137 @@ pub fn parse(input: []u8) !Mime { }; } +/// Prescan the first 1024 bytes of an HTML document for a charset declaration. +/// Looks for `` and ``. +/// Returns the charset value or null if none found. 
+/// See: https://www.w3.org/International/questions/qa-html-encoding-declarations +pub fn prescanCharset(html: []const u8) ?[]const u8 { + const limit = @min(html.len, 1024); + const data = html[0..limit]; + + // Scan for = data.len) return null; + + // Check for "meta" (case-insensitive) + if (pos + 4 >= data.len) return null; + var tag_buf: [4]u8 = undefined; + _ = std.ascii.lowerString(&tag_buf, data[pos..][0..4]); + if (!std.mem.eql(u8, &tag_buf, "meta")) { + continue; + } + pos += 4; + + // Must be followed by whitespace or end of tag + if (pos >= data.len) return null; + if (data[pos] != ' ' and data[pos] != '\t' and data[pos] != '\n' and + data[pos] != '\r' and data[pos] != '/') + { + continue; + } + + // Scan attributes within this meta tag + const tag_end = std.mem.indexOfScalarPos(u8, data, pos, '>') orelse return null; + const attrs = data[pos..tag_end]; + + // Look for charset= attribute directly + if (findAttrValue(attrs, "charset")) |charset| { + if (charset.len > 0 and charset.len <= 40) return charset; + } + + // Look for http-equiv="content-type" with content="...;charset=X" + if (findAttrValue(attrs, "http-equiv")) |he| { + if (asciiEqlIgnoreCase(he, "content-type")) { + if (findAttrValue(attrs, "content")) |content| { + if (extractCharsetFromContentType(content)) |charset| { + return charset; + } + } + } + } + + pos = tag_end + 1; + } + return null; +} + +fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 { + var pos: usize = 0; + while (pos < attrs.len) { + // Skip whitespace + while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t' or + attrs[pos] == '\n' or attrs[pos] == '\r')) + { + pos += 1; + } + if (pos >= attrs.len) return null; + + // Read attribute name + const attr_start = pos; + while (pos < attrs.len and attrs[pos] != '=' and attrs[pos] != ' ' and + attrs[pos] != '\t' and attrs[pos] != '>' and attrs[pos] != '/') + { + pos += 1; + } + const attr_name = attrs[attr_start..pos]; + + // Skip whitespace around = 
+ while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1; + if (pos >= attrs.len or attrs[pos] != '=') continue; + pos += 1; // skip '=' + while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1; + if (pos >= attrs.len) return null; + + // Read attribute value + const value = blk: { + if (attrs[pos] == '"' or attrs[pos] == '\'') { + const quote = attrs[pos]; + pos += 1; + const val_start = pos; + while (pos < attrs.len and attrs[pos] != quote) pos += 1; + const val = attrs[val_start..pos]; + if (pos < attrs.len) pos += 1; // skip closing quote + break :blk val; + } else { + const val_start = pos; + while (pos < attrs.len and attrs[pos] != ' ' and attrs[pos] != '\t' and + attrs[pos] != '>' and attrs[pos] != '/') + { + pos += 1; + } + break :blk attrs[val_start..pos]; + } + }; + + if (asciiEqlIgnoreCase(attr_name, name)) return value; + } + return null; +} + +fn extractCharsetFromContentType(content: []const u8) ?[]const u8 { + var it = std.mem.splitScalar(u8, content, ';'); + while (it.next()) |part| { + const trimmed = std.mem.trimLeft(u8, part, &.{ ' ', '\t' }); + if (trimmed.len > 8 and asciiEqlIgnoreCase(trimmed[0..8], "charset=")) { + const val = std.mem.trim(u8, trimmed[8..], &.{ ' ', '\t', '"', '\'' }); + if (val.len > 0 and val.len <= 40) return val; + } + } + return null; +} + +fn asciiEqlIgnoreCase(a: []const u8, b: []const u8) bool { + if (a.len != b.len) return false; + for (a, b) |ca, cb| { + if (std.ascii.toLower(ca) != std.ascii.toLower(cb)) return false; + } + return true; +} + pub fn sniff(body: []const u8) ?Mime { // 0x0C is form feed const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C }); @@ -576,3 +707,32 @@ fn expect(expected: Expectation, input: []const u8) !void { try testing.expectEqual(m.charsetStringZ(), actual.charsetStringZ()); } } + +test "Mime: prescanCharset" { + // + try testing.expectEqual("utf-8", Mime.prescanCharset("").?); + try testing.expectEqual("iso-8859-1", 
Mime.prescanCharset("").?); + try testing.expectEqual("shift_jis", Mime.prescanCharset("").?); + + // Case-insensitive tag matching + try testing.expectEqual("utf-8", Mime.prescanCharset("").?); + try testing.expectEqual("utf-8", Mime.prescanCharset("").?); + + // + try testing.expectEqual( + "iso-8859-1", + Mime.prescanCharset("").?, + ); + + // No charset found + try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("Test")); + try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("")); + try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("no html here")); + + // Charset after 1024 bytes should not be found + var long_html: [1100]u8 = undefined; + @memset(&long_html, ' '); + const suffix = ""; + @memcpy(long_html[1050 .. 1050 + suffix.len], suffix); + try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset(&long_html)); +} diff --git a/src/browser/Page.zig b/src/browser/Page.zig index cb62cb31..ab291bc6 100644 --- a/src/browser/Page.zig +++ b/src/browser/Page.zig @@ -848,13 +848,25 @@ fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void { if (self._parse_state == .pre) { // we lazily do this, because we might need the first chunk of data // to sniff the content type - const mime: Mime = blk: { + var mime: Mime = blk: { if (transfer.response_header.?.contentType()) |ct| { break :blk try Mime.parse(ct); } break :blk Mime.sniff(data); } orelse .unknown; + // If the HTTP header didn't specify a charset and this is HTML, + // prescan the first 1024 bytes for a declaration. 
+ if (mime.content_type == .text_html and std.mem.eql(u8, mime.charsetString(), "UTF-8")) { + if (Mime.prescanCharset(data)) |charset| { + if (charset.len <= 40) { + @memcpy(mime.charset[0..charset.len], charset); + mime.charset[charset.len] = 0; + mime.charset_len = charset.len; + } + } + } + if (comptime IS_DEBUG) { log.debug(.page, "navigate first chunk", .{ .content_type = mime.content_type, From 65627c129658fd406ac874bc7b7da30d7154b2a0 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Sun, 15 Mar 2026 09:18:13 +0800 Subject: [PATCH 04/17] Move ScriptManager to ArenaPool. This removes the BufferPool. The BufferPool was per-ScriptManager and only usable for the response. The ArenaPool is shared across pages and threads, so can provide much better re-use. Furthermore, the ArenaPool provides an Allocator, so that a Script's URL or inline content can be owned by the arena/ script itself, rather than the page arena. --- src/browser/HttpClient.zig | 54 +++++---- src/browser/ScriptManager.zig | 220 ++++++++++++---------------------- src/browser/js/Context.zig | 6 +- 3 files changed, 108 insertions(+), 172 deletions(-) diff --git a/src/browser/HttpClient.zig b/src/browser/HttpClient.zig index 1e74c046..136b578b 100644 --- a/src/browser/HttpClient.zig +++ b/src/browser/HttpClient.zig @@ -110,6 +110,8 @@ use_proxy: bool, // Current TLS verification state, applied per-connection in makeRequest. 
tls_verify: bool = true, +obey_robots: bool, + cdp_client: ?CDPClient = null, // libcurl can monitor arbitrary sockets, this lets us use libcurl to poll @@ -154,6 +156,7 @@ pub fn init(allocator: Allocator, network: *Network) !*Client { .http_proxy = http_proxy, .use_proxy = http_proxy != null, .tls_verify = network.config.tlsVerifyHost(), + .obey_robots = network.config.obeyRobots(), .transfer_pool = transfer_pool, }; @@ -257,34 +260,33 @@ pub fn tick(self: *Client, timeout_ms: u32) !PerformStatus { } pub fn request(self: *Client, req: Request) !void { - if (self.network.config.obeyRobots()) { - const robots_url = try URL.getRobotsUrl(self.allocator, req.url); - errdefer self.allocator.free(robots_url); - - // If we have this robots cached, we can take a fast path. - if (self.network.robot_store.get(robots_url)) |robot_entry| { - defer self.allocator.free(robots_url); - - switch (robot_entry) { - // If we have a found robots entry, we check it. - .present => |robots| { - const path = URL.getPathname(req.url); - if (!robots.isAllowed(path)) { - req.error_callback(req.ctx, error.RobotsBlocked); - return; - } - }, - // Otherwise, we assume we won't find it again. - .absent => {}, - } - - return self.processRequest(req); - } - - return self.fetchRobotsThenProcessRequest(robots_url, req); + if (self.obey_robots == false) { + return self.processRequest(req); } - return self.processRequest(req); + const robots_url = try URL.getRobotsUrl(self.allocator, req.url); + errdefer self.allocator.free(robots_url); + + // If we have this robots cached, we can take a fast path. + if (self.network.robot_store.get(robots_url)) |robot_entry| { + defer self.allocator.free(robots_url); + + switch (robot_entry) { + // If we have a found robots entry, we check it. + .present => |robots| { + const path = URL.getPathname(req.url); + if (!robots.isAllowed(path)) { + req.error_callback(req.ctx, error.RobotsBlocked); + return; + } + }, + // Otherwise, we assume we won't find it again. 
+ .absent => {}, + } + + return self.processRequest(req); + } + return self.fetchRobotsThenProcessRequest(robots_url, req); } fn processRequest(self: *Client, req: Request) !void { diff --git a/src/browser/ScriptManager.zig b/src/browser/ScriptManager.zig index 2baeef8d..a37493eb 100644 --- a/src/browser/ScriptManager.zig +++ b/src/browser/ScriptManager.zig @@ -63,9 +63,6 @@ shutdown: bool = false, client: *HttpClient, allocator: Allocator, -buffer_pool: BufferPool, - -script_pool: std.heap.MemoryPool(Script), // We can download multiple sync modules in parallel, but we want to process // them in order. We can't use an std.DoublyLinkedList, like the other script types, @@ -101,18 +98,14 @@ pub fn init(allocator: Allocator, http_client: *HttpClient, page: *Page) ScriptM .imported_modules = .empty, .client = http_client, .static_scripts_done = false, - .buffer_pool = BufferPool.init(allocator, 5), .page_notified_of_completion = false, - .script_pool = std.heap.MemoryPool(Script).init(allocator), }; } pub fn deinit(self: *ScriptManager) void { - // necessary to free any buffers scripts may be referencing + // necessary to free any arenas scripts may be referencing self.reset(); - self.buffer_pool.deinit(); - self.script_pool.deinit(); self.imported_modules.deinit(self.allocator); // we don't deinit self.importmap b/c we use the page's arena for its // allocations. 
@@ -121,7 +114,10 @@ pub fn deinit(self: *ScriptManager) void { pub fn reset(self: *ScriptManager) void { var it = self.imported_modules.valueIterator(); while (it.next()) |value_ptr| { - self.buffer_pool.release(value_ptr.buffer); + switch (value_ptr.state) { + .done => |script| script.deinit(), + else => {}, + } } self.imported_modules.clearRetainingCapacity(); @@ -138,13 +134,13 @@ pub fn reset(self: *ScriptManager) void { fn clearList(list: *std.DoublyLinkedList) void { while (list.popFirst()) |n| { const script: *Script = @fieldParentPtr("node", n); - script.deinit(true); + script.deinit(); } } -pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !net_http.Headers { +fn getHeaders(self: *ScriptManager, arena: Allocator, url: [:0]const u8) !net_http.Headers { var headers = try self.client.newHeaders(); - try self.page.headersForRequest(self.page.arena, url, &headers); + try self.page.headersForRequest(arena, url, &headers); return headers; } @@ -191,19 +187,26 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e return; }; + var handover = false; const page = self.page; + + const arena = try page.getArena(.{ .debug = "addFromElement" }); + errdefer if (!handover) { + page.releaseArena(arena); + }; + var source: Script.Source = undefined; var remote_url: ?[:0]const u8 = null; const base_url = page.base(); if (element.getAttributeSafe(comptime .wrap("src"))) |src| { - if (try parseDataURI(page.arena, src)) |data_uri| { + if (try parseDataURI(arena, src)) |data_uri| { source = .{ .@"inline" = data_uri }; } else { - remote_url = try URL.resolve(page.arena, base_url, src, .{}); + remote_url = try URL.resolve(arena, base_url, src, .{}); source = .{ .remote = .{} }; } } else { - var buf = std.Io.Writer.Allocating.init(page.arena); + var buf = std.Io.Writer.Allocating.init(arena); try element.asNode().getChildTextContent(&buf.writer); try buf.writer.writeByte(0); const data = buf.written(); @@ -218,15 +221,13 @@ pub fn 
addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e // Only set _executed (already-started) when we actually have content to execute script_element._executed = true; - - const script = try self.script_pool.create(); - errdefer self.script_pool.destroy(script); - const is_inline = source == .@"inline"; + const script = try arena.create(Script); script.* = .{ .kind = kind, .node = .{}, + .arena = arena, .manager = self, .source = source, .script_element = script_element, @@ -270,7 +271,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e if (is_blocking == false) { self.scriptList(script).remove(&script.node); } - script.deinit(true); + script.deinit(); } try self.client.request(.{ @@ -278,7 +279,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e .ctx = script, .method = .GET, .frame_id = page._frame_id, - .headers = try self.getHeaders(url), + .headers = try self.getHeaders(arena, url), .blocking = is_blocking, .cookie_jar = &page._session.cookie_jar, .resource_type = .script, @@ -289,6 +290,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e .done_callback = Script.doneCallback, .error_callback = Script.errorCallback, }); + handover = true; if (comptime IS_DEBUG) { var ls: js.Local.Scope = undefined; @@ -318,7 +320,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e } if (script.status == 0) { // an error (that we already logged) - script.deinit(true); + script.deinit(); return; } @@ -327,7 +329,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e self.is_evaluating = true; defer { self.is_evaluating = was_evaluating; - script.deinit(true); + script.deinit(); } return script.eval(page); } @@ -359,11 +361,14 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const } errdefer _ = self.imported_modules.remove(url); - const script = try 
self.script_pool.create(); - errdefer self.script_pool.destroy(script); + const page = self.page; + const arena = try page.getArena(.{ .debug = "preloadImport" }); + errdefer page.releaseArena(arena); + const script = try arena.create(Script); script.* = .{ .kind = .module, + .arena = arena, .url = url, .node = .{}, .manager = self, @@ -373,11 +378,7 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const .mode = .import, }; - gop.value_ptr.* = ImportedModule{ - .manager = self, - }; - - const page = self.page; + gop.value_ptr.* = ImportedModule{}; if (comptime IS_DEBUG) { var ls: js.Local.Scope = undefined; @@ -392,12 +393,18 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const }); } - try self.client.request(.{ + // This seems wrong since we're not dealing with an async import (unlike + // getAsyncModule below), but all we're trying to do here is pre-load the + // script for execution at some point in the future (when waitForImport is + // called). + self.async_scripts.append(&script.node); + + self.client.request(.{ .url = url, .ctx = script, .method = .GET, .frame_id = page._frame_id, - .headers = try self.getHeaders(url), + .headers = try self.getHeaders(arena, url), .cookie_jar = &page._session.cookie_jar, .resource_type = .script, .notification = page._session.notification, @@ -406,13 +413,10 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const .data_callback = Script.dataCallback, .done_callback = Script.doneCallback, .error_callback = Script.errorCallback, - }); - - // This seems wrong since we're not dealing with an async import (unlike - // getAsyncModule below), but all we're trying to do here is pre-load the - // script for execution at some point in the future (when waitForImport is - // called). 
- self.async_scripts.append(&script.node); + }) catch |err| { + self.async_scripts.remove(&script.node); + return err; + }; } pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource { @@ -433,12 +437,12 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource { _ = try client.tick(200); continue; }, - .done => { + .done => |script| { var shared = false; const buffer = entry.value_ptr.buffer; const waiters = entry.value_ptr.waiters; - if (waiters == 0) { + if (waiters == 1) { self.imported_modules.removeByPtr(entry.key_ptr); } else { shared = true; @@ -447,7 +451,7 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource { return .{ .buffer = buffer, .shared = shared, - .buffer_pool = &self.buffer_pool, + .script = script, }; }, .err => return error.Failed, @@ -456,11 +460,14 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource { } pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.Callback, cb_data: *anyopaque, referrer: []const u8) !void { - const script = try self.script_pool.create(); - errdefer self.script_pool.destroy(script); + const page = self.page; + const arena = try page.getArena(.{ .debug = "getAsyncImport" }); + errdefer page.releaseArena(arena); + const script = try arena.create(Script); script.* = .{ .kind = .module, + .arena = arena, .url = url, .node = .{}, .manager = self, @@ -473,7 +480,6 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C } }, }; - const page = self.page; if (comptime IS_DEBUG) { var ls: js.Local.Scope = undefined; page.js.localScope(&ls); @@ -496,11 +502,12 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C self.is_evaluating = true; defer self.is_evaluating = was_evaluating; - try self.client.request(.{ + self.async_scripts.append(&script.node); + self.client.request(.{ .url = url, .method = .GET, .frame_id = page._frame_id, - .headers = try 
self.getHeaders(url), + .headers = try self.getHeaders(arena, url), .ctx = script, .resource_type = .script, .cookie_jar = &page._session.cookie_jar, @@ -510,9 +517,10 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C .data_callback = Script.dataCallback, .done_callback = Script.doneCallback, .error_callback = Script.errorCallback, - }); - - self.async_scripts.append(&script.node); + }) catch |err| { + self.async_scripts.remove(&script.node); + return err; + }; } // Called from the Page to let us know it's done parsing the HTML. Necessary that @@ -537,18 +545,18 @@ fn evaluate(self: *ScriptManager) void { var script: *Script = @fieldParentPtr("node", n); switch (script.mode) { .async => { - defer script.deinit(true); + defer script.deinit(); script.eval(page); }, .import_async => |ia| { - defer script.deinit(false); if (script.status < 200 or script.status > 299) { + script.deinit(); ia.callback(ia.data, error.FailedToLoad); } else { ia.callback(ia.data, .{ .shared = false, + .script = script, .buffer = script.source.remote, - .buffer_pool = &self.buffer_pool, }); } }, @@ -574,7 +582,7 @@ fn evaluate(self: *ScriptManager) void { } defer { _ = self.defer_scripts.popFirst(); - script.deinit(true); + script.deinit(); } script.eval(page); } @@ -625,11 +633,12 @@ fn parseImportmap(self: *ScriptManager, script: *const Script) !void { } pub const Script = struct { - complete: bool, kind: Kind, + complete: bool, status: u16 = 0, source: Source, url: []const u8, + arena: Allocator, mode: ExecutionMode, node: std.DoublyLinkedList.Node, script_element: ?*Element.Html.Script, @@ -680,11 +689,8 @@ pub const Script = struct { import_async: ImportAsync, }; - fn deinit(self: *Script, comptime release_buffer: bool) void { - if ((comptime release_buffer) and self.source == .remote) { - self.manager.buffer_pool.release(self.source.remote); - } - self.manager.script_pool.destroy(self); + fn deinit(self: *Script) void { + 
self.manager.page.releaseArena(self.arena); } fn startCallback(transfer: *HttpClient.Transfer) !void { @@ -750,9 +756,9 @@ pub const Script = struct { } lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity }); - var buffer = self.manager.buffer_pool.get(); + var buffer: std.ArrayList(u8) = .empty; if (transfer.getContentLength()) |cl| { - try buffer.ensureTotalCapacity(self.manager.allocator, cl); + try buffer.ensureTotalCapacity(self.arena, cl); } self.source = .{ .remote = buffer }; return true; @@ -766,7 +772,7 @@ pub const Script = struct { }; } fn _dataCallback(self: *Script, _: *HttpClient.Transfer, data: []const u8) !void { - try self.source.remote.appendSlice(self.manager.allocator, data); + try self.source.remote.appendSlice(self.arena, data); } fn doneCallback(ctx: *anyopaque) !void { @@ -783,9 +789,8 @@ pub const Script = struct { } else if (self.mode == .import) { manager.async_scripts.remove(&self.node); const entry = manager.imported_modules.getPtr(self.url).?; - entry.state = .done; + entry.state = .{ .done = self }; entry.buffer = self.source.remote; - self.deinit(false); } manager.evaluate(); } @@ -811,7 +816,7 @@ pub const Script = struct { const manager = self.manager; manager.scriptList(self).remove(&self.node); if (manager.shutdown) { - self.deinit(true); + self.deinit(); return; } @@ -823,7 +828,7 @@ pub const Script = struct { }, else => {}, } - self.deinit(true); + self.deinit(); manager.evaluate(); } @@ -951,76 +956,6 @@ pub const Script = struct { } }; -const BufferPool = struct { - count: usize, - available: List = .{}, - allocator: Allocator, - max_concurrent_transfers: u8, - mem_pool: std.heap.MemoryPool(Container), - - const List = std.SinglyLinkedList; - - const Container = struct { - node: List.Node, - buf: std.ArrayList(u8), - }; - - fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool { - return .{ - .available = .{}, - .count = 0, - .allocator = 
allocator, - .max_concurrent_transfers = max_concurrent_transfers, - .mem_pool = std.heap.MemoryPool(Container).init(allocator), - }; - } - - fn deinit(self: *BufferPool) void { - const allocator = self.allocator; - - var node = self.available.first; - while (node) |n| { - const container: *Container = @fieldParentPtr("node", n); - container.buf.deinit(allocator); - node = n.next; - } - self.mem_pool.deinit(); - } - - fn get(self: *BufferPool) std.ArrayList(u8) { - const node = self.available.popFirst() orelse { - // return a new buffer - return .{}; - }; - - self.count -= 1; - const container: *Container = @fieldParentPtr("node", node); - defer self.mem_pool.destroy(container); - return container.buf; - } - - fn release(self: *BufferPool, buffer: ArrayList(u8)) void { - // create mutable copy - var b = buffer; - - if (self.count == self.max_concurrent_transfers) { - b.deinit(self.allocator); - return; - } - - const container = self.mem_pool.create() catch |err| { - b.deinit(self.allocator); - log.err(.http, "SM BufferPool release", .{ .err = err }); - return; - }; - - b.clearRetainingCapacity(); - container.* = .{ .buf = b, .node = .{} }; - self.count += 1; - self.available.prepend(&container.node); - } -}; - const ImportAsync = struct { data: *anyopaque, callback: ImportAsync.Callback, @@ -1030,12 +965,12 @@ const ImportAsync = struct { pub const ModuleSource = struct { shared: bool, - buffer_pool: *BufferPool, + script: *Script, buffer: std.ArrayList(u8), pub fn deinit(self: *ModuleSource) void { if (self.shared == false) { - self.buffer_pool.release(self.buffer); + self.script.deinit(); } } @@ -1045,15 +980,14 @@ pub const ModuleSource = struct { }; const ImportedModule = struct { - manager: *ScriptManager, + waiters: u16 = 1, state: State = .loading, buffer: std.ArrayList(u8) = .{}, - waiters: u16 = 1, - const State = enum { + const State = union(enum) { err, - done, loading, + done: *Script, }; }; diff --git a/src/browser/js/Context.zig 
b/src/browser/js/Context.zig index 5c58c5cb..6a933f7e 100644 --- a/src/browser/js/Context.zig +++ b/src/browser/js/Context.zig @@ -303,15 +303,15 @@ pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local } const owned_url = try arena.dupeZ(u8, url); + if (cacheable and !gop.found_existing) { + gop.key_ptr.* = owned_url; + } const m = try compileModule(local, src, owned_url); if (cacheable) { // compileModule is synchronous - nothing can modify the cache during compilation lp.assert(gop.value_ptr.module == null, "Context.module has module", .{}); gop.value_ptr.module = try m.persist(); - if (!gop.found_existing) { - gop.key_ptr.* = owned_url; - } } break :blk .{ m, owned_url }; From a8b147dfc054f1d24710002256c760817fd518ef Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Sun, 15 Mar 2026 09:24:42 +0800 Subject: [PATCH 05/17] update v8 --- .github/actions/install/action.yml | 2 +- Dockerfile | 2 +- build.zig.zon | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/install/action.yml b/.github/actions/install/action.yml index 4c0a28f9..5fcbfd85 100644 --- a/.github/actions/install/action.yml +++ b/.github/actions/install/action.yml @@ -13,7 +13,7 @@ inputs: zig-v8: description: 'zig v8 version to install' required: false - default: 'v0.3.3' + default: 'v0.3.4' v8: description: 'v8 version to install' required: false diff --git a/Dockerfile b/Dockerfile index f106905a..f5cd202d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM debian:stable-slim ARG MINISIG=0.12 ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U ARG V8=14.0.365.4 -ARG ZIG_V8=v0.3.3 +ARG ZIG_V8=v0.3.4 ARG TARGETPLATFORM RUN apt-get update -yq && \ diff --git a/build.zig.zon b/build.zig.zon index 33fc0a09..cee52057 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -5,7 +5,7 @@ .minimum_zig_version = "0.15.2", .dependencies = .{ .v8 = .{ - .url = 
"https://github.com/lightpanda-io/zig-v8-fork/archive/34cb5f50227047c6cc1b2af73dad958c267f0a83.tar.gz", + .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.4.tar.gz", + .hash = "v8-0.0.0-xddH6_F3BAAiFvKY6R1H-gkuQlk19BkDQ0--uZuTrSup", }, // .v8 = .{ .path = "../zig-v8-fork" }, From 3e9fa4ca473923ca1d25eea6df831e8327a73d47 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Sun, 15 Mar 2026 12:00:42 +0800 Subject: [PATCH 06/17] Fix use-after-free with certain CDP scripts Origins were introduced to group memory/data that can be owned by multiple frames (on the same origin). There's a general idea that the initial "opaque" origin is very transient and should get replaced before any actual JavaScript is executed (because the real origin is set up as soon as we get the header from the response, long before we execute any script). But... with CDP, this guarantee doesn't hold. There's nothing stopping a CDP script from executing JavaScript at any point, including while the main page is still being loaded. This can result in allocations made on the opaque origin, which is promptly discarded. To solve this, this commit introduces origin takeover. Rather than just transferring any data from one origin (the opaque) to the new one and then deinit'ing the opaque one (which is what results in use-after-free), the new origin simply maintains a list of opaque origins it has "taken over" and is responsible for freeing them (in its own deinit). This ensures that any allocations made in the opaque origin remain valid.
--- src/browser/js/Context.zig | 7 +++--- src/browser/js/Origin.zig | 49 ++++++++++++++++++++++---------------- 2 files changed, 32 insertions(+), 24 deletions(-) diff --git a/src/browser/js/Context.zig b/src/browser/js/Context.zig index 5c58c5cb..97ea9d9c 100644 --- a/src/browser/js/Context.zig +++ b/src/browser/js/Context.zig @@ -167,12 +167,11 @@ pub fn setOrigin(self: *Context, key: ?[]const u8) !void { const env = self.env; const isolate = env.isolate; + lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc }); + const origin = try self.session.getOrCreateOrigin(key); errdefer self.session.releaseOrigin(origin); - - try self.origin.transferTo(origin); - lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc }); - self.origin.deinit(env.app); + try origin.takeover(self.origin); self.origin = origin; diff --git a/src/browser/js/Origin.zig b/src/browser/js/Origin.zig index d7e74e4f..180cfd84 100644 --- a/src/browser/js/Origin.zig +++ b/src/browser/js/Origin.zig @@ -68,6 +68,8 @@ temps: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty, // if v8 hasn't called the finalizer directly itself. 
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty, +taken_over: std.ArrayList(*Origin), + pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin { const arena = try app.arena_pool.acquire(); errdefer app.arena_pool.release(arena); @@ -86,14 +88,19 @@ pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin { .rc = 1, .arena = arena, .key = owned_key, - .globals = .empty, .temps = .empty, + .globals = .empty, + .taken_over = .empty, .security_token = token_global, }; return self; } pub fn deinit(self: *Origin, app: *App) void { + for (self.taken_over.items) |o| { + o.deinit(app); + } + // Call finalizers before releasing anything { var it = self.finalizer_callbacks.valueIterator(); @@ -196,42 +203,44 @@ pub fn createFinalizerCallback( return fc; } -pub fn transferTo(self: *Origin, dest: *Origin) !void { - const arena = dest.arena; +pub fn takeover(self: *Origin, original: *Origin) !void { + const arena = self.arena; - try dest.globals.ensureUnusedCapacity(arena, self.globals.items.len); - for (self.globals.items) |obj| { - dest.globals.appendAssumeCapacity(obj); + try self.globals.ensureUnusedCapacity(arena, self.globals.items.len); + for (original.globals.items) |obj| { + self.globals.appendAssumeCapacity(obj); } - self.globals.clearRetainingCapacity(); + original.globals.clearRetainingCapacity(); { - try dest.temps.ensureUnusedCapacity(arena, self.temps.count()); - var it = self.temps.iterator(); + try self.temps.ensureUnusedCapacity(arena, original.temps.count()); + var it = original.temps.iterator(); while (it.next()) |kv| { - try dest.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*); + try self.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*); } - self.temps.clearRetainingCapacity(); + original.temps.clearRetainingCapacity(); } { - try dest.finalizer_callbacks.ensureUnusedCapacity(arena, self.finalizer_callbacks.count()); - var it = self.finalizer_callbacks.iterator(); + try 
self.finalizer_callbacks.ensureUnusedCapacity(arena, original.finalizer_callbacks.count()); + var it = original.finalizer_callbacks.iterator(); while (it.next()) |kv| { - kv.value_ptr.*.origin = dest; - try dest.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*); + kv.value_ptr.*.origin = self; + try self.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*); } - self.finalizer_callbacks.clearRetainingCapacity(); + original.finalizer_callbacks.clearRetainingCapacity(); } { - try dest.identity_map.ensureUnusedCapacity(arena, self.identity_map.count()); - var it = self.identity_map.iterator(); + try self.identity_map.ensureUnusedCapacity(arena, original.identity_map.count()); + var it = original.identity_map.iterator(); while (it.next()) |kv| { - try dest.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*); + try self.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*); } - self.identity_map.clearRetainingCapacity(); + original.identity_map.clearRetainingCapacity(); } + + try self.taken_over.append(self.arena, original); } // A type that has a finalizer can have its finalizer called one of two ways. From b373fb4a424119c314da7c8a62aa3811c6a4698c Mon Sep 17 00:00:00 2001 From: Matt Van Horn <455140+mvanhorn@users.noreply.github.com> Date: Sun, 15 Mar 2026 21:20:45 -0700 Subject: [PATCH 07/17] Address review feedback: fix endless loop, use stdlib, add charset flag - Use std.ascii.eqlIgnoreCase instead of custom asciiEqlIgnoreCase - Fix infinite loop in findAttrValue when attribute has no '=' sign (e.g. 
self-closing ) - Add is_default_charset flag to Mime struct so prescan only overrides charset when Content-Type header didn't set one explicitly - Add regression test for the self-closing meta loop case Co-Authored-By: Claude Opus 4.6 --- src/browser/Mime.zig | 35 +++++++++++++++++++---------------- src/browser/Page.zig | 4 ++-- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/src/browser/Mime.zig b/src/browser/Mime.zig index beef2177..13951259 100644 --- a/src/browser/Mime.zig +++ b/src/browser/Mime.zig @@ -25,6 +25,7 @@ params: []const u8 = "", // We keep 41 for null-termination since HTML parser expects in this format. charset: [41]u8 = default_charset, charset_len: usize = default_charset_len, +is_default_charset: bool = true, /// String "UTF-8" continued by null characters. const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36; @@ -130,6 +131,7 @@ pub fn parse(input: []u8) !Mime { var charset: [41]u8 = default_charset; var charset_len: usize = default_charset_len; + var has_explicit_charset = false; var it = std.mem.splitScalar(u8, params, ';'); while (it.next()) |attr| { @@ -156,6 +158,7 @@ pub fn parse(input: []u8) !Mime { // Null-terminate right after attribute value. 
charset[attribute_value.len] = 0; charset_len = attribute_value.len; + has_explicit_charset = true; }, } } @@ -165,6 +168,7 @@ pub fn parse(input: []u8) !Mime { .charset = charset, .charset_len = charset_len, .content_type = content_type, + .is_default_charset = !has_explicit_charset, }; } @@ -212,7 +216,7 @@ pub fn prescanCharset(html: []const u8) ?[]const u8 { // Look for http-equiv="content-type" with content="...;charset=X" if (findAttrValue(attrs, "http-equiv")) |he| { - if (asciiEqlIgnoreCase(he, "content-type")) { + if (std.ascii.eqlIgnoreCase(he, "content-type")) { if (findAttrValue(attrs, "content")) |content| { if (extractCharsetFromContentType(content)) |charset| { return charset; @@ -248,7 +252,11 @@ fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 { // Skip whitespace around = while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1; - if (pos >= attrs.len or attrs[pos] != '=') continue; + if (pos >= attrs.len or attrs[pos] != '=') { + // No '=' found - skip this token. Advance at least one byte to avoid infinite loop. 
+ if (pos == attr_start) pos += 1; + continue; + } pos += 1; // skip '=' while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1; if (pos >= attrs.len) return null; @@ -274,7 +282,7 @@ fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 { } }; - if (asciiEqlIgnoreCase(attr_name, name)) return value; + if (std.ascii.eqlIgnoreCase(attr_name, name)) return value; } return null; } @@ -283,7 +291,7 @@ fn extractCharsetFromContentType(content: []const u8) ?[]const u8 { var it = std.mem.splitScalar(u8, content, ';'); while (it.next()) |part| { const trimmed = std.mem.trimLeft(u8, part, &.{ ' ', '\t' }); - if (trimmed.len > 8 and asciiEqlIgnoreCase(trimmed[0..8], "charset=")) { + if (trimmed.len > 8 and std.ascii.eqlIgnoreCase(trimmed[0..8], "charset=")) { const val = std.mem.trim(u8, trimmed[8..], &.{ ' ', '\t', '"', '\'' }); if (val.len > 0 and val.len <= 40) return val; } @@ -291,14 +299,6 @@ fn extractCharsetFromContentType(content: []const u8) ?[]const u8 { return null; } -fn asciiEqlIgnoreCase(a: []const u8, b: []const u8) bool { - if (a.len != b.len) return false; - for (a, b) |ca, cb| { - if (std.ascii.toLower(ca) != std.ascii.toLower(cb)) return false; - } - return true; -} - pub fn sniff(body: []const u8) ?Mime { // 0x0C is form feed const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C }); @@ -725,14 +725,17 @@ test "Mime: prescanCharset" { ); // No charset found - try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("Test")); - try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("")); - try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset("no html here")); + try testing.expectEqual(null, Mime.prescanCharset("Test")); + try testing.expectEqual(null, Mime.prescanCharset("")); + try testing.expectEqual(null, Mime.prescanCharset("no html here")); + + // Self-closing meta without charset must not loop forever + try testing.expectEqual(null, 
Mime.prescanCharset("")); // Charset after 1024 bytes should not be found var long_html: [1100]u8 = undefined; @memset(&long_html, ' '); const suffix = ""; @memcpy(long_html[1050 .. 1050 + suffix.len], suffix); - try testing.expectEqual(@as(?[]const u8, null), Mime.prescanCharset(&long_html)); + try testing.expectEqual(null, Mime.prescanCharset(&long_html)); } diff --git a/src/browser/Page.zig b/src/browser/Page.zig index ab291bc6..9f7a22a1 100644 --- a/src/browser/Page.zig +++ b/src/browser/Page.zig @@ -855,9 +855,9 @@ fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void { break :blk Mime.sniff(data); } orelse .unknown; - // If the HTTP header didn't specify a charset and this is HTML, + // If the HTTP Content-Type header didn't specify a charset and this is HTML, // prescan the first 1024 bytes for a declaration. - if (mime.content_type == .text_html and std.mem.eql(u8, mime.charsetString(), "UTF-8")) { + if (mime.content_type == .text_html and mime.is_default_charset) { if (Mime.prescanCharset(data)) |charset| { if (charset.len <= 40) { @memcpy(mime.charset[0..charset.len], charset); From 21421d5b5306ef23a1f20bd8127e63720ff0fe8e Mon Sep 17 00:00:00 2001 From: jnMetaCode <1394485448@qq.com> Date: Mon, 16 Mar 2026 17:20:29 +0800 Subject: [PATCH 08/17] fix(dom): add default messages for all DOMException error codes The getMessage() fallback returned raw tag names like "wrong_document_error" instead of human-readable messages. Fill in all 18 error codes with messages based on the WebIDL spec error descriptions. 
Closes #82 Signed-off-by: JiangNan <1394485448@qq.com> --- src/browser/webapi/DOMException.zig | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/src/browser/webapi/DOMException.zig b/src/browser/webapi/DOMException.zig index 3e0da288..46294b8d 100644 --- a/src/browser/webapi/DOMException.zig +++ b/src/browser/webapi/DOMException.zig @@ -104,13 +104,27 @@ pub fn getMessage(self: *const DOMException) []const u8 { } return switch (self._code) { .none => "", - .invalid_character_error => "Invalid Character", .index_size_error => "Index or size is negative or greater than the allowed amount", - .syntax_error => "Syntax Error", - .not_supported => "Not Supported", - .not_found => "Not Found", - .hierarchy_error => "Hierarchy Error", - else => @tagName(self._code), + .hierarchy_error => "The operation would yield an incorrect node tree", + .wrong_document_error => "The object is in the wrong document", + .invalid_character_error => "The string contains invalid characters", + .no_modification_allowed_error => "The object can not be modified", + .not_found => "The object can not be found here", + .not_supported => "The operation is not supported", + .inuse_attribute_error => "The attribute already in use", + .invalid_state_error => "The object is in an invalid state", + .syntax_error => "The string did not match the expected pattern", + .invalid_modification_error => "The object can not be modified in this way", + .namespace_error => "The operation is not allowed by Namespaces in XML", + .invalid_access_error => "The object does not support the operation or argument", + .security_error => "The operation is insecure", + .network_error => "A network error occurred", + .abort_error => "The operation was aborted", + .url_mismatch_error => "The given URL does not match another URL", + .quota_exceeded_error => "The quota has been exceeded", + .timeout_error => "The operation timed out", + .invalid_node_type_error => "The supplied node 
is incorrect or has an incorrect ancestor for this operation", + .data_clone_error => "The object can not be cloned", }; } From 0380df1cb4e599ee57bd15c5e11b9f119a110f23 Mon Sep 17 00:00:00 2001 From: jnMetaCode <1394485448@qq.com> Date: Mon, 16 Mar 2026 17:21:14 +0800 Subject: [PATCH 09/17] fix(cdp): add missing disable method to Performance Signed-off-by: JiangNan <1394485448@qq.com> --- src/cdp/domains/performance.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/cdp/domains/performance.zig b/src/cdp/domains/performance.zig index dad0cebd..b8dea574 100644 --- a/src/cdp/domains/performance.zig +++ b/src/cdp/domains/performance.zig @@ -21,9 +21,11 @@ const std = @import("std"); pub fn processMessage(cmd: anytype) !void { const action = std.meta.stringToEnum(enum { enable, + disable, }, cmd.input.action) orelse return error.UnknownMethod; switch (action) { .enable => return cmd.sendResult(null, .{}), + .disable => return cmd.sendResult(null, .{}), } } From ac651328c3f3c38b0b804787f8d0758dbaab6a9b Mon Sep 17 00:00:00 2001 From: jnMetaCode <1394485448@qq.com> Date: Mon, 16 Mar 2026 17:21:18 +0800 Subject: [PATCH 10/17] fix(cdp): add missing disable method to Inspector Signed-off-by: JiangNan <1394485448@qq.com> --- src/cdp/domains/inspector.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/cdp/domains/inspector.zig b/src/cdp/domains/inspector.zig index dad0cebd..b8dea574 100644 --- a/src/cdp/domains/inspector.zig +++ b/src/cdp/domains/inspector.zig @@ -21,9 +21,11 @@ const std = @import("std"); pub fn processMessage(cmd: anytype) !void { const action = std.meta.stringToEnum(enum { enable, + disable, }, cmd.input.action) orelse return error.UnknownMethod; switch (action) { .enable => return cmd.sendResult(null, .{}), + .disable => return cmd.sendResult(null, .{}), } } From b09e9f73982045c6955c8df7bf6effafd6138436 Mon Sep 17 00:00:00 2001 From: jnMetaCode <1394485448@qq.com> Date: Mon, 16 Mar 2026 17:21:20 +0800 Subject: [PATCH 11/17] fix(cdp): add 
missing disable method to Security Signed-off-by: JiangNan <1394485448@qq.com> --- src/cdp/domains/security.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/cdp/domains/security.zig b/src/cdp/domains/security.zig index 0ebfedae..9bbf5b39 100644 --- a/src/cdp/domains/security.zig +++ b/src/cdp/domains/security.zig @@ -21,11 +21,13 @@ const std = @import("std"); pub fn processMessage(cmd: anytype) !void { const action = std.meta.stringToEnum(enum { enable, + disable, setIgnoreCertificateErrors, }, cmd.input.action) orelse return error.UnknownMethod; switch (action) { .enable => return cmd.sendResult(null, .{}), + .disable => return cmd.sendResult(null, .{}), .setIgnoreCertificateErrors => return setIgnoreCertificateErrors(cmd), } } From 7b2895ef081e06948221db025ce44d7151719144 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Mon, 16 Mar 2026 17:33:12 +0800 Subject: [PATCH 12/17] click event dispatched from CDP should be trusted --- src/browser/Page.zig | 2 +- src/browser/webapi/event/MouseEvent.zig | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/browser/Page.zig b/src/browser/Page.zig index cb62cb31..f11ba35c 100644 --- a/src/browser/Page.zig +++ b/src/browser/Page.zig @@ -3255,7 +3255,7 @@ pub fn triggerMouseClick(self: *Page, x: f64, y: f64) !void { .type = self._type, }); } - const event = (try @import("webapi/event/MouseEvent.zig").init("click", .{ + const event = (try @import("webapi/event/MouseEvent.zig").initTrusted(comptime .wrap("click"), .{ .bubbles = true, .cancelable = true, .composed = true, diff --git a/src/browser/webapi/event/MouseEvent.zig b/src/browser/webapi/event/MouseEvent.zig index 6b032433..e13dc1b3 100644 --- a/src/browser/webapi/event/MouseEvent.zig +++ b/src/browser/webapi/event/MouseEvent.zig @@ -28,6 +28,8 @@ const EventTarget = @import("../EventTarget.zig"); const UIEvent = @import("UIEvent.zig"); const PointerEvent = @import("PointerEvent.zig"); +const Allocator = std.mem.Allocator; + 
const MouseEvent = @This(); pub const MouseButton = enum(u8) { @@ -83,12 +85,21 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*MouseEvent { const arena = try page.getArena(.{ .debug = "MouseEvent" }); errdefer page.releaseArena(arena); const type_string = try String.init(arena, typ, .{}); + return initWithTrusted(arena, type_string, _opts, false, page); +} +pub fn initTrusted(typ: String, _opts: ?Options, page: *Page) !*MouseEvent { + const arena = try page.getArena(.{ .debug = "MouseEvent.trusted" }); + errdefer page.releaseArena(arena); + return initWithTrusted(arena, typ, _opts, true, page); +} + +fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool, page: *Page) !*MouseEvent { const opts = _opts orelse Options{}; const event = try page._factory.uiEvent( arena, - type_string, + typ, MouseEvent{ ._type = .generic, ._proto = undefined, @@ -106,7 +117,7 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*MouseEvent { }, ); - Event.populatePrototypes(event, opts, false); + Event.populatePrototypes(event, opts, trusted); return event; } From 422320d9ac47bb6e105daa078d9e7657c576f1d2 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Mon, 16 Mar 2026 17:54:01 +0800 Subject: [PATCH 13/17] Set charset based on BOM Small follow up to https://github.com/lightpanda-io/browser/pull/1837 If we sniff the content type from the byte order mark (BOM), then we should set the charset. This has higher precedence than sniffing the content type from the content of the document (e.g. 
meta tags) --- src/browser/Mime.zig | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/browser/Mime.zig b/src/browser/Mime.zig index 13951259..e23d48a2 100644 --- a/src/browser/Mime.zig +++ b/src/browser/Mime.zig @@ -309,15 +309,30 @@ pub fn sniff(body: []const u8) ?Mime { if (content[0] != '<') { if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) { // UTF-8 BOM - return .{ .content_type = .{ .text_plain = {} } }; + return .{ + .content_type = .{ .text_plain = {} }, + .charset = default_charset, + .charset_len = default_charset_len, + .is_default_charset = false, + }; } if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) { // UTF-16 big-endian BOM - return .{ .content_type = .{ .text_plain = {} } }; + return .{ + .content_type = .{ .text_plain = {} }, + .charset = .{ 'U', 'T', 'F', '-', '1', '6', 'B', 'E' } ++ .{0} ** 33, + .charset_len = 8, + .is_default_charset = false, + }; } if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) { // UTF-16 little-endian BOM - return .{ .content_type = .{ .text_plain = {} } }; + return .{ + .content_type = .{ .text_plain = {} }, + .charset = .{ 'U', 'T', 'F', '-', '1', '6', 'L', 'E' } ++ .{0} ** 33, + .charset_len = 8, + .is_default_charset = false, + }; } return null; } @@ -671,6 +686,24 @@ test "Mime: sniff" { try expectHTML(""); try expectHTML(" \n\t "); + + { + const mime = Mime.sniff(&.{ 0xEF, 0xBB, 0xBF }).?; + try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type)); + try testing.expectEqual("UTF-8", mime.charsetString()); + } + + { + const mime = Mime.sniff(&.{ 0xFE, 0xFF }).?; + try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type)); + try testing.expectEqual("UTF-16BE", mime.charsetString()); + } + + { + const mime = Mime.sniff(&.{ 0xFF, 0xFE }).?; + try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type)); + try testing.expectEqual("UTF-16LE", mime.charsetString()); + } } const 
Expectation = struct { From dac456d98c4b4086ef25f54de944e3f2e6ea699f Mon Sep 17 00:00:00 2001 From: Pierre Tachoire Date: Mon, 16 Mar 2026 09:56:54 +0100 Subject: [PATCH 14/17] ci: fix wba flaky test Sometimes the GHA secret isn't dumped to the file correctly. So this commit injects the value directly into the command line --- .github/workflows/e2e-test.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml index 675dd36b..632200f2 100644 --- a/.github/workflows/e2e-test.yml +++ b/.github/workflows/e2e-test.yml @@ -194,8 +194,6 @@ jobs: repository: 'lightpanda-io/demo' fetch-depth: 0 - - run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem - - name: download artifact uses: actions/download-artifact@v4 with: @@ -204,17 +202,22 @@ jobs: - run: chmod a+x ./lightpanda - name: run wba test + shell: bash run: | + node webbotauth/validator.js & VALIDATOR_PID=$! sleep 2 - ./lightpanda fetch http://127.0.0.1:8989/ \ - --web_bot_auth_key_file private_key.pem \ + exec 3<<< "${{ secrets.WBA_PRIVATE_KEY_PEM }}" + + ./lightpanda fetch --dump http://127.0.0.1:8989/ \ + --web_bot_auth_key_file /proc/self/fd/3 \ --web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \ --web_bot_auth_domain ${{ vars.WBA_DOMAIN }} wait $VALIDATOR_PID + exec 3>&- cdp-and-hyperfine-bench: name: cdp-and-hyperfine-bench From 1ceaabe69f2a864343805fc7acbc36628b34bf28 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Mon, 16 Mar 2026 20:56:18 +0800 Subject: [PATCH 15/17] Switch to reference counting for Mutation Observer and Intersection Observer This may be a stopgap. Our identity model assumes that v8 won't allow cross-origin access. It turns out that with CDP and Inspector, this isn't true. Inspectors can break / violate cross-origin restrictions. The result is that 2 origins can see the same zig instance, which causes 2 v8::Objects to reference the same Zig instance. This likely causes some consistency issue. 
Like, if you take mo in 1 context, and write an arbitrary property, mo.hack = true, you won't observe that in the 2nd context (because it's a different v8::Object). But, it _is_ the same Zig instance, so if you set a known/real property, it will be updated. That's probably a pretty minor issue. The bigger issue is that it can result in a use-after-free when using explicit strong/weak ref: 1 - Mutation observer is created in Origin1 2 - It's automatically set to weak 3 - Something is observed, the reference is made strong 4 - The MO is accessed from Origin2 5 - Creates a new v8::Object 6 - Sets it to weak 7 - Object goes out of scope in Origin2 8 - Finalizer is called <- free 9 - MO is manipulated in Origin 1 <- use after free Maybe the right option is to have a single shared identity map. I need to think about it. As a stopgap, switching to reference counting (which we already support) should prevent the use-after-free. While we'll still create 2 v8::Objects, they'll each acquireRef (_rc = 2) and thus it won't be freed until they both release it. 
--- src/browser/webapi/IntersectionObserver.zig | 37 ++++++++++++++++----- src/browser/webapi/MutationObserver.zig | 34 ++++++++++++++----- 2 files changed, 54 insertions(+), 17 deletions(-) diff --git a/src/browser/webapi/IntersectionObserver.zig b/src/browser/webapi/IntersectionObserver.zig index 74a5d79e..b4c07e77 100644 --- a/src/browser/webapi/IntersectionObserver.zig +++ b/src/browser/webapi/IntersectionObserver.zig @@ -37,6 +37,7 @@ pub fn registerTypes() []const type { const IntersectionObserver = @This(); +_rc: u8 = 0, _arena: Allocator, _callback: js.Function.Temp, _observing: std.ArrayList(*Element) = .{}, @@ -93,12 +94,24 @@ pub fn init(callback: js.Function.Temp, options: ?ObserverInit, page: *Page) !*I } pub fn deinit(self: *IntersectionObserver, shutdown: bool, session: *Session) void { - self._callback.release(); - if ((comptime IS_DEBUG) and !shutdown) { - std.debug.assert(self._observing.items.len == 0); + const rc = self._rc; + if (comptime IS_DEBUG) { + std.debug.assert(rc != 0); } - session.releaseArena(self._arena); + if (rc == 1 or shutdown) { + self._callback.release(); + if ((comptime IS_DEBUG) and !shutdown) { + std.debug.assert(self._observing.items.len == 0); + } + session.releaseArena(self._arena); + } else { + self._rc = rc - 1; + } +} + +pub fn acquireRef(self: *IntersectionObserver) void { + self._rc += 1; } pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void { @@ -111,7 +124,7 @@ pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void // Register with page if this is our first observation if (self._observing.items.len == 0) { - page.js.strongRef(self); + self._rc += 1; try page.registerIntersectionObserver(self); } @@ -148,20 +161,26 @@ pub fn unobserve(self: *IntersectionObserver, target: *Element, page: *Page) voi } if (self._observing.items.len == 0) { - page.js.safeWeakRef(self); + self.deinit(false, page._session); } } pub fn disconnect(self: *IntersectionObserver, page: 
*Page) void { - page.unregisterIntersectionObserver(self); - self._observing.clearRetainingCapacity(); self._previous_states.clearRetainingCapacity(); for (self._pending_entries.items) |entry| { entry.deinit(false, page._session); } self._pending_entries.clearRetainingCapacity(); - page.js.safeWeakRef(self); + + const observing_count = self._observing.items.len; + self._observing.clearRetainingCapacity(); + + if (observing_count > 0) { + self.deinit(false, page._session); + } + + page.unregisterIntersectionObserver(self); } pub fn takeRecords(self: *IntersectionObserver, page: *Page) ![]*IntersectionObserverEntry { diff --git a/src/browser/webapi/MutationObserver.zig b/src/browser/webapi/MutationObserver.zig index b8608381..8b625fa8 100644 --- a/src/browser/webapi/MutationObserver.zig +++ b/src/browser/webapi/MutationObserver.zig @@ -39,6 +39,7 @@ pub fn registerTypes() []const type { const MutationObserver = @This(); +_rc: u8 = 0, _arena: Allocator, _callback: js.Function.Temp, _observing: std.ArrayList(Observing) = .{}, @@ -86,12 +87,24 @@ pub fn init(callback: js.Function.Temp, page: *Page) !*MutationObserver { } pub fn deinit(self: *MutationObserver, shutdown: bool, session: *Session) void { - self._callback.release(); - if ((comptime IS_DEBUG) and !shutdown) { - std.debug.assert(self._observing.items.len == 0); + const rc = self._rc; + if (comptime IS_DEBUG) { + std.debug.assert(rc != 0); } - session.releaseArena(self._arena); + if (rc == 1 or shutdown) { + self._callback.release(); + if ((comptime IS_DEBUG) and !shutdown) { + std.debug.assert(self._observing.items.len == 0); + } + session.releaseArena(self._arena); + } else { + self._rc = rc - 1; + } +} + +pub fn acquireRef(self: *MutationObserver) void { + self._rc += 1; } pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, page: *Page) !void { @@ -158,7 +171,7 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, // Register with page if this is our 
first observation if (self._observing.items.len == 0) { - page.js.strongRef(self); + self._rc += 1; try page.registerMutationObserver(self); } @@ -169,13 +182,18 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, } pub fn disconnect(self: *MutationObserver, page: *Page) void { - page.unregisterMutationObserver(self); - self._observing.clearRetainingCapacity(); for (self._pending_records.items) |record| { record.deinit(false, page._session); } self._pending_records.clearRetainingCapacity(); - page.js.safeWeakRef(self); + + const observing_count = self._observing.items.len; + self._observing.clearRetainingCapacity(); + + if (observing_count > 0) { + self.deinit(false, page._session); + } + page.unregisterMutationObserver(self); } pub fn takeRecords(self: *MutationObserver, page: *Page) ![]*MutationRecord { From 84190e1e064eb922a76707f7e862b2951d5f8af3 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Tue, 17 Mar 2026 07:07:16 +0800 Subject: [PATCH 16/17] fix test for new messages --- .../tests/document/query_selector.html | 3 +-- .../tests/document/query_selector_all.html | 3 +-- src/browser/tests/domexception.html | 2 +- src/browser/tests/element/attributes.html | 1 - src/browser/tests/element/matches.html | 3 +-- src/browser/tests/element/query_selector.html | 3 +-- .../tests/element/query_selector_all.html | 3 +-- .../tests/element/selector_invalid.html | 4 ++-- src/browser/tests/node/insert_before.html | 1 - src/browser/tests/node/remove_child.html | 1 - src/browser/tests/node/replace_child.html | 1 - src/browser/tests/range.html | 22 +++++++++---------- src/browser/tests/window/window.html | 2 +- 13 files changed, 20 insertions(+), 29 deletions(-) diff --git a/src/browser/tests/document/query_selector.html b/src/browser/tests/document/query_selector.html index b333069e..0837999e 100644 --- a/src/browser/tests/document/query_selector.html +++ b/src/browser/tests/document/query_selector.html @@ -24,11 +24,10 @@ diff --git 
a/src/browser/tests/domexception.html b/src/browser/tests/domexception.html index 1ed43e8d..05bdc837 100644 --- a/src/browser/tests/domexception.html +++ b/src/browser/tests/domexception.html @@ -127,7 +127,7 @@ testing.withError((err) => { testing.expectEqual(3, err.code); - testing.expectEqual('Hierarchy Error', err.message); + testing.expectEqual('HierarchyRequestError', err.name); testing.expectEqual(true, err instanceof DOMException); testing.expectEqual(true, err instanceof Error); }, () => link.appendChild(content)); diff --git a/src/browser/tests/element/attributes.html b/src/browser/tests/element/attributes.html index 9b8c29d3..0929a3d9 100644 --- a/src/browser/tests/element/attributes.html +++ b/src/browser/tests/element/attributes.html @@ -36,7 +36,6 @@ testing.withError((err) => { testing.expectEqual(8, err.code); testing.expectEqual("NotFoundError", err.name); - testing.expectEqual("Not Found", err.message); }, () => el1.removeAttributeNode(script_id_node)); testing.expectEqual(an1, el1.removeAttributeNode(an1)); diff --git a/src/browser/tests/element/matches.html b/src/browser/tests/element/matches.html index 5e1721b5..f28d7a71 100644 --- a/src/browser/tests/element/matches.html +++ b/src/browser/tests/element/matches.html @@ -66,11 +66,10 @@ { const container = $('#test-container'); - testing.expectError("SyntaxError: Syntax Error", () => container.matches('')); + testing.expectError("SyntaxError", () => container.matches('')); testing.withError((err) => { testing.expectEqual(12, err.code); testing.expectEqual("SyntaxError", err.name); - testing.expectEqual("Syntax Error", err.message); }, () => container.matches('')); } diff --git a/src/browser/tests/element/query_selector.html b/src/browser/tests/element/query_selector.html index 9564ca6d..203524b6 100644 --- a/src/browser/tests/element/query_selector.html +++ b/src/browser/tests/element/query_selector.html @@ -12,11 +12,10 @@ const p1 = $('#p1'); testing.expectEqual(null, p1.querySelector('#p1')); 
- testing.expectError("SyntaxError: Syntax Error", () => p1.querySelector('')); + testing.expectError("SyntaxError", () => p1.querySelector('')); testing.withError((err) => { testing.expectEqual(12, err.code); testing.expectEqual("SyntaxError", err.name); - testing.expectEqual("Syntax Error", err.message); }, () => p1.querySelector('')); testing.expectEqual($('#c2'), p1.querySelector('#c2')); diff --git a/src/browser/tests/element/query_selector_all.html b/src/browser/tests/element/query_selector_all.html index eeedc876..3b4013c2 100644 --- a/src/browser/tests/element/query_selector_all.html +++ b/src/browser/tests/element/query_selector_all.html @@ -24,11 +24,10 @@ diff --git a/src/browser/tests/element/selector_invalid.html b/src/browser/tests/element/selector_invalid.html index 35409c19..c0d16d59 100644 --- a/src/browser/tests/element/selector_invalid.html +++ b/src/browser/tests/element/selector_invalid.html @@ -43,8 +43,8 @@ const container = $('#container'); // Empty selectors - testing.expectError("SyntaxError: Syntax Error", () => container.querySelector('')); - testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll('')); + testing.expectError("SyntaxError", () => container.querySelector('')); + testing.expectError("SyntaxError", () => document.querySelectorAll('')); } diff --git a/src/browser/tests/node/insert_before.html b/src/browser/tests/node/insert_before.html index 8be48e56..50dff07c 100644 --- a/src/browser/tests/node/insert_before.html +++ b/src/browser/tests/node/insert_before.html @@ -19,7 +19,6 @@ testing.withError((err) => { testing.expectEqual(8, err.code); testing.expectEqual("NotFoundError", err.name); - testing.expectEqual("Not Found", err.message); }, () => d1.insertBefore(document.createElement('div'), d2)); let c1 = document.createElement('div'); diff --git a/src/browser/tests/node/remove_child.html b/src/browser/tests/node/remove_child.html index fdf0b813..1118e4cf 100644 --- 
a/src/browser/tests/node/remove_child.html +++ b/src/browser/tests/node/remove_child.html @@ -7,7 +7,6 @@ testing.withError((err) => { testing.expectEqual(8, err.code); testing.expectEqual("NotFoundError", err.name); - testing.expectEqual("Not Found", err.message); }, () => $('#d1').removeChild($('#p1'))); const p1 = $('#p1'); diff --git a/src/browser/tests/node/replace_child.html b/src/browser/tests/node/replace_child.html index 45ed1bc5..51b0a173 100644 --- a/src/browser/tests/node/replace_child.html +++ b/src/browser/tests/node/replace_child.html @@ -25,7 +25,6 @@ testing.withError((err) => { testing.expectEqual(3, err.code); testing.expectEqual("HierarchyRequestError", err.name); - testing.expectEqual("Hierarchy Error", err.message); }, () => d1.replaceChild(c4, c3)); testing.expectEqual(c2, d1.replaceChild(c4, c2)); diff --git a/src/browser/tests/range.html b/src/browser/tests/range.html index d9a8637b..8440c187 100644 --- a/src/browser/tests/range.html +++ b/src/browser/tests/range.html @@ -451,12 +451,12 @@ const p1 = $('#p1'); // Test setStart with offset beyond node length - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.setStart(p1, 999); }); // Test with negative offset (wraps to large u32) - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.setStart(p1.firstChild, -1); }); } @@ -468,12 +468,12 @@ const p1 = $('#p1'); // Test setEnd with offset beyond node length - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.setEnd(p1, 999); }); // Test with text node - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { 
range.setEnd(p1.firstChild, 9999); }); } @@ -525,11 +525,11 @@ range.setEnd(p1, 1); // Test comparePoint with invalid offset - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.comparePoint(p1, 20); }); - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.comparePoint(p1.firstChild, -1); }); } @@ -650,11 +650,11 @@ range.setEnd(p1, 1); // Invalid offset should throw IndexSizeError - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.isPointInRange(p1, 999); }); - testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => { + testing.expectError('IndexSizeError:', () => { range.isPointInRange(p1.firstChild, 9999); }); } @@ -854,11 +854,11 @@ range2.setStart(p, 0); // Invalid how parameter should throw NotSupportedError - testing.expectError('NotSupportedError: Not Supported', () => { + testing.expectError('NotSupportedError:', () => { range1.compareBoundaryPoints(4, range2); }); - testing.expectError('NotSupportedError: Not Supported', () => { + testing.expectError('NotSupportedError:', () => { range1.compareBoundaryPoints(99, range2); }); } @@ -883,7 +883,7 @@ range2.setEnd(foreignP, 1); // Comparing ranges in different documents should throw WrongDocumentError - testing.expectError('WrongDocumentError: wrong_document_error', () => { + testing.expectError('WrongDocumentError:', () => { range1.compareBoundaryPoints(Range.START_TO_START, range2); }); } diff --git a/src/browser/tests/window/window.html b/src/browser/tests/window/window.html index 01025b86..e4094f9b 100644 --- a/src/browser/tests/window/window.html +++ b/src/browser/tests/window/window.html @@ -82,7 +82,7 @@ 
testing.expectEqual('ceil', atob('Y2VpbA')); // 6 chars, len%4==2, needs '==' // length % 4 == 1 must still throw - testing.expectError('InvalidCharacterError: Invalid Character', () => { + testing.expectError('InvalidCharacterError', () => { atob('Y'); }); From deb08b788004805f40374480e5f452f372171732 Mon Sep 17 00:00:00 2001 From: Karl Seguin Date: Tue, 17 Mar 2026 08:15:49 +0800 Subject: [PATCH 17/17] Try to improve stability of history test Tests cannot navigate away from the page. If they do, the testRunner will crash, as it tries to access `assertOk` on a page that no longer exists. This commit hacks the history test, using an iframe, to try to test the history API without navigating off the main page. --- src/browser/tests/history.html | 42 +++++++------------------- src/browser/tests/support/history.html | 33 ++++++++++++++++++++ src/browser/tests/testing.js | 5 ++- 3 files changed, 46 insertions(+), 34 deletions(-) create mode 100644 src/browser/tests/support/history.html diff --git a/src/browser/tests/history.html b/src/browser/tests/history.html index 1508e232..e2aa0d35 100644 --- a/src/browser/tests/history.html +++ b/src/browser/tests/history.html @@ -2,37 +2,17 @@ + diff --git a/src/browser/tests/support/history.html b/src/browser/tests/support/history.html new file mode 100644 index 00000000..d3356de3 --- /dev/null +++ b/src/browser/tests/support/history.html @@ -0,0 +1,33 @@ + + + + diff --git a/src/browser/tests/testing.js b/src/browser/tests/testing.js index 01bb19db..2e33c1d3 100644 --- a/src/browser/tests/testing.js +++ b/src/browser/tests/testing.js @@ -99,8 +99,7 @@ } } - // our test runner sets this to true - const IS_TEST_RUNNER = window._lightpanda_skip_auto_assert === true; + const IS_TEST_RUNNER = window.navigator.userAgent.startsWith("Lightpanda/"); window.testing = { fail: fail, @@ -118,7 +117,7 @@ BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/', }; - if (window.navigator.userAgent.startsWith("Lightpanda/") == false) { + if 
(IS_TEST_RUNNER === false) { // The page is running in a different browser. Probably a developer making sure // a test is correct. There are a few tweaks we need to do to make this a // seemless, namely around adapting paths/urls.