diff --git a/.github/actions/install/action.yml b/.github/actions/install/action.yml
index 4c0a28f9..0c4069fb 100644
--- a/.github/actions/install/action.yml
+++ b/.github/actions/install/action.yml
@@ -13,7 +13,7 @@ inputs:
zig-v8:
description: 'zig v8 version to install'
required: false
- default: 'v0.3.3'
+ default: 'v0.3.4'
v8:
description: 'v8 version to install'
required: false
@@ -46,7 +46,7 @@ runs:
- name: Cache v8
id: cache-v8
- uses: actions/cache@v4
+ uses: actions/cache@v5
env:
cache-name: cache-v8
with:
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index d2bcde3b..7cb213a9 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -72,7 +72,7 @@ jobs:
timeout-minutes: 20
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
@@ -116,7 +116,7 @@ jobs:
timeout-minutes: 20
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
@@ -158,7 +158,7 @@ jobs:
timeout-minutes: 20
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
diff --git a/.github/workflows/e2e-integration-test.yml b/.github/workflows/e2e-integration-test.yml
index 1a0217bb..dff1fb59 100644
--- a/.github/workflows/e2e-integration-test.yml
+++ b/.github/workflows/e2e-integration-test.yml
@@ -20,7 +20,7 @@ jobs:
if: github.event.pull_request.draft == false
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
@@ -32,7 +32,7 @@ jobs:
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: lightpanda-build-release
path: |
@@ -47,7 +47,7 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
@@ -55,7 +55,7 @@ jobs:
- run: npm install
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml
index 675dd36b..4b31e193 100644
--- a/.github/workflows/e2e-test.yml
+++ b/.github/workflows/e2e-test.yml
@@ -61,7 +61,7 @@ jobs:
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: lightpanda-build-release
path: |
@@ -76,7 +76,7 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
@@ -84,7 +84,7 @@ jobs:
- run: npm install
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
@@ -126,7 +126,7 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
@@ -134,7 +134,7 @@ jobs:
- run: npm install
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
@@ -189,32 +189,35 @@ jobs:
timeout-minutes: 5
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
- - run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem
-
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
- run: chmod a+x ./lightpanda
- name: run wba test
+ shell: bash
run: |
+
node webbotauth/validator.js &
VALIDATOR_PID=$!
sleep 2
- ./lightpanda fetch http://127.0.0.1:8989/ \
- --web_bot_auth_key_file private_key.pem \
+ exec 3<<< "${{ secrets.WBA_PRIVATE_KEY_PEM }}"
+
+ ./lightpanda fetch --dump http://127.0.0.1:8989/ \
+ --web_bot_auth_key_file /proc/self/fd/3 \
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }}
wait $VALIDATOR_PID
+ exec 3>&-
cdp-and-hyperfine-bench:
name: cdp-and-hyperfine-bench
@@ -239,7 +242,7 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
@@ -247,7 +250,7 @@ jobs:
- run: npm install
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
@@ -333,7 +336,7 @@ jobs:
echo "${{github.sha}}" > commit.txt
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: bench-results
path: |
@@ -361,7 +364,7 @@ jobs:
steps:
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: bench-results
@@ -379,7 +382,7 @@ jobs:
steps:
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
diff --git a/.github/workflows/wpt.yml b/.github/workflows/wpt.yml
index 71d485d0..ef887612 100644
--- a/.github/workflows/wpt.yml
+++ b/.github/workflows/wpt.yml
@@ -35,7 +35,7 @@ jobs:
run: zig build -Dprebuilt_v8_path=v8/libc_v8.a -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: lightpanda-build-release
path: |
@@ -59,7 +59,7 @@ jobs:
CGO_ENABLED=0 go build
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: wptrunner
path: |
@@ -91,14 +91,14 @@ jobs:
run: ./wpt manifest
- name: download lightpanda release
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: lightpanda-build-release
- run: chmod a+x ./lightpanda
- name: download wptrunner
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: wptrunner
@@ -116,7 +116,7 @@ jobs:
echo "${{github.sha}}" > commit.txt
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: wpt-results
path: |
@@ -139,7 +139,7 @@ jobs:
steps:
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: wpt-results
diff --git a/.github/workflows/zig-test.yml b/.github/workflows/zig-test.yml
index db2f362d..ca967c3a 100644
--- a/.github/workflows/zig-test.yml
+++ b/.github/workflows/zig-test.yml
@@ -44,7 +44,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
@@ -67,7 +67,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
with:
fetch-depth: 0
# fetch submodules recusively, to get zig-js-runtime submodules also.
@@ -83,7 +83,7 @@ jobs:
echo "${{github.sha}}" > commit.txt
- name: upload artifact
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: bench-results
path: |
@@ -109,7 +109,7 @@ jobs:
steps:
- name: download artifact
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v8
with:
name: bench-results
diff --git a/Dockerfile b/Dockerfile
index f106905a..f5cd202d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,7 +3,7 @@ FROM debian:stable-slim
ARG MINISIG=0.12
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
ARG V8=14.0.365.4
-ARG ZIG_V8=v0.3.3
+ARG ZIG_V8=v0.3.4
ARG TARGETPLATFORM
RUN apt-get update -yq && \
diff --git a/build.zig.zon b/build.zig.zon
index 9a28408b..cee52057 100644
--- a/build.zig.zon
+++ b/build.zig.zon
@@ -5,8 +5,8 @@
.minimum_zig_version = "0.15.2",
.dependencies = .{
.v8 = .{
- .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.3.tar.gz",
- .hash = "v8-0.0.0-xddH6yx3BAAGD9jSoq_ttt_bk9MectTU44s_HZxxE5LD",
+ .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.4.tar.gz",
+ .hash = "v8-0.0.0-xddH6_F3BAAiFvKY6R1H-gkuQlk19BkDQ0--uZuTrSup",
},
// .v8 = .{ .path = "../zig-v8-fork" },
.brotli = .{
diff --git a/src/browser/Browser.zig b/src/browser/Browser.zig
index 8f8c4aa2..50a7c037 100644
--- a/src/browser/Browser.zig
+++ b/src/browser/Browser.zig
@@ -91,25 +91,32 @@ pub fn runMicrotasks(self: *Browser) void {
self.env.runMicrotasks();
}
-pub fn runMacrotasks(self: *Browser) !?u64 {
+pub fn runMacrotasks(self: *Browser) !void {
const env = &self.env;
- const time_to_next = try self.env.runMacrotasks();
+ try self.env.runMacrotasks();
env.pumpMessageLoop();
// either of the above could have queued more microtasks
env.runMicrotasks();
-
- return time_to_next;
}
pub fn hasBackgroundTasks(self: *Browser) bool {
return self.env.hasBackgroundTasks();
}
+
pub fn waitForBackgroundTasks(self: *Browser) void {
self.env.waitForBackgroundTasks();
}
+pub fn msToNextMacrotask(self: *Browser) ?u64 {
+ return self.env.msToNextMacrotask();
+}
+
pub fn runIdleTasks(self: *const Browser) void {
self.env.runIdleTasks();
}
diff --git a/src/browser/HttpClient.zig b/src/browser/HttpClient.zig
index 1e74c046..136b578b 100644
--- a/src/browser/HttpClient.zig
+++ b/src/browser/HttpClient.zig
@@ -110,6 +110,8 @@ use_proxy: bool,
// Current TLS verification state, applied per-connection in makeRequest.
tls_verify: bool = true,
+obey_robots: bool,
+
cdp_client: ?CDPClient = null,
// libcurl can monitor arbitrary sockets, this lets us use libcurl to poll
@@ -154,6 +156,7 @@ pub fn init(allocator: Allocator, network: *Network) !*Client {
.http_proxy = http_proxy,
.use_proxy = http_proxy != null,
.tls_verify = network.config.tlsVerifyHost(),
+ .obey_robots = network.config.obeyRobots(),
.transfer_pool = transfer_pool,
};
@@ -257,34 +260,33 @@ pub fn tick(self: *Client, timeout_ms: u32) !PerformStatus {
}
pub fn request(self: *Client, req: Request) !void {
- if (self.network.config.obeyRobots()) {
- const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
- errdefer self.allocator.free(robots_url);
-
- // If we have this robots cached, we can take a fast path.
- if (self.network.robot_store.get(robots_url)) |robot_entry| {
- defer self.allocator.free(robots_url);
-
- switch (robot_entry) {
- // If we have a found robots entry, we check it.
- .present => |robots| {
- const path = URL.getPathname(req.url);
- if (!robots.isAllowed(path)) {
- req.error_callback(req.ctx, error.RobotsBlocked);
- return;
- }
- },
- // Otherwise, we assume we won't find it again.
- .absent => {},
- }
-
- return self.processRequest(req);
- }
-
- return self.fetchRobotsThenProcessRequest(robots_url, req);
+ if (self.obey_robots == false) {
+ return self.processRequest(req);
}
- return self.processRequest(req);
+ const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
+ errdefer self.allocator.free(robots_url);
+
+ // If we have this robots cached, we can take a fast path.
+ if (self.network.robot_store.get(robots_url)) |robot_entry| {
+ defer self.allocator.free(robots_url);
+
+ switch (robot_entry) {
+ // If we have a found robots entry, we check it.
+ .present => |robots| {
+ const path = URL.getPathname(req.url);
+ if (!robots.isAllowed(path)) {
+ req.error_callback(req.ctx, error.RobotsBlocked);
+ return;
+ }
+ },
+ // Otherwise, we assume we won't find it again.
+ .absent => {},
+ }
+
+ return self.processRequest(req);
+ }
+ return self.fetchRobotsThenProcessRequest(robots_url, req);
}
fn processRequest(self: *Client, req: Request) !void {
diff --git a/src/browser/Mime.zig b/src/browser/Mime.zig
index 43ca3632..e23d48a2 100644
--- a/src/browser/Mime.zig
+++ b/src/browser/Mime.zig
@@ -25,6 +25,7 @@ params: []const u8 = "",
// We keep 41 for null-termination since HTML parser expects in this format.
charset: [41]u8 = default_charset,
charset_len: usize = default_charset_len,
+is_default_charset: bool = true,
/// String "UTF-8" continued by null characters.
const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
@@ -130,6 +131,7 @@ pub fn parse(input: []u8) !Mime {
var charset: [41]u8 = default_charset;
var charset_len: usize = default_charset_len;
+ var has_explicit_charset = false;
var it = std.mem.splitScalar(u8, params, ';');
while (it.next()) |attr| {
@@ -156,6 +158,7 @@ pub fn parse(input: []u8) !Mime {
// Null-terminate right after attribute value.
charset[attribute_value.len] = 0;
charset_len = attribute_value.len;
+ has_explicit_charset = true;
},
}
}
@@ -165,9 +168,137 @@ pub fn parse(input: []u8) !Mime {
.charset = charset,
.charset_len = charset_len,
.content_type = content_type,
+ .is_default_charset = !has_explicit_charset,
};
}
+/// Prescan the first 1024 bytes of an HTML document for a charset declaration.
+/// Looks for `<meta charset="...">` and
+/// `<meta http-equiv="content-type" content="...; charset=...">`.
+/// Returns the charset value or null if none found.
+/// See: https://www.w3.org/International/questions/qa-html-encoding-declarations
+pub fn prescanCharset(html: []const u8) ?[]const u8 {
+ const limit = @min(html.len, 1024);
+ const data = html[0..limit];
+
+    // Scan for "<meta" tags within the prescan window.
+    var pos: usize = 0;
+    while (true) {
+        pos = std.mem.indexOfScalarPos(u8, data, pos, '<') orelse return null;
+        pos += 1; // skip '<'
+        if (pos >= data.len) return null;
+
+ // Check for "meta" (case-insensitive)
+ if (pos + 4 >= data.len) return null;
+ var tag_buf: [4]u8 = undefined;
+ _ = std.ascii.lowerString(&tag_buf, data[pos..][0..4]);
+ if (!std.mem.eql(u8, &tag_buf, "meta")) {
+ continue;
+ }
+ pos += 4;
+
+ // Must be followed by whitespace or end of tag
+ if (pos >= data.len) return null;
+ if (data[pos] != ' ' and data[pos] != '\t' and data[pos] != '\n' and
+ data[pos] != '\r' and data[pos] != '/')
+ {
+ continue;
+ }
+
+ // Scan attributes within this meta tag
+ const tag_end = std.mem.indexOfScalarPos(u8, data, pos, '>') orelse return null;
+ const attrs = data[pos..tag_end];
+
+ // Look for charset= attribute directly
+ if (findAttrValue(attrs, "charset")) |charset| {
+ if (charset.len > 0 and charset.len <= 40) return charset;
+ }
+
+ // Look for http-equiv="content-type" with content="...;charset=X"
+ if (findAttrValue(attrs, "http-equiv")) |he| {
+ if (std.ascii.eqlIgnoreCase(he, "content-type")) {
+ if (findAttrValue(attrs, "content")) |content| {
+ if (extractCharsetFromContentType(content)) |charset| {
+ return charset;
+ }
+ }
+ }
+ }
+
+ pos = tag_end + 1;
+ }
+ return null;
+}
+
+fn findAttrValue(attrs: []const u8, name: []const u8) ?[]const u8 {
+ var pos: usize = 0;
+ while (pos < attrs.len) {
+ // Skip whitespace
+ while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t' or
+ attrs[pos] == '\n' or attrs[pos] == '\r'))
+ {
+ pos += 1;
+ }
+ if (pos >= attrs.len) return null;
+
+ // Read attribute name
+ const attr_start = pos;
+ while (pos < attrs.len and attrs[pos] != '=' and attrs[pos] != ' ' and
+ attrs[pos] != '\t' and attrs[pos] != '>' and attrs[pos] != '/')
+ {
+ pos += 1;
+ }
+ const attr_name = attrs[attr_start..pos];
+
+ // Skip whitespace around =
+ while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
+ if (pos >= attrs.len or attrs[pos] != '=') {
+ // No '=' found - skip this token. Advance at least one byte to avoid infinite loop.
+ if (pos == attr_start) pos += 1;
+ continue;
+ }
+ pos += 1; // skip '='
+ while (pos < attrs.len and (attrs[pos] == ' ' or attrs[pos] == '\t')) pos += 1;
+ if (pos >= attrs.len) return null;
+
+ // Read attribute value
+ const value = blk: {
+ if (attrs[pos] == '"' or attrs[pos] == '\'') {
+ const quote = attrs[pos];
+ pos += 1;
+ const val_start = pos;
+ while (pos < attrs.len and attrs[pos] != quote) pos += 1;
+ const val = attrs[val_start..pos];
+ if (pos < attrs.len) pos += 1; // skip closing quote
+ break :blk val;
+ } else {
+ const val_start = pos;
+ while (pos < attrs.len and attrs[pos] != ' ' and attrs[pos] != '\t' and
+ attrs[pos] != '>' and attrs[pos] != '/')
+ {
+ pos += 1;
+ }
+ break :blk attrs[val_start..pos];
+ }
+ };
+
+ if (std.ascii.eqlIgnoreCase(attr_name, name)) return value;
+ }
+ return null;
+}
+
+fn extractCharsetFromContentType(content: []const u8) ?[]const u8 {
+ var it = std.mem.splitScalar(u8, content, ';');
+ while (it.next()) |part| {
+ const trimmed = std.mem.trimLeft(u8, part, &.{ ' ', '\t' });
+ if (trimmed.len > 8 and std.ascii.eqlIgnoreCase(trimmed[0..8], "charset=")) {
+ const val = std.mem.trim(u8, trimmed[8..], &.{ ' ', '\t', '"', '\'' });
+ if (val.len > 0 and val.len <= 40) return val;
+ }
+ }
+ return null;
+}
+
pub fn sniff(body: []const u8) ?Mime {
// 0x0C is form feed
const content = std.mem.trimLeft(u8, body, &.{ ' ', '\t', '\n', '\r', 0x0C });
@@ -178,15 +309,30 @@ pub fn sniff(body: []const u8) ?Mime {
if (content[0] != '<') {
if (std.mem.startsWith(u8, content, &.{ 0xEF, 0xBB, 0xBF })) {
// UTF-8 BOM
- return .{ .content_type = .{ .text_plain = {} } };
+ return .{
+ .content_type = .{ .text_plain = {} },
+ .charset = default_charset,
+ .charset_len = default_charset_len,
+ .is_default_charset = false,
+ };
}
if (std.mem.startsWith(u8, content, &.{ 0xFE, 0xFF })) {
// UTF-16 big-endian BOM
- return .{ .content_type = .{ .text_plain = {} } };
+ return .{
+ .content_type = .{ .text_plain = {} },
+ .charset = .{ 'U', 'T', 'F', '-', '1', '6', 'B', 'E' } ++ .{0} ** 33,
+ .charset_len = 8,
+ .is_default_charset = false,
+ };
}
if (std.mem.startsWith(u8, content, &.{ 0xFF, 0xFE })) {
// UTF-16 little-endian BOM
- return .{ .content_type = .{ .text_plain = {} } };
+ return .{
+ .content_type = .{ .text_plain = {} },
+ .charset = .{ 'U', 'T', 'F', '-', '1', '6', 'L', 'E' } ++ .{0} ** 33,
+ .charset_len = 8,
+ .is_default_charset = false,
+ };
}
return null;
}
@@ -540,6 +686,24 @@ test "Mime: sniff" {
try expectHTML("");
try expectHTML(" \n\t ");
+
+ {
+ const mime = Mime.sniff(&.{ 0xEF, 0xBB, 0xBF }).?;
+ try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
+ try testing.expectEqual("UTF-8", mime.charsetString());
+ }
+
+ {
+ const mime = Mime.sniff(&.{ 0xFE, 0xFF }).?;
+ try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
+ try testing.expectEqual("UTF-16BE", mime.charsetString());
+ }
+
+ {
+ const mime = Mime.sniff(&.{ 0xFF, 0xFE }).?;
+ try testing.expectEqual(.text_plain, std.meta.activeTag(mime.content_type));
+ try testing.expectEqual("UTF-16LE", mime.charsetString());
+ }
}
const Expectation = struct {
@@ -576,3 +740,35 @@ fn expect(expected: Expectation, input: []const u8) !void {
try testing.expectEqual(m.charsetStringZ(), actual.charsetStringZ());
}
}
+
+test "Mime: prescanCharset" {
+    // <meta charset="...">
+    try testing.expectEqual("utf-8", Mime.prescanCharset("<meta charset=\"utf-8\">").?);
+    try testing.expectEqual("iso-8859-1", Mime.prescanCharset("<meta charset='iso-8859-1'>").?);
+    try testing.expectEqual("shift_jis", Mime.prescanCharset("<meta charset=shift_jis>").?);
+
+    // Case-insensitive tag matching
+    try testing.expectEqual("utf-8", Mime.prescanCharset("<META CHARSET=\"utf-8\">").?);
+    try testing.expectEqual("utf-8", Mime.prescanCharset("<MeTa cHaRsEt=\"utf-8\">").?);
+
+    // <meta http-equiv="content-type" content="...; charset=...">
+    try testing.expectEqual(
+        "iso-8859-1",
+        Mime.prescanCharset("<meta http-equiv=\"content-type\" content=\"text/html; charset=iso-8859-1\">").?,
+    );
+
+    // No charset found
+    try testing.expectEqual(null, Mime.prescanCharset("<p>Test</p>"));
+    try testing.expectEqual(null, Mime.prescanCharset("<meta name=\"viewport\" content=\"width=device-width\">"));
+    try testing.expectEqual(null, Mime.prescanCharset("no html here"));
+
+    // Self-closing meta without charset must not loop forever
+    try testing.expectEqual(null, Mime.prescanCharset("<meta/>"));
+
+    // Charset after 1024 bytes should not be found
+    var long_html: [1100]u8 = undefined;
+    @memset(&long_html, ' ');
+    const suffix = "<meta charset=\"utf-8\">";
+    @memcpy(long_html[1050 .. 1050 + suffix.len], suffix);
+    try testing.expectEqual(null, Mime.prescanCharset(&long_html));
+}
diff --git a/src/browser/Page.zig b/src/browser/Page.zig
index 0fa5bba3..c46a99cd 100644
--- a/src/browser/Page.zig
+++ b/src/browser/Page.zig
@@ -710,11 +710,14 @@ pub fn scriptsCompletedLoading(self: *Page) void {
}
pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void {
- blk: {
- var ls: JS.Local.Scope = undefined;
- self.js.localScope(&ls);
- defer ls.deinit();
+ var ls: JS.Local.Scope = undefined;
+ self.js.localScope(&ls);
+ defer ls.deinit();
+ const entered = self.js.enter(&ls.handle_scope);
+ defer entered.exit();
+
+ blk: {
const event = Event.initTrusted(comptime .wrap("load"), .{}, self) catch |err| {
log.err(.page, "iframe event init", .{ .err = err, .url = iframe._src });
break :blk;
@@ -723,6 +726,7 @@ pub fn iframeCompletedLoading(self: *Page, iframe: *IFrame) void {
log.warn(.js, "iframe onload", .{ .err = err, .url = iframe._src });
};
}
+
self.pendingLoadCompleted();
}
@@ -849,13 +853,25 @@ fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
if (self._parse_state == .pre) {
// we lazily do this, because we might need the first chunk of data
// to sniff the content type
- const mime: Mime = blk: {
+ var mime: Mime = blk: {
if (transfer.response_header.?.contentType()) |ct| {
break :blk try Mime.parse(ct);
}
break :blk Mime.sniff(data);
} orelse .unknown;
+ // If the HTTP Content-Type header didn't specify a charset and this is HTML,
+ // prescan the first 1024 bytes for a declaration.
+ if (mime.content_type == .text_html and mime.is_default_charset) {
+ if (Mime.prescanCharset(data)) |charset| {
+ if (charset.len <= 40) {
+ @memcpy(mime.charset[0..charset.len], charset);
+ mime.charset[charset.len] = 0;
+ mime.charset_len = charset.len;
+ }
+ }
+ }
+
if (comptime IS_DEBUG) {
log.debug(.page, "navigate first chunk", .{
.content_type = mime.content_type,
diff --git a/src/browser/ScriptManager.zig b/src/browser/ScriptManager.zig
index 2baeef8d..a37493eb 100644
--- a/src/browser/ScriptManager.zig
+++ b/src/browser/ScriptManager.zig
@@ -63,9 +63,6 @@ shutdown: bool = false,
client: *HttpClient,
allocator: Allocator,
-buffer_pool: BufferPool,
-
-script_pool: std.heap.MemoryPool(Script),
// We can download multiple sync modules in parallel, but we want to process
// them in order. We can't use an std.DoublyLinkedList, like the other script types,
@@ -101,18 +98,14 @@ pub fn init(allocator: Allocator, http_client: *HttpClient, page: *Page) ScriptM
.imported_modules = .empty,
.client = http_client,
.static_scripts_done = false,
- .buffer_pool = BufferPool.init(allocator, 5),
.page_notified_of_completion = false,
- .script_pool = std.heap.MemoryPool(Script).init(allocator),
};
}
pub fn deinit(self: *ScriptManager) void {
- // necessary to free any buffers scripts may be referencing
+ // necessary to free any arenas scripts may be referencing
self.reset();
- self.buffer_pool.deinit();
- self.script_pool.deinit();
self.imported_modules.deinit(self.allocator);
// we don't deinit self.importmap b/c we use the page's arena for its
// allocations.
@@ -121,7 +114,10 @@ pub fn deinit(self: *ScriptManager) void {
pub fn reset(self: *ScriptManager) void {
var it = self.imported_modules.valueIterator();
while (it.next()) |value_ptr| {
- self.buffer_pool.release(value_ptr.buffer);
+ switch (value_ptr.state) {
+ .done => |script| script.deinit(),
+ else => {},
+ }
}
self.imported_modules.clearRetainingCapacity();
@@ -138,13 +134,13 @@ pub fn reset(self: *ScriptManager) void {
fn clearList(list: *std.DoublyLinkedList) void {
while (list.popFirst()) |n| {
const script: *Script = @fieldParentPtr("node", n);
- script.deinit(true);
+ script.deinit();
}
}
-pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !net_http.Headers {
+fn getHeaders(self: *ScriptManager, arena: Allocator, url: [:0]const u8) !net_http.Headers {
var headers = try self.client.newHeaders();
- try self.page.headersForRequest(self.page.arena, url, &headers);
+ try self.page.headersForRequest(arena, url, &headers);
return headers;
}
@@ -191,19 +187,26 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
return;
};
+ var handover = false;
const page = self.page;
+
+ const arena = try page.getArena(.{ .debug = "addFromElement" });
+ errdefer if (!handover) {
+ page.releaseArena(arena);
+ };
+
var source: Script.Source = undefined;
var remote_url: ?[:0]const u8 = null;
const base_url = page.base();
if (element.getAttributeSafe(comptime .wrap("src"))) |src| {
- if (try parseDataURI(page.arena, src)) |data_uri| {
+ if (try parseDataURI(arena, src)) |data_uri| {
source = .{ .@"inline" = data_uri };
} else {
- remote_url = try URL.resolve(page.arena, base_url, src, .{});
+ remote_url = try URL.resolve(arena, base_url, src, .{});
source = .{ .remote = .{} };
}
} else {
- var buf = std.Io.Writer.Allocating.init(page.arena);
+ var buf = std.Io.Writer.Allocating.init(arena);
try element.asNode().getChildTextContent(&buf.writer);
try buf.writer.writeByte(0);
const data = buf.written();
@@ -218,15 +221,13 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
// Only set _executed (already-started) when we actually have content to execute
script_element._executed = true;
-
- const script = try self.script_pool.create();
- errdefer self.script_pool.destroy(script);
-
const is_inline = source == .@"inline";
+ const script = try arena.create(Script);
script.* = .{
.kind = kind,
.node = .{},
+ .arena = arena,
.manager = self,
.source = source,
.script_element = script_element,
@@ -270,7 +271,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
if (is_blocking == false) {
self.scriptList(script).remove(&script.node);
}
- script.deinit(true);
+ script.deinit();
}
try self.client.request(.{
@@ -278,7 +279,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
.ctx = script,
.method = .GET,
.frame_id = page._frame_id,
- .headers = try self.getHeaders(url),
+ .headers = try self.getHeaders(arena, url),
.blocking = is_blocking,
.cookie_jar = &page._session.cookie_jar,
.resource_type = .script,
@@ -289,6 +290,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
.done_callback = Script.doneCallback,
.error_callback = Script.errorCallback,
});
+ handover = true;
if (comptime IS_DEBUG) {
var ls: js.Local.Scope = undefined;
@@ -318,7 +320,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
}
if (script.status == 0) {
// an error (that we already logged)
- script.deinit(true);
+ script.deinit();
return;
}
@@ -327,7 +329,7 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
self.is_evaluating = true;
defer {
self.is_evaluating = was_evaluating;
- script.deinit(true);
+ script.deinit();
}
return script.eval(page);
}
@@ -359,11 +361,14 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
}
errdefer _ = self.imported_modules.remove(url);
- const script = try self.script_pool.create();
- errdefer self.script_pool.destroy(script);
+ const page = self.page;
+ const arena = try page.getArena(.{ .debug = "preloadImport" });
+ errdefer page.releaseArena(arena);
+ const script = try arena.create(Script);
script.* = .{
.kind = .module,
+ .arena = arena,
.url = url,
.node = .{},
.manager = self,
@@ -373,11 +378,7 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
.mode = .import,
};
- gop.value_ptr.* = ImportedModule{
- .manager = self,
- };
-
- const page = self.page;
+ gop.value_ptr.* = ImportedModule{};
if (comptime IS_DEBUG) {
var ls: js.Local.Scope = undefined;
@@ -392,12 +393,18 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
});
}
- try self.client.request(.{
+ // This seems wrong since we're not dealing with an async import (unlike
+ // getAsyncModule below), but all we're trying to do here is pre-load the
+ // script for execution at some point in the future (when waitForImport is
+ // called).
+ self.async_scripts.append(&script.node);
+
+ self.client.request(.{
.url = url,
.ctx = script,
.method = .GET,
.frame_id = page._frame_id,
- .headers = try self.getHeaders(url),
+ .headers = try self.getHeaders(arena, url),
.cookie_jar = &page._session.cookie_jar,
.resource_type = .script,
.notification = page._session.notification,
@@ -406,13 +413,10 @@ pub fn preloadImport(self: *ScriptManager, url: [:0]const u8, referrer: []const
.data_callback = Script.dataCallback,
.done_callback = Script.doneCallback,
.error_callback = Script.errorCallback,
- });
-
- // This seems wrong since we're not dealing with an async import (unlike
- // getAsyncModule below), but all we're trying to do here is pre-load the
- // script for execution at some point in the future (when waitForImport is
- // called).
- self.async_scripts.append(&script.node);
+ }) catch |err| {
+ self.async_scripts.remove(&script.node);
+ return err;
+ };
}
pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
@@ -433,12 +437,12 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
_ = try client.tick(200);
continue;
},
- .done => {
+ .done => |script| {
var shared = false;
const buffer = entry.value_ptr.buffer;
const waiters = entry.value_ptr.waiters;
- if (waiters == 0) {
+ if (waiters == 1) {
self.imported_modules.removeByPtr(entry.key_ptr);
} else {
shared = true;
@@ -447,7 +451,7 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
return .{
.buffer = buffer,
.shared = shared,
- .buffer_pool = &self.buffer_pool,
+ .script = script,
};
},
.err => return error.Failed,
@@ -456,11 +460,14 @@ pub fn waitForImport(self: *ScriptManager, url: [:0]const u8) !ModuleSource {
}
pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.Callback, cb_data: *anyopaque, referrer: []const u8) !void {
- const script = try self.script_pool.create();
- errdefer self.script_pool.destroy(script);
+ const page = self.page;
+ const arena = try page.getArena(.{ .debug = "getAsyncImport" });
+ errdefer page.releaseArena(arena);
+ const script = try arena.create(Script);
script.* = .{
.kind = .module,
+ .arena = arena,
.url = url,
.node = .{},
.manager = self,
@@ -473,7 +480,6 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
} },
};
- const page = self.page;
if (comptime IS_DEBUG) {
var ls: js.Local.Scope = undefined;
page.js.localScope(&ls);
@@ -496,11 +502,12 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
self.is_evaluating = true;
defer self.is_evaluating = was_evaluating;
- try self.client.request(.{
+ self.async_scripts.append(&script.node);
+ self.client.request(.{
.url = url,
.method = .GET,
.frame_id = page._frame_id,
- .headers = try self.getHeaders(url),
+ .headers = try self.getHeaders(arena, url),
.ctx = script,
.resource_type = .script,
.cookie_jar = &page._session.cookie_jar,
@@ -510,9 +517,10 @@ pub fn getAsyncImport(self: *ScriptManager, url: [:0]const u8, cb: ImportAsync.C
.data_callback = Script.dataCallback,
.done_callback = Script.doneCallback,
.error_callback = Script.errorCallback,
- });
-
- self.async_scripts.append(&script.node);
+ }) catch |err| {
+ self.async_scripts.remove(&script.node);
+ return err;
+ };
}
// Called from the Page to let us know it's done parsing the HTML. Necessary that
@@ -537,18 +545,18 @@ fn evaluate(self: *ScriptManager) void {
var script: *Script = @fieldParentPtr("node", n);
switch (script.mode) {
.async => {
- defer script.deinit(true);
+ defer script.deinit();
script.eval(page);
},
.import_async => |ia| {
- defer script.deinit(false);
if (script.status < 200 or script.status > 299) {
+ script.deinit();
ia.callback(ia.data, error.FailedToLoad);
} else {
ia.callback(ia.data, .{
.shared = false,
+ .script = script,
.buffer = script.source.remote,
- .buffer_pool = &self.buffer_pool,
});
}
},
@@ -574,7 +582,7 @@ fn evaluate(self: *ScriptManager) void {
}
defer {
_ = self.defer_scripts.popFirst();
- script.deinit(true);
+ script.deinit();
}
script.eval(page);
}
@@ -625,11 +633,12 @@ fn parseImportmap(self: *ScriptManager, script: *const Script) !void {
}
pub const Script = struct {
- complete: bool,
kind: Kind,
+ complete: bool,
status: u16 = 0,
source: Source,
url: []const u8,
+ arena: Allocator,
mode: ExecutionMode,
node: std.DoublyLinkedList.Node,
script_element: ?*Element.Html.Script,
@@ -680,11 +689,8 @@ pub const Script = struct {
import_async: ImportAsync,
};
- fn deinit(self: *Script, comptime release_buffer: bool) void {
- if ((comptime release_buffer) and self.source == .remote) {
- self.manager.buffer_pool.release(self.source.remote);
- }
- self.manager.script_pool.destroy(self);
+ fn deinit(self: *Script) void {
+ self.manager.page.releaseArena(self.arena);
}
fn startCallback(transfer: *HttpClient.Transfer) !void {
@@ -750,9 +756,9 @@ pub const Script = struct {
}
lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity });
- var buffer = self.manager.buffer_pool.get();
+ var buffer: std.ArrayList(u8) = .empty;
if (transfer.getContentLength()) |cl| {
- try buffer.ensureTotalCapacity(self.manager.allocator, cl);
+ try buffer.ensureTotalCapacity(self.arena, cl);
}
self.source = .{ .remote = buffer };
return true;
@@ -766,7 +772,7 @@ pub const Script = struct {
};
}
fn _dataCallback(self: *Script, _: *HttpClient.Transfer, data: []const u8) !void {
- try self.source.remote.appendSlice(self.manager.allocator, data);
+ try self.source.remote.appendSlice(self.arena, data);
}
fn doneCallback(ctx: *anyopaque) !void {
@@ -783,9 +789,8 @@ pub const Script = struct {
} else if (self.mode == .import) {
manager.async_scripts.remove(&self.node);
const entry = manager.imported_modules.getPtr(self.url).?;
- entry.state = .done;
+ entry.state = .{ .done = self };
entry.buffer = self.source.remote;
- self.deinit(false);
}
manager.evaluate();
}
@@ -811,7 +816,7 @@ pub const Script = struct {
const manager = self.manager;
manager.scriptList(self).remove(&self.node);
if (manager.shutdown) {
- self.deinit(true);
+ self.deinit();
return;
}
@@ -823,7 +828,7 @@ pub const Script = struct {
},
else => {},
}
- self.deinit(true);
+ self.deinit();
manager.evaluate();
}
@@ -951,76 +956,6 @@ pub const Script = struct {
}
};
-const BufferPool = struct {
- count: usize,
- available: List = .{},
- allocator: Allocator,
- max_concurrent_transfers: u8,
- mem_pool: std.heap.MemoryPool(Container),
-
- const List = std.SinglyLinkedList;
-
- const Container = struct {
- node: List.Node,
- buf: std.ArrayList(u8),
- };
-
- fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
- return .{
- .available = .{},
- .count = 0,
- .allocator = allocator,
- .max_concurrent_transfers = max_concurrent_transfers,
- .mem_pool = std.heap.MemoryPool(Container).init(allocator),
- };
- }
-
- fn deinit(self: *BufferPool) void {
- const allocator = self.allocator;
-
- var node = self.available.first;
- while (node) |n| {
- const container: *Container = @fieldParentPtr("node", n);
- container.buf.deinit(allocator);
- node = n.next;
- }
- self.mem_pool.deinit();
- }
-
- fn get(self: *BufferPool) std.ArrayList(u8) {
- const node = self.available.popFirst() orelse {
- // return a new buffer
- return .{};
- };
-
- self.count -= 1;
- const container: *Container = @fieldParentPtr("node", node);
- defer self.mem_pool.destroy(container);
- return container.buf;
- }
-
- fn release(self: *BufferPool, buffer: ArrayList(u8)) void {
- // create mutable copy
- var b = buffer;
-
- if (self.count == self.max_concurrent_transfers) {
- b.deinit(self.allocator);
- return;
- }
-
- const container = self.mem_pool.create() catch |err| {
- b.deinit(self.allocator);
- log.err(.http, "SM BufferPool release", .{ .err = err });
- return;
- };
-
- b.clearRetainingCapacity();
- container.* = .{ .buf = b, .node = .{} };
- self.count += 1;
- self.available.prepend(&container.node);
- }
-};
-
const ImportAsync = struct {
data: *anyopaque,
callback: ImportAsync.Callback,
@@ -1030,12 +965,12 @@ const ImportAsync = struct {
pub const ModuleSource = struct {
shared: bool,
- buffer_pool: *BufferPool,
+ script: *Script,
buffer: std.ArrayList(u8),
pub fn deinit(self: *ModuleSource) void {
if (self.shared == false) {
- self.buffer_pool.release(self.buffer);
+ self.script.deinit();
}
}
@@ -1045,15 +980,14 @@ pub const ModuleSource = struct {
};
const ImportedModule = struct {
- manager: *ScriptManager,
+ waiters: u16 = 1,
state: State = .loading,
buffer: std.ArrayList(u8) = .{},
- waiters: u16 = 1,
- const State = enum {
+ const State = union(enum) {
err,
- done,
loading,
+ done: *Script,
};
};
diff --git a/src/browser/Session.zig b/src/browser/Session.zig
index 404a8bc4..73b6b26e 100644
--- a/src/browser/Session.zig
+++ b/src/browser/Session.zig
@@ -401,7 +401,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
// scheduler.run could trigger new http transfers, so do not
// store http_client.active BEFORE this call and then use
// it AFTER.
- const ms_to_next_task = try browser.runMacrotasks();
+ try browser.runMacrotasks();
// Each call to this runs scheduled load events.
try page.dispatchLoad();
@@ -423,16 +423,16 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
std.debug.assert(http_client.intercepted == 0);
}
- var ms: u64 = ms_to_next_task orelse blk: {
- if (wait_ms - ms_remaining < 100) {
- if (comptime builtin.is_test) {
- return .done;
- }
- // Look, we want to exit ASAP, but we don't want
- // to exit so fast that we've run none of the
- // background jobs.
- break :blk 50;
- }
+ var ms = blk: {
+ // if (wait_ms - ms_remaining < 100) {
+ // if (comptime builtin.is_test) {
+ // return .done;
+ // }
+ // // Look, we want to exit ASAP, but we don't want
+ // // to exit so fast that we've run none of the
+ // // background jobs.
+ // break :blk 50;
+ // }
if (browser.hasBackgroundTasks()) {
// _we_ have nothing to run, but v8 is working on
@@ -441,9 +441,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
break :blk 20;
}
- // No http transfers, no cdp extra socket, no
- // scheduled tasks, we're done.
- return .done;
+ break :blk browser.msToNextMacrotask() orelse return .done;
};
if (ms > ms_remaining) {
@@ -470,9 +468,9 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
// We're here because we either have active HTTP
// connections, or exit_when_done == false (aka, there's
// an cdp_socket registered with the http client).
- // We should continue to run lowPriority tasks, so we
- // minimize how long we'll poll for network I/O.
- var ms_to_wait = @min(200, ms_to_next_task orelse 200);
+ // We should continue to run tasks, so we minimize how long
+ // we'll poll for network I/O.
+ var ms_to_wait = @min(200, browser.msToNextMacrotask() orelse 200);
if (ms_to_wait > 10 and browser.hasBackgroundTasks()) {
// if we have background tasks, we don't want to wait too
// long for a message from the client. We want to go back
diff --git a/src/browser/js/Context.zig b/src/browser/js/Context.zig
index 70af9d24..ea1b6f7a 100644
--- a/src/browser/js/Context.zig
+++ b/src/browser/js/Context.zig
@@ -167,12 +167,11 @@ pub fn setOrigin(self: *Context, key: ?[]const u8) !void {
const env = self.env;
const isolate = env.isolate;
+ lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc });
+
const origin = try self.session.getOrCreateOrigin(key);
errdefer self.session.releaseOrigin(origin);
-
- try self.origin.transferTo(origin);
- lp.assert(self.origin.rc == 1, "Ref opaque origin", .{ .rc = self.origin.rc });
- self.origin.deinit(env.app);
+ try origin.takeover(self.origin);
self.origin = origin;
@@ -255,6 +254,10 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type
return l.toLocal(global);
}
+pub fn getIncumbent(self: *Context) *Page {
+ return fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).page;
+}
+
pub fn stringToPersistedFunction(
self: *Context,
function_body: []const u8,
@@ -306,15 +309,15 @@ pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local
}
const owned_url = try arena.dupeZ(u8, url);
+ if (cacheable and !gop.found_existing) {
+ gop.key_ptr.* = owned_url;
+ }
const m = try compileModule(local, src, owned_url);
if (cacheable) {
// compileModule is synchronous - nothing can modify the cache during compilation
lp.assert(gop.value_ptr.module == null, "Context.module has module", .{});
gop.value_ptr.module = try m.persist();
- if (!gop.found_existing) {
- gop.key_ptr.* = owned_url;
- }
}
break :blk .{ m, owned_url };
diff --git a/src/browser/js/Env.zig b/src/browser/js/Env.zig
index ba2e3e5a..1ac9e6b3 100644
--- a/src/browser/js/Env.zig
+++ b/src/browser/js/Env.zig
@@ -382,8 +382,7 @@ pub fn runMicrotasks(self: *Env) void {
}
}
-pub fn runMacrotasks(self: *Env) !?u64 {
- var ms_to_next_task: ?u64 = null;
+pub fn runMacrotasks(self: *Env) !void {
for (self.contexts[0..self.context_count]) |ctx| {
if (comptime builtin.is_test == false) {
// I hate this comptime check as much as you do. But we have tests
@@ -398,13 +397,17 @@ pub fn runMacrotasks(self: *Env) !?u64 {
var hs: js.HandleScope = undefined;
const entered = ctx.enter(&hs);
defer entered.exit();
-
- const ms = (try ctx.scheduler.run()) orelse continue;
- if (ms_to_next_task == null or ms < ms_to_next_task.?) {
- ms_to_next_task = ms;
- }
+ try ctx.scheduler.run();
}
- return ms_to_next_task;
+}
+
+pub fn msToNextMacrotask(self: *Env) ?u64 {
+ var next_task: u64 = std.math.maxInt(u64);
+ for (self.contexts[0..self.context_count]) |ctx| {
+ const candidate = ctx.scheduler.msToNextHigh() orelse continue;
+ next_task = @min(candidate, next_task);
+ }
+ return if (next_task == std.math.maxInt(u64)) null else next_task;
}
pub fn pumpMessageLoop(self: *const Env) void {
diff --git a/src/browser/js/Origin.zig b/src/browser/js/Origin.zig
index d7e74e4f..180cfd84 100644
--- a/src/browser/js/Origin.zig
+++ b/src/browser/js/Origin.zig
@@ -68,6 +68,8 @@ temps: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
// if v8 hasn't called the finalizer directly itself.
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
+taken_over: std.ArrayList(*Origin),
+
pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
const arena = try app.arena_pool.acquire();
errdefer app.arena_pool.release(arena);
@@ -86,14 +88,19 @@ pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
.rc = 1,
.arena = arena,
.key = owned_key,
- .globals = .empty,
.temps = .empty,
+ .globals = .empty,
+ .taken_over = .empty,
.security_token = token_global,
};
return self;
}
pub fn deinit(self: *Origin, app: *App) void {
+ for (self.taken_over.items) |o| {
+ o.deinit(app);
+ }
+
// Call finalizers before releasing anything
{
var it = self.finalizer_callbacks.valueIterator();
@@ -196,42 +203,44 @@ pub fn createFinalizerCallback(
return fc;
}
-pub fn transferTo(self: *Origin, dest: *Origin) !void {
- const arena = dest.arena;
+pub fn takeover(self: *Origin, original: *Origin) !void {
+ const arena = self.arena;
- try dest.globals.ensureUnusedCapacity(arena, self.globals.items.len);
- for (self.globals.items) |obj| {
- dest.globals.appendAssumeCapacity(obj);
+ try self.globals.ensureUnusedCapacity(arena, self.globals.items.len);
+ for (original.globals.items) |obj| {
+ self.globals.appendAssumeCapacity(obj);
}
- self.globals.clearRetainingCapacity();
+ original.globals.clearRetainingCapacity();
{
- try dest.temps.ensureUnusedCapacity(arena, self.temps.count());
- var it = self.temps.iterator();
+ try self.temps.ensureUnusedCapacity(arena, original.temps.count());
+ var it = original.temps.iterator();
while (it.next()) |kv| {
- try dest.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
+ try self.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
}
- self.temps.clearRetainingCapacity();
+ original.temps.clearRetainingCapacity();
}
{
- try dest.finalizer_callbacks.ensureUnusedCapacity(arena, self.finalizer_callbacks.count());
- var it = self.finalizer_callbacks.iterator();
+ try self.finalizer_callbacks.ensureUnusedCapacity(arena, original.finalizer_callbacks.count());
+ var it = original.finalizer_callbacks.iterator();
while (it.next()) |kv| {
- kv.value_ptr.*.origin = dest;
- try dest.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
+ kv.value_ptr.*.origin = self;
+ try self.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
}
- self.finalizer_callbacks.clearRetainingCapacity();
+ original.finalizer_callbacks.clearRetainingCapacity();
}
{
- try dest.identity_map.ensureUnusedCapacity(arena, self.identity_map.count());
- var it = self.identity_map.iterator();
+ try self.identity_map.ensureUnusedCapacity(arena, original.identity_map.count());
+ var it = original.identity_map.iterator();
while (it.next()) |kv| {
- try dest.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
+ try self.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
}
- self.identity_map.clearRetainingCapacity();
+ original.identity_map.clearRetainingCapacity();
}
+
+ try self.taken_over.append(self.arena, original);
}
// A type that has a finalizer can have its finalizer called one of two ways.
diff --git a/src/browser/js/Scheduler.zig b/src/browser/js/Scheduler.zig
index e667a872..322351f3 100644
--- a/src/browser/js/Scheduler.zig
+++ b/src/browser/js/Scheduler.zig
@@ -74,9 +74,10 @@ pub fn add(self: *Scheduler, ctx: *anyopaque, cb: Callback, run_in_ms: u32, opts
});
}
-pub fn run(self: *Scheduler) !?u64 {
- _ = try self.runQueue(&self.low_priority);
- return self.runQueue(&self.high_priority);
+pub fn run(self: *Scheduler) !void {
+ const now = milliTimestamp(.monotonic);
+ try self.runQueue(&self.low_priority, now);
+ try self.runQueue(&self.high_priority, now);
}
pub fn hasReadyTasks(self: *Scheduler) bool {
@@ -84,16 +85,23 @@ pub fn hasReadyTasks(self: *Scheduler) bool {
return queueuHasReadyTask(&self.low_priority, now) or queueuHasReadyTask(&self.high_priority, now);
}
-fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
- if (queue.count() == 0) {
- return null;
- }
-
+pub fn msToNextHigh(self: *Scheduler) ?u64 {
+ const task = self.high_priority.peek() orelse return null;
const now = milliTimestamp(.monotonic);
+ if (task.run_at <= now) {
+ return 0;
+ }
+ return @intCast(task.run_at - now);
+}
+
+fn runQueue(self: *Scheduler, queue: *Queue, now: u64) !void {
+ if (queue.count() == 0) {
+ return;
+ }
while (queue.peek()) |*task_| {
if (task_.run_at > now) {
- return @intCast(task_.run_at - now);
+ return;
}
var task = queue.remove();
if (comptime IS_DEBUG) {
@@ -114,7 +122,7 @@ fn runQueue(self: *Scheduler, queue: *Queue) !?u64 {
try self.low_priority.add(task);
}
}
- return null;
+ return;
}
fn queueuHasReadyTask(queue: *Queue, now: u64) bool {
diff --git a/src/browser/tests/document/query_selector.html b/src/browser/tests/document/query_selector.html
index b333069e..0837999e 100644
--- a/src/browser/tests/document/query_selector.html
+++ b/src/browser/tests/document/query_selector.html
@@ -24,11 +24,10 @@
diff --git a/src/browser/tests/domexception.html b/src/browser/tests/domexception.html
index 1ed43e8d..05bdc837 100644
--- a/src/browser/tests/domexception.html
+++ b/src/browser/tests/domexception.html
@@ -127,7 +127,7 @@
testing.withError((err) => {
testing.expectEqual(3, err.code);
- testing.expectEqual('Hierarchy Error', err.message);
+ testing.expectEqual('HierarchyRequestError', err.name);
testing.expectEqual(true, err instanceof DOMException);
testing.expectEqual(true, err instanceof Error);
}, () => link.appendChild(content));
diff --git a/src/browser/tests/element/attributes.html b/src/browser/tests/element/attributes.html
index 9b8c29d3..0929a3d9 100644
--- a/src/browser/tests/element/attributes.html
+++ b/src/browser/tests/element/attributes.html
@@ -36,7 +36,6 @@
testing.withError((err) => {
testing.expectEqual(8, err.code);
testing.expectEqual("NotFoundError", err.name);
- testing.expectEqual("Not Found", err.message);
}, () => el1.removeAttributeNode(script_id_node));
testing.expectEqual(an1, el1.removeAttributeNode(an1));
diff --git a/src/browser/tests/element/html/anchor.html b/src/browser/tests/element/html/anchor.html
index 0522163f..74bf486c 100644
--- a/src/browser/tests/element/html/anchor.html
+++ b/src/browser/tests/element/html/anchor.html
@@ -12,7 +12,7 @@
testing.expectEqual('', $('#a0').href);
testing.expectEqual(testing.BASE_URL + 'element/anchor1.html', $('#a1').href);
- testing.expectEqual(testing.ORIGIN + 'hello/world/anchor2.html', $('#a2').href);
+ testing.expectEqual(testing.ORIGIN + '/hello/world/anchor2.html', $('#a2').href);
testing.expectEqual('https://www.openmymind.net/Elixirs-With-Statement/', $('#a3').href);
testing.expectEqual(testing.BASE_URL + 'element/html/foo', $('#link').href);
diff --git a/src/browser/tests/element/html/form.html b/src/browser/tests/element/html/form.html
index f62cb221..17743135 100644
--- a/src/browser/tests/element/html/form.html
+++ b/src/browser/tests/element/html/form.html
@@ -32,7 +32,7 @@
testing.expectEqual(testing.BASE_URL + 'element/html/hello', form.action)
form.action = '/hello';
- testing.expectEqual(testing.ORIGIN + 'hello', form.action)
+ testing.expectEqual(testing.ORIGIN + '/hello', form.action)
form.action = 'https://lightpanda.io/hello';
testing.expectEqual('https://lightpanda.io/hello', form.action)
diff --git a/src/browser/tests/element/html/image.html b/src/browser/tests/element/html/image.html
index 92cd947d..baa09918 100644
--- a/src/browser/tests/element/html/image.html
+++ b/src/browser/tests/element/html/image.html
@@ -37,7 +37,7 @@
testing.expectEqual('test.png', img.getAttribute('src'));
img.src = '/absolute/path.png';
- testing.expectEqual(testing.ORIGIN + 'absolute/path.png', img.src);
+ testing.expectEqual(testing.ORIGIN + '/absolute/path.png', img.src);
testing.expectEqual('/absolute/path.png', img.getAttribute('src'));
img.src = 'https://example.com/image.png';
diff --git a/src/browser/tests/element/html/link.html b/src/browser/tests/element/html/link.html
index bed5e6ab..4d967e37 100644
--- a/src/browser/tests/element/html/link.html
+++ b/src/browser/tests/element/html/link.html
@@ -8,7 +8,7 @@
testing.expectEqual('https://lightpanda.io/opensource-browser/15', l2.href);
l2.href = '/over/9000';
- testing.expectEqual(testing.ORIGIN + 'over/9000', l2.href);
+ testing.expectEqual(testing.ORIGIN + '/over/9000', l2.href);
l2.crossOrigin = 'nope';
testing.expectEqual('anonymous', l2.crossOrigin);
diff --git a/src/browser/tests/element/matches.html b/src/browser/tests/element/matches.html
index 5e1721b5..f28d7a71 100644
--- a/src/browser/tests/element/matches.html
+++ b/src/browser/tests/element/matches.html
@@ -66,11 +66,10 @@
{
const container = $('#test-container');
- testing.expectError("SyntaxError: Syntax Error", () => container.matches(''));
+ testing.expectError("SyntaxError", () => container.matches(''));
testing.withError((err) => {
testing.expectEqual(12, err.code);
testing.expectEqual("SyntaxError", err.name);
- testing.expectEqual("Syntax Error", err.message);
}, () => container.matches(''));
}
diff --git a/src/browser/tests/element/query_selector.html b/src/browser/tests/element/query_selector.html
index 9564ca6d..203524b6 100644
--- a/src/browser/tests/element/query_selector.html
+++ b/src/browser/tests/element/query_selector.html
@@ -12,11 +12,10 @@
const p1 = $('#p1');
testing.expectEqual(null, p1.querySelector('#p1'));
- testing.expectError("SyntaxError: Syntax Error", () => p1.querySelector(''));
+ testing.expectError("SyntaxError", () => p1.querySelector(''));
testing.withError((err) => {
testing.expectEqual(12, err.code);
testing.expectEqual("SyntaxError", err.name);
- testing.expectEqual("Syntax Error", err.message);
}, () => p1.querySelector(''));
testing.expectEqual($('#c2'), p1.querySelector('#c2'));
diff --git a/src/browser/tests/element/query_selector_all.html b/src/browser/tests/element/query_selector_all.html
index eeedc876..3b4013c2 100644
--- a/src/browser/tests/element/query_selector_all.html
+++ b/src/browser/tests/element/query_selector_all.html
@@ -24,11 +24,10 @@
diff --git a/src/browser/tests/element/selector_invalid.html b/src/browser/tests/element/selector_invalid.html
index 35409c19..c0d16d59 100644
--- a/src/browser/tests/element/selector_invalid.html
+++ b/src/browser/tests/element/selector_invalid.html
@@ -43,8 +43,8 @@
const container = $('#container');
// Empty selectors
- testing.expectError("SyntaxError: Syntax Error", () => container.querySelector(''));
- testing.expectError("SyntaxError: Syntax Error", () => document.querySelectorAll(''));
+ testing.expectError("SyntaxError", () => container.querySelector(''));
+ testing.expectError("SyntaxError", () => document.querySelectorAll(''));
}
diff --git a/src/browser/tests/frames/post_message.html b/src/browser/tests/frames/post_message.html
new file mode 100644
index 00000000..6d206b74
--- /dev/null
+++ b/src/browser/tests/frames/post_message.html
@@ -0,0 +1,25 @@
+
+
+
+
+
+
diff --git a/src/browser/tests/frames/support/message_receiver.html b/src/browser/tests/frames/support/message_receiver.html
new file mode 100644
index 00000000..55612a7c
--- /dev/null
+++ b/src/browser/tests/frames/support/message_receiver.html
@@ -0,0 +1,9 @@
+
+
diff --git a/src/browser/tests/history.html b/src/browser/tests/history.html
index 1508e232..e2aa0d35 100644
--- a/src/browser/tests/history.html
+++ b/src/browser/tests/history.html
@@ -2,37 +2,17 @@
+
diff --git a/src/browser/tests/node/insert_before.html b/src/browser/tests/node/insert_before.html
index 8be48e56..50dff07c 100644
--- a/src/browser/tests/node/insert_before.html
+++ b/src/browser/tests/node/insert_before.html
@@ -19,7 +19,6 @@
testing.withError((err) => {
testing.expectEqual(8, err.code);
testing.expectEqual("NotFoundError", err.name);
- testing.expectEqual("Not Found", err.message);
}, () => d1.insertBefore(document.createElement('div'), d2));
let c1 = document.createElement('div');
diff --git a/src/browser/tests/node/remove_child.html b/src/browser/tests/node/remove_child.html
index fdf0b813..1118e4cf 100644
--- a/src/browser/tests/node/remove_child.html
+++ b/src/browser/tests/node/remove_child.html
@@ -7,7 +7,6 @@
testing.withError((err) => {
testing.expectEqual(8, err.code);
testing.expectEqual("NotFoundError", err.name);
- testing.expectEqual("Not Found", err.message);
}, () => $('#d1').removeChild($('#p1')));
const p1 = $('#p1');
diff --git a/src/browser/tests/node/replace_child.html b/src/browser/tests/node/replace_child.html
index 45ed1bc5..51b0a173 100644
--- a/src/browser/tests/node/replace_child.html
+++ b/src/browser/tests/node/replace_child.html
@@ -25,7 +25,6 @@
testing.withError((err) => {
testing.expectEqual(3, err.code);
testing.expectEqual("HierarchyRequestError", err.name);
- testing.expectEqual("Hierarchy Error", err.message);
}, () => d1.replaceChild(c4, c3));
testing.expectEqual(c2, d1.replaceChild(c4, c2));
diff --git a/src/browser/tests/range.html b/src/browser/tests/range.html
index d9a8637b..8440c187 100644
--- a/src/browser/tests/range.html
+++ b/src/browser/tests/range.html
@@ -451,12 +451,12 @@
const p1 = $('#p1');
// Test setStart with offset beyond node length
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.setStart(p1, 999);
});
// Test with negative offset (wraps to large u32)
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.setStart(p1.firstChild, -1);
});
}
@@ -468,12 +468,12 @@
const p1 = $('#p1');
// Test setEnd with offset beyond node length
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.setEnd(p1, 999);
});
// Test with text node
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.setEnd(p1.firstChild, 9999);
});
}
@@ -525,11 +525,11 @@
range.setEnd(p1, 1);
// Test comparePoint with invalid offset
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.comparePoint(p1, 20);
});
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.comparePoint(p1.firstChild, -1);
});
}
@@ -650,11 +650,11 @@
range.setEnd(p1, 1);
// Invalid offset should throw IndexSizeError
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.isPointInRange(p1, 999);
});
- testing.expectError('IndexSizeError: Index or size is negative or greater than the allowed amount', () => {
+ testing.expectError('IndexSizeError:', () => {
range.isPointInRange(p1.firstChild, 9999);
});
}
@@ -854,11 +854,11 @@
range2.setStart(p, 0);
// Invalid how parameter should throw NotSupportedError
- testing.expectError('NotSupportedError: Not Supported', () => {
+ testing.expectError('NotSupportedError:', () => {
range1.compareBoundaryPoints(4, range2);
});
- testing.expectError('NotSupportedError: Not Supported', () => {
+ testing.expectError('NotSupportedError:', () => {
range1.compareBoundaryPoints(99, range2);
});
}
@@ -883,7 +883,7 @@
range2.setEnd(foreignP, 1);
// Comparing ranges in different documents should throw WrongDocumentError
- testing.expectError('WrongDocumentError: wrong_document_error', () => {
+ testing.expectError('WrongDocumentError:', () => {
range1.compareBoundaryPoints(Range.START_TO_START, range2);
});
}
diff --git a/src/browser/tests/support/history.html b/src/browser/tests/support/history.html
new file mode 100644
index 00000000..d3356de3
--- /dev/null
+++ b/src/browser/tests/support/history.html
@@ -0,0 +1,33 @@
+
+
+
+
diff --git a/src/browser/tests/testing.js b/src/browser/tests/testing.js
index 987ba042..2e33c1d3 100644
--- a/src/browser/tests/testing.js
+++ b/src/browser/tests/testing.js
@@ -99,8 +99,7 @@
}
}
- // our test runner sets this to true
- const IS_TEST_RUNNER = window._lightpanda_skip_auto_assert === true;
+ const IS_TEST_RUNNER = window.navigator.userAgent.startsWith("Lightpanda/");
window.testing = {
fail: fail,
@@ -114,17 +113,17 @@
eventually: eventually,
IS_TEST_RUNNER: IS_TEST_RUNNER,
HOST: '127.0.0.1',
- ORIGIN: 'http://127.0.0.1:9582/',
+ ORIGIN: 'http://127.0.0.1:9582',
BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/',
};
- if (window.navigator.userAgent.startsWith("Lightpanda/") == false) {
+ if (IS_TEST_RUNNER === false) {
// The page is running in a different browser. Probably a developer making sure
// a test is correct. There are a few tweaks we need to do to make this a
// seemless, namely around adapting paths/urls.
console.warn(`The page is not being executed in the test runner, certain behavior has been adjusted`);
window.testing.HOST = location.hostname;
- window.testing.ORIGIN = location.origin + '/';
+ window.testing.ORIGIN = location.origin;
window.testing.BASE_URL = location.origin + '/src/browser/tests/';
window.addEventListener('load', testing.assertOk);
}
diff --git a/src/browser/tests/window/window.html b/src/browser/tests/window/window.html
index 01025b86..e4094f9b 100644
--- a/src/browser/tests/window/window.html
+++ b/src/browser/tests/window/window.html
@@ -82,7 +82,7 @@
testing.expectEqual('ceil', atob('Y2VpbA')); // 6 chars, len%4==2, needs '=='
// length % 4 == 1 must still throw
- testing.expectError('InvalidCharacterError: Invalid Character', () => {
+ testing.expectError('InvalidCharacterError', () => {
atob('Y');
});
diff --git a/src/browser/webapi/DOMException.zig b/src/browser/webapi/DOMException.zig
index 3e0da288..46294b8d 100644
--- a/src/browser/webapi/DOMException.zig
+++ b/src/browser/webapi/DOMException.zig
@@ -104,13 +104,27 @@ pub fn getMessage(self: *const DOMException) []const u8 {
}
return switch (self._code) {
.none => "",
- .invalid_character_error => "Invalid Character",
.index_size_error => "Index or size is negative or greater than the allowed amount",
- .syntax_error => "Syntax Error",
- .not_supported => "Not Supported",
- .not_found => "Not Found",
- .hierarchy_error => "Hierarchy Error",
- else => @tagName(self._code),
+ .hierarchy_error => "The operation would yield an incorrect node tree",
+ .wrong_document_error => "The object is in the wrong document",
+ .invalid_character_error => "The string contains invalid characters",
+ .no_modification_allowed_error => "The object can not be modified",
+ .not_found => "The object can not be found here",
+ .not_supported => "The operation is not supported",
+ .inuse_attribute_error => "The attribute already in use",
+ .invalid_state_error => "The object is in an invalid state",
+ .syntax_error => "The string did not match the expected pattern",
+ .invalid_modification_error => "The object can not be modified in this way",
+ .namespace_error => "The operation is not allowed by Namespaces in XML",
+ .invalid_access_error => "The object does not support the operation or argument",
+ .security_error => "The operation is insecure",
+ .network_error => "A network error occurred",
+ .abort_error => "The operation was aborted",
+ .url_mismatch_error => "The given URL does not match another URL",
+ .quota_exceeded_error => "The quota has been exceeded",
+ .timeout_error => "The operation timed out",
+ .invalid_node_type_error => "The supplied node is incorrect or has an incorrect ancestor for this operation",
+ .data_clone_error => "The object can not be cloned",
};
}
diff --git a/src/browser/webapi/IntersectionObserver.zig b/src/browser/webapi/IntersectionObserver.zig
index 74a5d79e..b4c07e77 100644
--- a/src/browser/webapi/IntersectionObserver.zig
+++ b/src/browser/webapi/IntersectionObserver.zig
@@ -37,6 +37,7 @@ pub fn registerTypes() []const type {
const IntersectionObserver = @This();
+_rc: u8 = 0,
_arena: Allocator,
_callback: js.Function.Temp,
_observing: std.ArrayList(*Element) = .{},
@@ -93,12 +94,24 @@ pub fn init(callback: js.Function.Temp, options: ?ObserverInit, page: *Page) !*I
}
pub fn deinit(self: *IntersectionObserver, shutdown: bool, session: *Session) void {
- self._callback.release();
- if ((comptime IS_DEBUG) and !shutdown) {
- std.debug.assert(self._observing.items.len == 0);
+ const rc = self._rc;
+ if (comptime IS_DEBUG) {
+ std.debug.assert(rc != 0);
}
- session.releaseArena(self._arena);
+ if (rc == 1 or shutdown) {
+ self._callback.release();
+ if ((comptime IS_DEBUG) and !shutdown) {
+ std.debug.assert(self._observing.items.len == 0);
+ }
+ session.releaseArena(self._arena);
+ } else {
+ self._rc = rc - 1;
+ }
+}
+
+pub fn acquireRef(self: *IntersectionObserver) void {
+ self._rc += 1;
}
pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void {
@@ -111,7 +124,7 @@ pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void
// Register with page if this is our first observation
if (self._observing.items.len == 0) {
- page.js.strongRef(self);
+ self._rc += 1;
try page.registerIntersectionObserver(self);
}
@@ -148,20 +161,26 @@ pub fn unobserve(self: *IntersectionObserver, target: *Element, page: *Page) voi
}
if (self._observing.items.len == 0) {
- page.js.safeWeakRef(self);
+ self.deinit(false, page._session);
}
}
pub fn disconnect(self: *IntersectionObserver, page: *Page) void {
- page.unregisterIntersectionObserver(self);
- self._observing.clearRetainingCapacity();
self._previous_states.clearRetainingCapacity();
for (self._pending_entries.items) |entry| {
entry.deinit(false, page._session);
}
self._pending_entries.clearRetainingCapacity();
- page.js.safeWeakRef(self);
+
+ const observing_count = self._observing.items.len;
+ self._observing.clearRetainingCapacity();
+
+ if (observing_count > 0) {
+ self.deinit(false, page._session);
+ }
+
+ page.unregisterIntersectionObserver(self);
}
pub fn takeRecords(self: *IntersectionObserver, page: *Page) ![]*IntersectionObserverEntry {
diff --git a/src/browser/webapi/MutationObserver.zig b/src/browser/webapi/MutationObserver.zig
index b8608381..8b625fa8 100644
--- a/src/browser/webapi/MutationObserver.zig
+++ b/src/browser/webapi/MutationObserver.zig
@@ -39,6 +39,7 @@ pub fn registerTypes() []const type {
const MutationObserver = @This();
+_rc: u8 = 0,
_arena: Allocator,
_callback: js.Function.Temp,
_observing: std.ArrayList(Observing) = .{},
@@ -86,12 +87,24 @@ pub fn init(callback: js.Function.Temp, page: *Page) !*MutationObserver {
}
pub fn deinit(self: *MutationObserver, shutdown: bool, session: *Session) void {
- self._callback.release();
- if ((comptime IS_DEBUG) and !shutdown) {
- std.debug.assert(self._observing.items.len == 0);
+ const rc = self._rc;
+ if (comptime IS_DEBUG) {
+ std.debug.assert(rc != 0);
}
- session.releaseArena(self._arena);
+ if (rc == 1 or shutdown) {
+ self._callback.release();
+ if ((comptime IS_DEBUG) and !shutdown) {
+ std.debug.assert(self._observing.items.len == 0);
+ }
+ session.releaseArena(self._arena);
+ } else {
+ self._rc = rc - 1;
+ }
+}
+
+pub fn acquireRef(self: *MutationObserver) void {
+ self._rc += 1;
}
pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, page: *Page) !void {
@@ -158,7 +171,7 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions,
// Register with page if this is our first observation
if (self._observing.items.len == 0) {
- page.js.strongRef(self);
+ self._rc += 1;
try page.registerMutationObserver(self);
}
@@ -169,13 +182,18 @@ pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions,
}
pub fn disconnect(self: *MutationObserver, page: *Page) void {
- page.unregisterMutationObserver(self);
- self._observing.clearRetainingCapacity();
for (self._pending_records.items) |record| {
record.deinit(false, page._session);
}
self._pending_records.clearRetainingCapacity();
- page.js.safeWeakRef(self);
+
+ const observing_count = self._observing.items.len;
+ self._observing.clearRetainingCapacity();
+
+ if (observing_count > 0) {
+ self.deinit(false, page._session);
+ }
+ page.unregisterMutationObserver(self);
}
pub fn takeRecords(self: *MutationObserver, page: *Page) ![]*MutationRecord {
diff --git a/src/browser/webapi/Window.zig b/src/browser/webapi/Window.zig
index 0f288398..099cad65 100644
--- a/src/browser/webapi/Window.zig
+++ b/src/browser/webapi/Window.zig
@@ -66,6 +66,7 @@ _on_load: ?js.Function.Global = null,
_on_pageshow: ?js.Function.Global = null,
_on_popstate: ?js.Function.Global = null,
_on_error: ?js.Function.Global = null,
+_on_message: ?js.Function.Global = null,
_on_unhandled_rejection: ?js.Function.Global = null, // TODO: invoke on error
_location: *Location,
_timer_id: u30 = 0,
@@ -208,6 +209,14 @@ pub fn setOnError(self: *Window, setter: ?FunctionSetter) void {
self._on_error = getFunctionFromSetter(setter);
}
+pub fn getOnMessage(self: *const Window) ?js.Function.Global {
+ return self._on_message;
+}
+
+pub fn setOnMessage(self: *Window, setter: ?FunctionSetter) void {
+ self._on_message = getFunctionFromSetter(setter);
+}
+
pub fn getOnUnhandledRejection(self: *const Window) ?js.Function.Global {
return self._on_unhandled_rejection;
}
@@ -369,19 +378,26 @@ pub fn postMessage(self: *Window, message: js.Value.Temp, target_origin: ?[]cons
// In a full implementation, we would validate the origin
_ = target_origin;
- // postMessage queues a task (not a microtask), so use the scheduler
- const arena = try page.getArena(.{ .debug = "Window.schedule" });
- errdefer page.releaseArena(arena);
+ // self = the window that will get the message
+ // page = the context calling postMessage
+ const target_page = self._page;
+ const source_window = target_page.js.getIncumbent().window;
- const origin = try self._location.getOrigin(page);
+ const arena = try target_page.getArena(.{ .debug = "Window.postMessage" });
+ errdefer target_page.releaseArena(arena);
+
+ // Origin should be the source window's origin (where the message came from)
+ const origin = try source_window._location.getOrigin(page);
const callback = try arena.create(PostMessageCallback);
callback.* = .{
- .page = page,
.arena = arena,
.message = message,
+ .page = target_page,
+ .source = source_window,
.origin = try arena.dupe(u8, origin),
};
- try page.js.scheduler.add(callback, PostMessageCallback.run, 0, .{
+
+ try target_page.js.scheduler.add(callback, PostMessageCallback.run, 0, .{
.name = "postMessage",
.low_priority = false,
.finalizer = PostMessageCallback.cancelled,
@@ -702,6 +718,7 @@ const ScheduleCallback = struct {
const PostMessageCallback = struct {
page: *Page,
+ source: *Window,
arena: Allocator,
origin: []const u8,
message: js.Value.Temp,
@@ -712,7 +729,7 @@ const PostMessageCallback = struct {
fn cancelled(ctx: *anyopaque) void {
const self: *PostMessageCallback = @ptrCast(@alignCast(ctx));
- self.page.releaseArena(self.arena);
+ self.deinit();
}
fn run(ctx: *anyopaque) !?u32 {
@@ -722,14 +739,17 @@ const PostMessageCallback = struct {
const page = self.page;
const window = page.window;
- const event = (try MessageEvent.initTrusted(comptime .wrap("message"), .{
- .data = self.message,
- .origin = self.origin,
- .source = window,
- .bubbles = false,
- .cancelable = false,
- }, page)).asEvent();
- try page._event_manager.dispatch(window.asEventTarget(), event);
+ const event_target = window.asEventTarget();
+ if (page._event_manager.hasDirectListeners(event_target, "message", window._on_message)) {
+ const event = (try MessageEvent.initTrusted(comptime .wrap("message"), .{
+ .data = self.message,
+ .origin = self.origin,
+ .source = self.source,
+ .bubbles = false,
+ .cancelable = false,
+ }, page)).asEvent();
+ try page._event_manager.dispatchDirect(event_target, event, window._on_message, .{ .context = "window.postMessage" });
+ }
return null;
}
@@ -783,6 +803,7 @@ pub const JsApi = struct {
pub const onpageshow = bridge.accessor(Window.getOnPageShow, Window.setOnPageShow, .{});
pub const onpopstate = bridge.accessor(Window.getOnPopState, Window.setOnPopState, .{});
pub const onerror = bridge.accessor(Window.getOnError, Window.setOnError, .{});
+ pub const onmessage = bridge.accessor(Window.getOnMessage, Window.setOnMessage, .{});
pub const onunhandledrejection = bridge.accessor(Window.getOnUnhandledRejection, Window.setOnUnhandledRejection, .{});
pub const fetch = bridge.function(Window.fetch, .{});
pub const queueMicrotask = bridge.function(Window.queueMicrotask, .{});
diff --git a/src/browser/webapi/event/MouseEvent.zig b/src/browser/webapi/event/MouseEvent.zig
index cae21509..e13dc1b3 100644
--- a/src/browser/webapi/event/MouseEvent.zig
+++ b/src/browser/webapi/event/MouseEvent.zig
@@ -28,6 +28,8 @@ const EventTarget = @import("../EventTarget.zig");
const UIEvent = @import("UIEvent.zig");
const PointerEvent = @import("PointerEvent.zig");
+const Allocator = std.mem.Allocator;
+
const MouseEvent = @This();
pub const MouseButton = enum(u8) {
@@ -92,7 +94,7 @@ pub fn initTrusted(typ: String, _opts: ?Options, page: *Page) !*MouseEvent {
return initWithTrusted(arena, typ, _opts, true, page);
}
-fn initWithTrusted(arena: std.mem.Allocator, typ: String, _opts: ?Options, trusted: bool, page: *Page) !*MouseEvent {
+fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool, page: *Page) !*MouseEvent {
const opts = _opts orelse Options{};
const event = try page._factory.uiEvent(
diff --git a/src/cdp/domains/inspector.zig b/src/cdp/domains/inspector.zig
index dad0cebd..b8dea574 100644
--- a/src/cdp/domains/inspector.zig
+++ b/src/cdp/domains/inspector.zig
@@ -21,9 +21,11 @@ const std = @import("std");
pub fn processMessage(cmd: anytype) !void {
const action = std.meta.stringToEnum(enum {
enable,
+ disable,
}, cmd.input.action) orelse return error.UnknownMethod;
switch (action) {
.enable => return cmd.sendResult(null, .{}),
+ .disable => return cmd.sendResult(null, .{}),
}
}
diff --git a/src/cdp/domains/performance.zig b/src/cdp/domains/performance.zig
index dad0cebd..b8dea574 100644
--- a/src/cdp/domains/performance.zig
+++ b/src/cdp/domains/performance.zig
@@ -21,9 +21,11 @@ const std = @import("std");
pub fn processMessage(cmd: anytype) !void {
const action = std.meta.stringToEnum(enum {
enable,
+ disable,
}, cmd.input.action) orelse return error.UnknownMethod;
switch (action) {
.enable => return cmd.sendResult(null, .{}),
+ .disable => return cmd.sendResult(null, .{}),
}
}
diff --git a/src/cdp/domains/security.zig b/src/cdp/domains/security.zig
index 0ebfedae..9bbf5b39 100644
--- a/src/cdp/domains/security.zig
+++ b/src/cdp/domains/security.zig
@@ -21,11 +21,13 @@ const std = @import("std");
pub fn processMessage(cmd: anytype) !void {
const action = std.meta.stringToEnum(enum {
enable,
+ disable,
setIgnoreCertificateErrors,
}, cmd.input.action) orelse return error.UnknownMethod;
switch (action) {
.enable => return cmd.sendResult(null, .{}),
+ .disable => return cmd.sendResult(null, .{}),
.setIgnoreCertificateErrors => return setIgnoreCertificateErrors(cmd),
}
}