diff --git a/src/browser/Page.zig b/src/browser/Page.zig
index fb70f9d8..9bfd17cf 100644
--- a/src/browser/Page.zig
+++ b/src/browser/Page.zig
@@ -1276,7 +1276,7 @@ fn createHtmlElementT(self: *Page, comptime E: type, namespace: Element.Namespac
const node = element.asNode();
if (@hasDecl(E, "Build") and @hasDecl(E.Build, "created")) {
@call(.auto, @field(E.Build, "created"), .{ node, self }) catch |err| {
- log.err(.page, "build.created", .{ .tag = node.getNodeName(self), .err = err });
+ log.err(.page, "build.created", .{ .tag = node.getNodeName(&self.buf), .err = err });
return err;
};
}
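
Note: the fix above tracks the new Node.getNodeName signature (see the webapi/Node.zig hunk below), which takes a caller-supplied scratch buffer instead of a *Page. A minimal sketch of the calling convention, assuming Page.BUF_SIZE is the size of the page.buf scratch buffer already used for tag names:

    // Hypothetical caller that has a *Node but no *Page at hand.
    var buf: [Page.BUF_SIZE]u8 = undefined;
    // Elements come back in spec casing (e.g. "DIV"); other node types
    // return constants such as "#text" or "#document".
    const name = node.getNodeName(&buf);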
diff --git a/src/browser/tests/cdp/registry1.html b/src/browser/tests/cdp/registry1.html
new file mode 100644
index 00000000..b603ad24
--- /dev/null
+++ b/src/browser/tests/cdp/registry1.html
@@ -0,0 +1 @@
+<a id=a1>link1</a><p></p>
diff --git a/src/browser/tests/cdp/registry2.html b/src/browser/tests/cdp/registry2.html
new file mode 100644
index 00000000..136680aa
--- /dev/null
+++ b/src/browser/tests/cdp/registry2.html
@@ -0,0 +1 @@
+<a id=a1></a><a id=a2></a>
diff --git a/src/browser/tests/cdp/registry3.html b/src/browser/tests/cdp/registry3.html
new file mode 100644
index 00000000..a5b16fc5
--- /dev/null
+++ b/src/browser/tests/cdp/registry3.html
@@ -0,0 +1 @@
+<a id=a1></a><div><a id=a2></a></div>
diff --git a/src/browser/webapi/Element.zig b/src/browser/webapi/Element.zig
index ecaf35b9..cd6b003e 100644
--- a/src/browser/webapi/Element.zig
+++ b/src/browser/webapi/Element.zig
@@ -225,6 +225,15 @@ pub fn getNamespaceURI(self: *const Element) []const u8 {
return self._namespace.toUri();
}
+pub fn getLocalName(self: *Element) []const u8 {
+ const name = self.getTagNameLower();
+ if (std.mem.indexOfPos(u8, name, 0, ":")) |pos| {
+ return name[pos + 1 ..];
+ }
+
+ return name;
+}
+
// innerText represents the **rendered** text content of a node and its
// descendants.
pub fn getInnerText(self: *Element, writer: *std.Io.Writer) !void {
@@ -1091,16 +1100,7 @@ pub const JsApi = struct {
return null;
}
- pub const localName = bridge.accessor(_localName, null, .{});
- fn _localName(self: *Element) []const u8 {
- const name = self.getTagNameLower();
- if (std.mem.indexOfPos(u8, name, 0, ":")) |pos| {
- return name[pos + 1 ..];
- }
-
- return name;
- }
-
+ pub const localName = bridge.accessor(Element.getLocalName, null, .{});
pub const id = bridge.accessor(Element.getId, Element.setId, .{});
pub const className = bridge.accessor(Element.getClassName, Element.setClassName, .{});
pub const classList = bridge.accessor(Element.getClassList, null, .{});
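
Moving the localName logic from the JS bridge into Element.getLocalName makes it reachable from native code; the rewritten CDP writer below calls it directly. A hedged sketch of the behaviour, with illustrative tag names:

    // The local name is the lowercased tag name with any namespace prefix removed:
    //   "div"        -> "div"
    //   "svg:circle" -> "circle"
    const local = element.getLocalName();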
diff --git a/src/browser/webapi/Navigator.zig b/src/browser/webapi/Navigator.zig
index 3fa8154f..23efd49f 100644
--- a/src/browser/webapi/Navigator.zig
+++ b/src/browser/webapi/Navigator.zig
@@ -98,7 +98,6 @@ pub const JsApi = struct {
pub const name = "Navigator";
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
- // ZIGDOM (currently no optimization for empty types)
pub const empty_with_no_proto = true;
};
diff --git a/src/browser/webapi/Node.zig b/src/browser/webapi/Node.zig
index 67245cb3..619518f7 100644
--- a/src/browser/webapi/Node.zig
+++ b/src/browser/webapi/Node.zig
@@ -256,9 +256,9 @@ pub fn setTextContent(self: *Node, data: []const u8, page: *Page) !void {
}
}
-pub fn getNodeName(self: *const Node, page: *Page) []const u8 {
+pub fn getNodeName(self: *const Node, buf: []u8) []const u8 {
return switch (self._type) {
- .element => |el| el.getTagNameSpec(&page.buf),
+ .element => |el| el.getTagNameSpec(buf),
.cdata => |cd| switch (cd._type) {
.text => "#text",
.cdata_section => "#cdata-section",
@@ -271,7 +271,7 @@ pub fn getNodeName(self: *const Node, page: *Page) []const u8 {
};
}
-pub fn nodeType(self: *const Node) u8 {
+pub fn getNodeType(self: *const Node) u8 {
return switch (self._type) {
.element => 1,
.attribute => 2,
@@ -491,6 +491,13 @@ pub fn childrenIterator(self: *Node) NodeIterator {
};
}
+pub fn getChildrenCount(self: *Node) usize {
+ return switch (self._type) {
+ .element, .document, .document_fragment => self.getLength(),
+ .document_type, .attribute, .cdata => 0,
+ };
+}
+
pub fn getLength(self: *Node) u32 {
switch (self._type) {
.cdata => |cdata| {
@@ -770,8 +777,12 @@ pub const JsApi = struct {
pub const DOCUMENT_POSITION_CONTAINED_BY = bridge.property(0x10);
pub const DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC = bridge.property(0x20);
- pub const nodeName = bridge.accessor(Node.getNodeName, null, .{});
- pub const nodeType = bridge.accessor(Node.nodeType, null, .{});
+ pub const nodeName = bridge.accessor(struct {
+ fn wrap(self: *const Node, page: *Page) []const u8 {
+ return self.getNodeName(&page.buf);
+ }
+ }.wrap, null, .{});
+ pub const nodeType = bridge.accessor(Node.getNodeType, null, .{});
pub const textContent = bridge.accessor(_textContext, Node.setTextContent, .{});
fn _textContext(self: *Node, page: *const Page) !?[]const u8 {
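
getChildrenCount gives callers a child count without building a NodeList; the CDP writer below uses it for childNodeCount. A one-line usage sketch, with dom_node standing in for any *Node:

    // Containers (element, document, fragment) report getLength();
    // leaf nodes (doctype, attribute, cdata) always report 0.
    const child_count = dom_node.getChildrenCount();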
diff --git a/src/browser/webapi/NodeFilter.zig b/src/browser/webapi/NodeFilter.zig
index 232355dc..bdb523a8 100644
--- a/src/browser/webapi/NodeFilter.zig
+++ b/src/browser/webapi/NodeFilter.zig
@@ -72,7 +72,7 @@ pub fn shouldShow(node: *const Node, what_to_show: u32) bool {
// TODO: Test this mapping thoroughly!
// nodeType values (1=ELEMENT, 3=TEXT, 9=DOCUMENT, etc.) need to map to
// SHOW_* bitmask positions (0x1, 0x4, 0x100, etc.)
- const node_type_value = node.nodeType();
+ const node_type_value = node.getNodeType();
const bit_position = node_type_value - 1;
const node_type_bit: u32 = @as(u32, 1) << @intCast(bit_position);
return (what_to_show & node_type_bit) != 0;
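
The mapping shouldShow relies on follows the DOM convention SHOW_x == 1 << (nodeType - 1). A worked example using the spec's constants:

    // nodeType 1 (ELEMENT)  -> 1 << 0 == 0x1   (SHOW_ELEMENT)
    // nodeType 3 (TEXT)     -> 1 << 2 == 0x4   (SHOW_TEXT)
    // nodeType 9 (DOCUMENT) -> 1 << 8 == 0x100 (SHOW_DOCUMENT)
    const show_document_bit: u32 = @as(u32, 1) << @intCast(9 - 1); // 0x100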
diff --git a/src/browser/webapi/net/Fetch.zig b/src/browser/webapi/net/Fetch.zig
index 7f85741f..9f708e0a 100644
--- a/src/browser/webapi/net/Fetch.zig
+++ b/src/browser/webapi/net/Fetch.zig
@@ -42,7 +42,6 @@ _resolver: js.PersistentPromiseResolver,
pub const Input = Request.Input;
pub const InitOpts = Request.InitOpts;
-// @ZIGDOM just enough to get campfire demo working
pub fn init(input: Input, options: ?InitOpts, page: *Page) !js.Promise {
const request = try Request.init(input, options, page);
diff --git a/src/cdp/Node.zig b/src/cdp/Node.zig
index c5109312..ca7c43ca 100644
--- a/src/cdp/Node.zig
+++ b/src/cdp/Node.zig
@@ -1,587 +1,592 @@
-// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
-//
+// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
+
// Francis Bouvier
// Pierre Tachoire
-//
+
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
-//
+
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
-//
+
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-// @ZIGDOM
-// const std = @import("std");
-// const Allocator = std.mem.Allocator;
-
-// const log = @import("../log.zig");
-// const parser = @import("../browser/netsurf.zig");
-
-// pub const Id = u32;
-
-// const Node = @This();
-
-// id: Id,
-// _node: *parser.Node,
-// set_child_nodes_event: bool,
-
-// // Whenever we send a node to the client, we register it here for future lookup.
-// // We maintain a node -> id and id -> node lookup.
-// pub const Registry = struct {
-// node_id: u32,
-// allocator: Allocator,
-// arena: std.heap.ArenaAllocator,
-// node_pool: std.heap.MemoryPool(Node),
-// lookup_by_id: std.AutoHashMapUnmanaged(Id, *Node),
-// lookup_by_node: std.HashMapUnmanaged(*parser.Node, *Node, NodeContext, std.hash_map.default_max_load_percentage),
-
-// pub fn init(allocator: Allocator) Registry {
-// return .{
-// .node_id = 1,
-// .lookup_by_id = .{},
-// .lookup_by_node = .{},
-// .allocator = allocator,
-// .arena = std.heap.ArenaAllocator.init(allocator),
-// .node_pool = std.heap.MemoryPool(Node).init(allocator),
-// };
-// }
-
-// pub fn deinit(self: *Registry) void {
-// const allocator = self.allocator;
-// self.lookup_by_id.deinit(allocator);
-// self.lookup_by_node.deinit(allocator);
-// self.node_pool.deinit();
-// self.arena.deinit();
-// }
-
-// pub fn reset(self: *Registry) void {
-// self.lookup_by_id.clearRetainingCapacity();
-// self.lookup_by_node.clearRetainingCapacity();
-// _ = self.arena.reset(.{ .retain_with_limit = 1024 });
-// _ = self.node_pool.reset(.{ .retain_with_limit = 1024 });
-// }
-
-// pub fn register(self: *Registry, n: *parser.Node) !*Node {
-// const node_lookup_gop = try self.lookup_by_node.getOrPut(self.allocator, n);
-// if (node_lookup_gop.found_existing) {
-// return node_lookup_gop.value_ptr.*;
-// }
-
-// // on error, we're probably going to abort the entire browser context
-// // but, just in case, let's try to keep things tidy.
-// errdefer _ = self.lookup_by_node.remove(n);
-
-// const node = try self.node_pool.create();
-// errdefer self.node_pool.destroy(node);
-
-// const id = self.node_id;
-// self.node_id = id + 1;
-
-// node.* = .{
-// ._node = n,
-// .id = id,
-// .set_child_nodes_event = false,
-// };
-
-// node_lookup_gop.value_ptr.* = node;
-// try self.lookup_by_id.putNoClobber(self.allocator, id, node);
-// return node;
-// }
-// };
-
-// const NodeContext = struct {
-// pub fn hash(_: NodeContext, n: *parser.Node) u64 {
-// return std.hash.Wyhash.hash(0, std.mem.asBytes(&@intFromPtr(n)));
-// }
-
-// pub fn eql(_: NodeContext, a: *parser.Node, b: *parser.Node) bool {
-// return @intFromPtr(a) == @intFromPtr(b);
-// }
-// };
-
-// // Searches are a 3 step process:
-// // 1 - Dom.performSearch
-// // 2 - Dom.getSearchResults
-// // 3 - Dom.discardSearchResults
-// //
-// // For a given browser context, we can have multiple active searches. I.e.
-// // performSearch could be called multiple times without getSearchResults or
-// // discardSearchResults being called. We keep these active searches in the
-// // browser context's node_search_list, which is a SearchList. Since we don't
-// // expect many active searches (mostly just 1), a list is fine to scan through.
-// pub const Search = struct {
-// name: []const u8,
-// node_ids: []const Id,
-
-// pub const List = struct {
-// registry: *Registry,
-// search_id: u16 = 0,
-// arena: std.heap.ArenaAllocator,
-// searches: std.ArrayListUnmanaged(Search) = .{},
-
-// pub fn init(allocator: Allocator, registry: *Registry) List {
-// return .{
-// .registry = registry,
-// .arena = std.heap.ArenaAllocator.init(allocator),
-// };
-// }
-
-// pub fn deinit(self: *List) void {
-// self.arena.deinit();
-// }
-
-// pub fn reset(self: *List) void {
-// self.search_id = 0;
-// self.searches = .{};
-// _ = self.arena.reset(.{ .retain_with_limit = 4096 });
-// }
-
-// pub fn create(self: *List, nodes: []const *parser.Node) !Search {
-// const id = self.search_id;
-// defer self.search_id = id +% 1;
-
-// const arena = self.arena.allocator();
-
-// const name = switch (id) {
-// 0 => "0",
-// 1 => "1",
-// 2 => "2",
-// 3 => "3",
-// 4 => "4",
-// 5 => "5",
-// 6 => "6",
-// 7 => "7",
-// 8 => "8",
-// 9 => "9",
-// else => try std.fmt.allocPrint(arena, "{d}", .{id}),
-// };
-
-// var registry = self.registry;
-// const node_ids = try arena.alloc(Id, nodes.len);
-// for (nodes, node_ids) |node, *node_id| {
-// node_id.* = (try registry.register(node)).id;
-// }
-
-// const search = Search{
-// .name = name,
-// .node_ids = node_ids,
-// };
-// try self.searches.append(arena, search);
-// return search;
-// }
-
-// pub fn remove(self: *List, name: []const u8) void {
-// for (self.searches.items, 0..) |search, i| {
-// if (std.mem.eql(u8, name, search.name)) {
-// _ = self.searches.swapRemove(i);
-// return;
-// }
-// }
-// }
-
-// pub fn get(self: *const List, name: []const u8) ?Search {
-// for (self.searches.items) |search| {
-// if (std.mem.eql(u8, name, search.name)) {
-// return search;
-// }
-// }
-// return null;
-// }
-// };
-// };
-
-// // Need a custom writer, because we can't just serialize the node as-is.
-// // Sometimes we want to serializ the node without chidren, sometimes with just
-// // its direct children, and sometimes the entire tree.
-// // (For now, we only support direct children)
-
-// pub const Writer = struct {
-// depth: i32,
-// exclude_root: bool,
-// root: *const Node,
-// registry: *Registry,
-
-// pub const Opts = struct {
-// depth: i32 = 0,
-// exclude_root: bool = false,
-// };
-
-// pub fn jsonStringify(self: *const Writer, w: anytype) error{WriteFailed}!void {
-// if (self.exclude_root) {
-// _ = self.writeChildren(self.root, 1, w) catch |err| {
-// log.err(.cdp, "node writeChildren", .{ .err = err });
-// return error.WriteFailed;
-// };
-// } else {
-// self.toJSON(self.root, 0, w) catch |err| {
-// // The only error our jsonStringify method can return is
-// // @TypeOf(w).Error. In other words, our code can't return its own
-// // error, we can only return a writer error. Kinda sucks.
-// log.err(.cdp, "node toJSON stringify", .{ .err = err });
-// return error.WriteFailed;
-// };
-// }
-// }
-
-// fn toJSON(self: *const Writer, node: *const Node, depth: usize, w: anytype) !void {
-// try w.beginObject();
-// try self.writeCommon(node, false, w);
-
-// try w.objectField("children");
-// const child_count = try self.writeChildren(node, depth, w);
-// try w.objectField("childNodeCount");
-// try w.write(child_count);
-
-// try w.endObject();
-// }
-
-// fn writeChildren(self: *const Writer, node: *const Node, depth: usize, w: anytype) anyerror!usize {
-// var registry = self.registry;
-// const child_nodes = try parser.nodeGetChildNodes(node._node);
-// const child_count = parser.nodeListLength(child_nodes);
-// const full_child = self.depth < 0 or self.depth < depth;
-
-// var i: usize = 0;
-// try w.beginArray();
-// for (0..child_count) |_| {
-// const child = (parser.nodeListItem(child_nodes, @intCast(i))) orelse break;
-// const child_node = try registry.register(child);
-// if (full_child) {
-// try self.toJSON(child_node, depth + 1, w);
-// } else {
-// try w.beginObject();
-// try self.writeCommon(child_node, true, w);
-// try w.endObject();
-// }
-
-// i += 1;
-// }
-// try w.endArray();
-
-// return i;
-// }
-
-// fn writeCommon(self: *const Writer, node: *const Node, include_child_count: bool, w: anytype) !void {
-// try w.objectField("nodeId");
-// try w.write(node.id);
-
-// try w.objectField("backendNodeId");
-// try w.write(node.id);
-
-// const n = node._node;
-
-// if (parser.nodeParentNode(n)) |p| {
-// const parent_node = try self.registry.register(p);
-// try w.objectField("parentId");
-// try w.write(parent_node.id);
-// }
-
-// const _map = try parser.nodeGetAttributes(n);
-// if (_map) |map| {
-// const attr_count = try parser.namedNodeMapGetLength(map);
-// try w.objectField("attributes");
-// try w.beginArray();
-// for (0..attr_count) |i| {
-// const attr = try parser.namedNodeMapItem(map, @intCast(i)) orelse continue;
-// try w.write(try parser.attributeGetName(attr));
-// try w.write(try parser.attributeGetValue(attr) orelse continue);
-// }
-// try w.endArray();
-// }
-
-// try w.objectField("nodeType");
-// try w.write(@intFromEnum(parser.nodeType(n)));
-
-// try w.objectField("nodeName");
-// try w.write(try parser.nodeName(n));
-
-// try w.objectField("localName");
-// try w.write(try parser.nodeLocalName(n));
-
-// try w.objectField("nodeValue");
-// try w.write((parser.nodeValue(n)) orelse "");
-
-// if (include_child_count) {
-// try w.objectField("childNodeCount");
-// const child_nodes = try parser.nodeGetChildNodes(n);
-// try w.write(parser.nodeListLength(child_nodes));
-// }
-
-// try w.objectField("documentURL");
-// try w.write(null);
-
-// try w.objectField("baseURL");
-// try w.write(null);
-
-// try w.objectField("xmlVersion");
-// try w.write("");
-
-// try w.objectField("compatibilityMode");
-// try w.write("NoQuirksMode");
-
-// try w.objectField("isScrollable");
-// try w.write(false);
-// }
-// };
-
-// const testing = @import("testing.zig");
-// test "cdp Node: Registry register" {
-// parser.init();
-// defer parser.deinit();
-
-// var registry = Registry.init(testing.allocator);
-// defer registry.deinit();
-
-// try testing.expectEqual(0, registry.lookup_by_id.count());
-// try testing.expectEqual(0, registry.lookup_by_node.count());
-
-// var doc = try testing.Document.init("link1");
-// defer doc.deinit();
-
-// {
-// const n = (try doc.querySelector("#a1")).?;
-// const node = try registry.register(n);
-// const n1b = registry.lookup_by_id.get(1).?;
-// const n1c = registry.lookup_by_node.get(node._node).?;
-// try testing.expectEqual(node, n1b);
-// try testing.expectEqual(node, n1c);
-
-// try testing.expectEqual(1, node.id);
-// try testing.expectEqual(n, node._node);
-// }
-
-// {
-// const n = (try doc.querySelector("p")).?;
-// const node = try registry.register(n);
-// const n1b = registry.lookup_by_id.get(2).?;
-// const n1c = registry.lookup_by_node.get(node._node).?;
-// try testing.expectEqual(node, n1b);
-// try testing.expectEqual(node, n1c);
-
-// try testing.expectEqual(2, node.id);
-// try testing.expectEqual(n, node._node);
-// }
-// }
-
-// test "cdp Node: search list" {
-// parser.init();
-// defer parser.deinit();
-
-// var registry = Registry.init(testing.allocator);
-// defer registry.deinit();
-
-// var search_list = Search.List.init(testing.allocator, &registry);
-// defer search_list.deinit();
-
-// {
-// // empty search list, noops
-// search_list.remove("0");
-// try testing.expectEqual(null, search_list.get("0"));
-// }
-
-// {
-// // empty nodes
-// const s1 = try search_list.create(&.{});
-// try testing.expectEqual("0", s1.name);
-// try testing.expectEqual(0, s1.node_ids.len);
-
-// const s2 = search_list.get("0").?;
-// try testing.expectEqual("0", s2.name);
-// try testing.expectEqual(0, s2.node_ids.len);
-
-// search_list.remove("0");
-// try testing.expectEqual(null, search_list.get("0"));
-// }
-
-// {
-// var doc = try testing.Document.init("");
-// defer doc.deinit();
-
-// const s1 = try search_list.create(try doc.querySelectorAll("a"));
-// try testing.expectEqual("1", s1.name);
-// try testing.expectEqualSlices(u32, &.{ 1, 2 }, s1.node_ids);
-
-// try testing.expectEqual(2, registry.lookup_by_id.count());
-// try testing.expectEqual(2, registry.lookup_by_node.count());
-
-// const s2 = try search_list.create(try doc.querySelectorAll("#a1"));
-// try testing.expectEqual("2", s2.name);
-// try testing.expectEqualSlices(u32, &.{1}, s2.node_ids);
-
-// const s3 = try search_list.create(try doc.querySelectorAll("#a2"));
-// try testing.expectEqual("3", s3.name);
-// try testing.expectEqualSlices(u32, &.{2}, s3.node_ids);
-
-// try testing.expectEqual(2, registry.lookup_by_id.count());
-// try testing.expectEqual(2, registry.lookup_by_node.count());
-// }
-// }
-
-// test "cdp Node: Writer" {
-// parser.init();
-// defer parser.deinit();
-
-// var registry = Registry.init(testing.allocator);
-// defer registry.deinit();
-
-// var doc = try testing.Document.init("");
-// defer doc.deinit();
-
-// {
-// const node = try registry.register(doc.asNode());
-// const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
-// .root = node,
-// .depth = 0,
-// .exclude_root = false,
-// .registry = &registry,
-// }, .{});
-// defer testing.allocator.free(json);
-
-// try testing.expectJson(.{
-// .nodeId = 1,
-// .backendNodeId = 1,
-// .nodeType = 9,
-// .nodeName = "#document",
-// .localName = "",
-// .nodeValue = "",
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .isScrollable = false,
-// .compatibilityMode = "NoQuirksMode",
-// .childNodeCount = 1,
-// .children = &.{.{
-// .nodeId = 2,
-// .backendNodeId = 2,
-// .nodeType = 1,
-// .nodeName = "HTML",
-// .localName = "html",
-// .nodeValue = "",
-// .childNodeCount = 2,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// }},
-// }, json);
-// }
-
-// {
-// const node = registry.lookup_by_id.get(2).?;
-// const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
-// .root = node,
-// .depth = 1,
-// .exclude_root = false,
-// .registry = &registry,
-// }, .{});
-// defer testing.allocator.free(json);
-
-// try testing.expectJson(.{
-// .nodeId = 2,
-// .backendNodeId = 2,
-// .nodeType = 1,
-// .nodeName = "HTML",
-// .localName = "html",
-// .nodeValue = "",
-// .childNodeCount = 2,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// .children = &.{ .{
-// .nodeId = 3,
-// .backendNodeId = 3,
-// .nodeType = 1,
-// .nodeName = "HEAD",
-// .localName = "head",
-// .nodeValue = "",
-// .childNodeCount = 0,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// .parentId = 2,
-// }, .{
-// .nodeId = 4,
-// .backendNodeId = 4,
-// .nodeType = 1,
-// .nodeName = "BODY",
-// .localName = "body",
-// .nodeValue = "",
-// .childNodeCount = 2,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// .parentId = 2,
-// } },
-// }, json);
-// }
-
-// {
-// const node = registry.lookup_by_id.get(2).?;
-// const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
-// .root = node,
-// .depth = -1,
-// .exclude_root = true,
-// .registry = &registry,
-// }, .{});
-// defer testing.allocator.free(json);
-
-// try testing.expectJson(&.{ .{
-// .nodeId = 3,
-// .backendNodeId = 3,
-// .nodeType = 1,
-// .nodeName = "HEAD",
-// .localName = "head",
-// .nodeValue = "",
-// .childNodeCount = 0,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// .parentId = 2,
-// }, .{
-// .nodeId = 4,
-// .backendNodeId = 4,
-// .nodeType = 1,
-// .nodeName = "BODY",
-// .localName = "body",
-// .nodeValue = "",
-// .childNodeCount = 2,
-// .documentURL = null,
-// .baseURL = null,
-// .xmlVersion = "",
-// .compatibilityMode = "NoQuirksMode",
-// .isScrollable = false,
-// .children = &.{ .{
-// .nodeId = 5,
-// .localName = "a",
-// .childNodeCount = 0,
-// .parentId = 4,
-// }, .{
-// .nodeId = 6,
-// .localName = "div",
-// .childNodeCount = 1,
-// .parentId = 4,
-// .children = &.{.{
-// .nodeId = 7,
-// .localName = "a",
-// .childNodeCount = 0,
-// .parentId = 6,
-// }},
-// } },
-// } }, json);
-// }
-// }
+const std = @import("std");
+const Allocator = std.mem.Allocator;
+
+const log = @import("../log.zig");
+const Page = @import("../browser/Page.zig");
+const DOMNode = @import("../browser/webapi/Node.zig");
+
+pub const Id = u32;
+
+const Node = @This();
+
+id: Id,
+dom: *DOMNode,
+set_child_nodes_event: bool,
+
+// Whenever we send a node to the client, we register it here for future lookup.
+// We maintain a node -> id and id -> node lookup.
+pub const Registry = struct {
+ node_id: u32,
+ allocator: Allocator,
+ arena: std.heap.ArenaAllocator,
+ node_pool: std.heap.MemoryPool(Node),
+ lookup_by_id: std.AutoHashMapUnmanaged(Id, *Node),
+ lookup_by_node: std.HashMapUnmanaged(*DOMNode, *Node, NodeContext, std.hash_map.default_max_load_percentage),
+
+ pub fn init(allocator: Allocator) Registry {
+ return .{
+ .node_id = 1,
+ .lookup_by_id = .{},
+ .lookup_by_node = .{},
+ .allocator = allocator,
+ .arena = std.heap.ArenaAllocator.init(allocator),
+ .node_pool = std.heap.MemoryPool(Node).init(allocator),
+ };
+ }
+
+ pub fn deinit(self: *Registry) void {
+ const allocator = self.allocator;
+ self.lookup_by_id.deinit(allocator);
+ self.lookup_by_node.deinit(allocator);
+ self.node_pool.deinit();
+ self.arena.deinit();
+ }
+
+ pub fn reset(self: *Registry) void {
+ self.lookup_by_id.clearRetainingCapacity();
+ self.lookup_by_node.clearRetainingCapacity();
+ _ = self.arena.reset(.{ .retain_with_limit = 1024 });
+ _ = self.node_pool.reset(.{ .retain_with_limit = 1024 });
+ }
+
+ pub fn register(self: *Registry, dom_node: *DOMNode) !*Node {
+ const node_lookup_gop = try self.lookup_by_node.getOrPut(self.allocator, dom_node);
+ if (node_lookup_gop.found_existing) {
+ return node_lookup_gop.value_ptr.*;
+ }
+
+ // on error, we're probably going to abort the entire browser context
+ // but, just in case, let's try to keep things tidy.
+ errdefer _ = self.lookup_by_node.remove(dom_node);
+
+ const node = try self.node_pool.create();
+ errdefer self.node_pool.destroy(node);
+
+ const id = self.node_id;
+ self.node_id = id + 1;
+
+ node.* = .{
+ .id = id,
+ .dom = dom_node,
+ .set_child_nodes_event = false,
+ };
+
+ node_lookup_gop.value_ptr.* = node;
+ try self.lookup_by_id.putNoClobber(self.allocator, id, node);
+ return node;
+ }
+};
+
+const NodeContext = struct {
+ pub fn hash(_: NodeContext, dom_node: *DOMNode) u64 {
+ return std.hash.Wyhash.hash(0, std.mem.asBytes(&@intFromPtr(dom_node)));
+ }
+
+ pub fn eql(_: NodeContext, a: *DOMNode, b: *DOMNode) bool {
+ return @intFromPtr(a) == @intFromPtr(b);
+ }
+};
+
+// Searches are a 3-step process:
+// 1 - Dom.performSearch
+// 2 - Dom.getSearchResults
+// 3 - Dom.discardSearchResults
+//
+// For a given browser context, we can have multiple active searches. I.e.
+// performSearch could be called multiple times without getSearchResults or
+// discardSearchResults being called. We keep these active searches in the
+// browser context's node_search_list, which is a SearchList. Since we don't
+// expect many active searches (mostly just 1), a list is fine to scan through.
+pub const Search = struct {
+ name: []const u8,
+ node_ids: []const Id,
+
+ pub const List = struct {
+ search_id: u16 = 0,
+ registry: *Registry,
+ arena: std.heap.ArenaAllocator,
+ searches: std.ArrayListUnmanaged(Search) = .{},
+
+ pub fn init(allocator: Allocator, registry: *Registry) List {
+ return .{
+ .registry = registry,
+ .arena = std.heap.ArenaAllocator.init(allocator),
+ };
+ }
+
+ pub fn deinit(self: *List) void {
+ self.arena.deinit();
+ }
+
+ pub fn reset(self: *List) void {
+ self.search_id = 0;
+ self.searches = .{};
+ _ = self.arena.reset(.{ .retain_with_limit = 4096 });
+ }
+
+ pub fn create(self: *List, nodes: []const *DOMNode) !Search {
+ const id = self.search_id;
+ defer self.search_id = id +% 1;
+
+ const arena = self.arena.allocator();
+
+ const name = switch (id) {
+ 0 => "0",
+ 1 => "1",
+ 2 => "2",
+ 3 => "3",
+ 4 => "4",
+ 5 => "5",
+ 6 => "6",
+ 7 => "7",
+ 8 => "8",
+ 9 => "9",
+ else => try std.fmt.allocPrint(arena, "{d}", .{id}),
+ };
+
+ var registry = self.registry;
+ const node_ids = try arena.alloc(Id, nodes.len);
+ for (nodes, node_ids) |node, *node_id| {
+ node_id.* = (try registry.register(node)).id;
+ }
+
+ const search = Search{
+ .name = name,
+ .node_ids = node_ids,
+ };
+ try self.searches.append(arena, search);
+ return search;
+ }
+
+ pub fn remove(self: *List, name: []const u8) void {
+ for (self.searches.items, 0..) |search, i| {
+ if (std.mem.eql(u8, name, search.name)) {
+ _ = self.searches.swapRemove(i);
+ return;
+ }
+ }
+ }
+
+ pub fn get(self: *const List, name: []const u8) ?Search {
+ for (self.searches.items) |search| {
+ if (std.mem.eql(u8, name, search.name)) {
+ return search;
+ }
+ }
+ return null;
+ }
+ };
+};
+
+// Need a custom writer, because we can't just serialize the node as-is.
+// Sometimes we want to serialize the node without children, sometimes with just
+// its direct children, and sometimes the entire tree.
+// (For now, we only support direct children)
+
+pub const Writer = struct {
+ depth: i32,
+ exclude_root: bool,
+ root: *const Node,
+ registry: *Registry,
+
+ pub const Opts = struct {
+ depth: i32 = 0,
+ exclude_root: bool = false,
+ };
+
+ pub fn jsonStringify(self: *const Writer, w: anytype) error{WriteFailed}!void {
+ if (self.exclude_root) {
+ _ = self.writeChildren(self.root, 1, w) catch |err| {
+ log.err(.cdp, "node writeChildren", .{ .err = err });
+ return error.WriteFailed;
+ };
+ } else {
+ self.toJSON(self.root, 0, w) catch |err| {
+ // The only error our jsonStringify method can return is
+ // @TypeOf(w).Error. In other words, our code can't return its own
+ // error, we can only return a writer error. Kinda sucks.
+ log.err(.cdp, "node toJSON stringify", .{ .err = err });
+ return error.WriteFailed;
+ };
+ }
+ }
+
+ fn toJSON(self: *const Writer, node: *const Node, depth: usize, w: anytype) !void {
+ try w.beginObject();
+ try self.writeCommon(node, false, w);
+
+ try w.objectField("children");
+ const child_count = try self.writeChildren(node, depth, w);
+ try w.objectField("childNodeCount");
+ try w.write(child_count);
+
+ try w.endObject();
+ }
+
+ fn writeChildren(self: *const Writer, node: *const Node, depth: usize, w: anytype) anyerror!usize {
+ var count: usize = 0;
+ var it = node.dom.childrenIterator();
+
+ var registry = self.registry;
+ const full_child = self.depth < 0 or self.depth < depth;
+
+ try w.beginArray();
+ while (it.next()) |dom_child| {
+ const child_node = try registry.register(dom_child);
+ if (full_child) {
+ try self.toJSON(child_node, depth + 1, w);
+ } else {
+ try w.beginObject();
+ try self.writeCommon(child_node, true, w);
+ try w.endObject();
+ }
+ count += 1;
+ }
+ try w.endArray();
+
+ return count;
+ }
+
+ fn writeCommon(self: *const Writer, node: *const Node, include_child_count: bool, w: anytype) !void {
+ try w.objectField("nodeId");
+ try w.write(node.id);
+
+ try w.objectField("backendNodeId");
+ try w.write(node.id);
+
+ const dom_node = node.dom;
+
+ if (dom_node._parent) |dom_parent| {
+ const parent_node = try self.registry.register(dom_parent);
+ try w.objectField("parentId");
+ try w.write(parent_node.id);
+ }
+
+ if (dom_node.is(DOMNode.Element)) |element| {
+ if (element.hasAttributes()) {
+ try w.objectField("attributes");
+ try w.beginArray();
+ var it = element.attributeIterator();
+ while (it.next()) |attr| {
+ try w.write(attr._name.str());
+ try w.write(attr._value.str());
+ }
+ try w.endArray();
+ }
+
+ try w.objectField("localName");
+ try w.write(element.getLocalName());
+ } else {
+ try w.objectField("localName");
+ try w.write("");
+ }
+
+ try w.objectField("nodeType");
+ try w.write(dom_node.getNodeType());
+
+ try w.objectField("nodeName");
+ var name_buf: [Page.BUF_SIZE]u8 = undefined;
+ try w.write(dom_node.getNodeName(&name_buf));
+
+ try w.objectField("nodeValue");
+ try w.write(dom_node.getNodeValue() orelse "");
+
+ if (include_child_count) {
+ try w.objectField("childNodeCount");
+ try w.write(dom_node.getChildrenCount());
+ }
+
+ try w.objectField("documentURL");
+ try w.write(null);
+
+ try w.objectField("baseURL");
+ try w.write(null);
+
+ try w.objectField("xmlVersion");
+ try w.write("");
+
+ try w.objectField("compatibilityMode");
+ try w.write("NoQuirksMode");
+
+ try w.objectField("isScrollable");
+ try w.write(false);
+ }
+};
+
+const testing = @import("testing.zig");
+test "cdp Node: Registry register" {
+ var registry = Registry.init(testing.allocator);
+ defer registry.deinit();
+
+ try testing.expectEqual(0, registry.lookup_by_id.count());
+ try testing.expectEqual(0, registry.lookup_by_node.count());
+
+ var page = try testing.pageTest("cdp/registry1.html");
+ defer page._session.removePage();
+ var doc = page.window._document;
+
+ {
+ const dom_node = (try doc.querySelector("#a1", page)).?.asNode();
+ const node = try registry.register(dom_node);
+ const n1b = registry.lookup_by_id.get(1).?;
+ const n1c = registry.lookup_by_node.get(node.dom).?;
+ try testing.expectEqual(node, n1b);
+ try testing.expectEqual(node, n1c);
+
+ try testing.expectEqual(1, node.id);
+ try testing.expectEqual(dom_node, node.dom);
+ }
+
+ {
+ const dom_node = (try doc.querySelector("p", page)).?.asNode();
+ const node = try registry.register(dom_node);
+ const n1b = registry.lookup_by_id.get(2).?;
+ const n1c = registry.lookup_by_node.get(node.dom).?;
+ try testing.expectEqual(node, n1b);
+ try testing.expectEqual(node, n1c);
+
+ try testing.expectEqual(2, node.id);
+ try testing.expectEqual(dom_node, node.dom);
+ }
+}
+
+test "cdp Node: search list" {
+ var registry = Registry.init(testing.allocator);
+ defer registry.deinit();
+
+ var search_list = Search.List.init(testing.allocator, &registry);
+ defer search_list.deinit();
+
+ {
+ // empty search list, noops
+ search_list.remove("0");
+ try testing.expectEqual(null, search_list.get("0"));
+ }
+
+ {
+ // empty nodes
+ const s1 = try search_list.create(&.{});
+ try testing.expectEqual("0", s1.name);
+ try testing.expectEqual(0, s1.node_ids.len);
+
+ const s2 = search_list.get("0").?;
+ try testing.expectEqual("0", s2.name);
+ try testing.expectEqual(0, s2.node_ids.len);
+
+ search_list.remove("0");
+ try testing.expectEqual(null, search_list.get("0"));
+ }
+
+ {
+ var page = try testing.pageTest("cdp/registry2.html");
+ defer page._session.removePage();
+ var doc = page.window._document;
+
+ const s1 = try search_list.create((try doc.querySelectorAll("a", page))._nodes);
+ try testing.expectEqual("1", s1.name);
+ try testing.expectEqualSlices(u32, &.{ 1, 2 }, s1.node_ids);
+
+ try testing.expectEqual(2, registry.lookup_by_id.count());
+ try testing.expectEqual(2, registry.lookup_by_node.count());
+
+ const s2 = try search_list.create((try doc.querySelectorAll("#a1", page))._nodes);
+ try testing.expectEqual("2", s2.name);
+ try testing.expectEqualSlices(u32, &.{1}, s2.node_ids);
+
+ const s3 = try search_list.create((try doc.querySelectorAll("#a2", page))._nodes);
+ try testing.expectEqual("3", s3.name);
+ try testing.expectEqualSlices(u32, &.{2}, s3.node_ids);
+
+ try testing.expectEqual(2, registry.lookup_by_id.count());
+ try testing.expectEqual(2, registry.lookup_by_node.count());
+ }
+}
+
+test "cdp Node: Writer" {
+ var registry = Registry.init(testing.allocator);
+ defer registry.deinit();
+
+ var page = try testing.pageTest("cdp/registry3.html");
+ defer page._session.removePage();
+ var doc = page.window._document;
+
+ {
+ const node = try registry.register(doc.asNode());
+ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
+ .root = node,
+ .depth = 0,
+ .exclude_root = false,
+ .registry = &registry,
+ }, .{});
+ defer testing.allocator.free(json);
+
+ try testing.expectJson(.{
+ .nodeId = 1,
+ .backendNodeId = 1,
+ .nodeType = 9,
+ .nodeName = "#document",
+ .localName = "",
+ .nodeValue = "",
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .isScrollable = false,
+ .compatibilityMode = "NoQuirksMode",
+ .childNodeCount = 1,
+ .children = &.{.{
+ .nodeId = 2,
+ .backendNodeId = 2,
+ .nodeType = 1,
+ .nodeName = "HTML",
+ .localName = "html",
+ .nodeValue = "",
+ .childNodeCount = 2,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ }},
+ }, json);
+ }
+
+ {
+ const node = registry.lookup_by_id.get(2).?;
+ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
+ .root = node,
+ .depth = 1,
+ .exclude_root = false,
+ .registry = &registry,
+ }, .{});
+ defer testing.allocator.free(json);
+
+ try testing.expectJson(.{
+ .nodeId = 2,
+ .backendNodeId = 2,
+ .nodeType = 1,
+ .nodeName = "HTML",
+ .localName = "html",
+ .nodeValue = "",
+ .childNodeCount = 2,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ .children = &.{ .{
+ .nodeId = 3,
+ .backendNodeId = 3,
+ .nodeType = 1,
+ .nodeName = "HEAD",
+ .localName = "head",
+ .nodeValue = "",
+ .childNodeCount = 0,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ .parentId = 2,
+ }, .{
+ .nodeId = 4,
+ .backendNodeId = 4,
+ .nodeType = 1,
+ .nodeName = "BODY",
+ .localName = "body",
+ .nodeValue = "",
+ .childNodeCount = 3,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ .parentId = 2,
+ } },
+ }, json);
+ }
+
+ {
+ const node = registry.lookup_by_id.get(2).?;
+ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
+ .root = node,
+ .depth = -1,
+ .exclude_root = true,
+ .registry = &registry,
+ }, .{});
+ defer testing.allocator.free(json);
+
+ try testing.expectJson(&.{ .{
+ .nodeId = 3,
+ .backendNodeId = 3,
+ .nodeType = 1,
+ .nodeName = "HEAD",
+ .localName = "head",
+ .nodeValue = "",
+ .childNodeCount = 0,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ .parentId = 2,
+ }, .{
+ .nodeId = 4,
+ .backendNodeId = 4,
+ .nodeType = 1,
+ .nodeName = "BODY",
+ .localName = "body",
+ .nodeValue = "",
+ .childNodeCount = 3,
+ .documentURL = null,
+ .baseURL = null,
+ .xmlVersion = "",
+ .compatibilityMode = "NoQuirksMode",
+ .isScrollable = false,
+ .children = &.{ .{
+ .nodeId = 5,
+ .localName = "a",
+ .childNodeCount = 0,
+ .attributes = &.{"id", "a1"},
+ .parentId = 4,
+ }, .{
+ .nodeId = 6,
+ .localName = "div",
+ .childNodeCount = 1,
+ .parentId = 4,
+ .children = &.{.{
+ .nodeId = 7,
+ .localName = "a",
+ .childNodeCount = 0,
+ .parentId = 6,
+ .attributes = &.{"id", "a2"},
+ }},
+ }, .{
+ .nodeId = 8,
+ .backendNodeId = 8,
+ .nodeName = "#text",
+ .localName = "",
+ .childNodeCount = 0,
+ .parentId = 4,
+ .nodeValue = "\n",
+ } },
+ } }, json);
+ }
+}
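
For orientation, this is roughly how a CDP DOM handler is expected to combine the registry and writer (a hedged sketch: cmd, bc, dom_node and cmd.sendResult are illustrative; nodeWriter is the BrowserContext helper re-enabled in cdp.zig below):

    const bc = cmd.browser_context;                       // illustrative accessor
    const node = try bc.node_registry.register(dom_node); // *DOMNode -> stable CDP node id
    const writer = bc.nodeWriter(node, .{ .depth = 1 });  // root plus its direct children
    try cmd.sendResult(.{ .root = writer }, .{});         // Writer.jsonStringify emits the JSON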
diff --git a/src/cdp/cdp.zig b/src/cdp/cdp.zig
index a9b3c919..a2ce7159 100644
--- a/src/cdp/cdp.zig
+++ b/src/cdp/cdp.zig
@@ -287,8 +287,7 @@ pub fn CDPT(comptime TypeProvider: type) type {
}
pub fn BrowserContext(comptime CDP_T: type) type {
- // @ZIGMOD
- // const Node = @import("Node.zig");
+ const Node = @import("Node.zig");
return struct {
id: []const u8,
@@ -328,9 +327,8 @@ pub fn BrowserContext(comptime CDP_T: type) type {
security_origin: []const u8,
page_life_cycle_events: bool,
secure_context_type: []const u8,
- // @ZIGDOM
- // node_registry: Node.Registry,
- // node_search_list: Node.Search.List,
+ node_registry: Node.Registry,
+ node_search_list: Node.Search.List,
inspector: js.Inspector,
isolated_worlds: std.ArrayListUnmanaged(IsolatedWorld),
@@ -363,9 +361,8 @@ pub fn BrowserContext(comptime CDP_T: type) type {
const inspector = try cdp.browser.env.newInspector(arena, self);
- // @ZIGDOM
- // var registry = Node.Registry.init(allocator);
- // errdefer registry.deinit();
+ var registry = Node.Registry.init(allocator);
+ errdefer registry.deinit();
self.* = .{
.id = id,
@@ -378,9 +375,8 @@ pub fn BrowserContext(comptime CDP_T: type) type {
.secure_context_type = "Secure", // TODO = enum
.loader_id = LOADER_ID,
.page_life_cycle_events = false, // TODO; Target based value
- // @ZIGDOM
- // .node_registry = registry,
- // .node_search_list = undefined,
+ .node_registry = registry,
+ .node_search_list = undefined,
.isolated_worlds = .empty,
.inspector = inspector,
.notification_arena = cdp.notification_arena.allocator(),
@@ -388,8 +384,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
.captured_responses = .empty,
.log_interceptor = LogInterceptor(Self).init(allocator, self),
};
- // ZIGDOM
- // self.node_search_list = Node.Search.List.init(allocator, &self.node_registry);
+ self.node_search_list = Node.Search.List.init(allocator, &self.node_registry);
errdefer self.deinit();
try cdp.browser.notification.register(.page_remove, self, onPageRemove);
@@ -424,9 +419,8 @@ pub fn BrowserContext(comptime CDP_T: type) type {
world.deinit();
}
self.isolated_worlds.clearRetainingCapacity();
- // @ZIGDOM
- // self.node_registry.deinit();
- // self.node_search_list.deinit();
+ self.node_registry.deinit();
+ self.node_search_list.deinit();
self.cdp.browser.notification.unregisterAll(self);
if (self.http_proxy_changed) {
@@ -440,10 +434,8 @@ pub fn BrowserContext(comptime CDP_T: type) type {
}
pub fn reset(self: *Self) void {
- // @ZIGDOM
- _ = self;
- // self.node_registry.reset();
- // self.node_search_list.reset();
+ self.node_registry.reset();
+ self.node_search_list.reset();
}
pub fn createIsolatedWorld(self: *Self, world_name: []const u8, grant_universal_access: bool) !*IsolatedWorld {
@@ -462,15 +454,14 @@ pub fn BrowserContext(comptime CDP_T: type) type {
return world;
}
- // @ZIGDOM
- // pub fn nodeWriter(self: *Self, root: *const Node, opts: Node.Writer.Opts) Node.Writer {
- // return .{
- // .root = root,
- // .depth = opts.depth,
- // .exclude_root = opts.exclude_root,
- // .registry = &self.node_registry,
- // };
- // }
+ pub fn nodeWriter(self: *Self, root: *const Node, opts: Node.Writer.Opts) Node.Writer {
+ return .{
+ .root = root,
+ .depth = opts.depth,
+ .exclude_root = opts.exclude_root,
+ .registry = &self.node_registry,
+ };
+ }
pub fn getURL(self: *const Self) ?[:0]const u8 {
const page = self.session.currentPage() orelse return null;
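
The restored node_search_list backs the three-step search flow documented in cdp/Node.zig (Dom.performSearch, Dom.getSearchResults, Dom.discardSearchResults). A hedged sketch of that lifecycle against the re-enabled fields, with bc and matched_nodes as illustrative placeholders:

    // performSearch: register the matches and remember them under a generated name.
    const search = try bc.node_search_list.create(matched_nodes); // matched_nodes: []const *DOMNode
    // getSearchResults: look the search up by name and return its CDP node ids.
    const ids = bc.node_search_list.get(search.name).?.node_ids;
    // discardSearchResults: forget it.
    bc.node_search_list.remove(search.name);
    _ = ids;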
diff --git a/src/cdp/testing.zig b/src/cdp/testing.zig
index 3912b842..cefc8823 100644
--- a/src/cdp/testing.zig
+++ b/src/cdp/testing.zig
@@ -32,8 +32,7 @@ pub const expect = std.testing.expect;
pub const expectEqual = base.expectEqual;
pub const expectError = base.expectError;
pub const expectEqualSlices = base.expectEqualSlices;
-
-pub const Document = @import("../testing.zig").Document;
+pub const pageTest = base.pageTest;
const Client = struct {
allocator: Allocator,
diff --git a/src/testing.zig b/src/testing.zig
index 77cc1f00..b250dc20 100644
--- a/src/testing.zig
+++ b/src/testing.zig
@@ -40,6 +40,7 @@ const App = @import("App.zig");
const js = @import("browser/js/js.zig");
const Browser = @import("browser/Browser.zig");
const Session = @import("browser/Session.zig");
+const Page = @import("browser/Page.zig");
// Merged std.testing.expectEqual and std.testing.expectString
// can be useful when testing fields of an anytype an you don't know
@@ -415,6 +416,27 @@ fn runWebApiTest(test_file: [:0]const u8) !void {
};
}
+// Used by a few CDP tests - wouldn't be sad to see this go.
+pub fn pageTest(comptime test_file: []const u8) !*Page {
+ const page = try test_session.createPage();
+ errdefer test_session.removePage();
+
+ const url = try std.fmt.allocPrintSentinel(
+ arena_allocator,
+ "http://127.0.0.1:9582/{s}{s}",
+ .{ WEB_API_TEST_ROOT, test_file },
+ 0,
+ );
+
+ try page.navigate(url, .{});
+ test_session.fetchWait(2000);
+
+ page._session.browser.runMicrotasks();
+ page._session.browser.runMessageLoop();
+
+ return page;
+}
+
test {
std.testing.refAllDecls(@This());
}