diff --git a/src/cdp/Node.zig b/src/cdp/Node.zig
index 3f8f069a..ed682a0b 100644
--- a/src/cdp/Node.zig
+++ b/src/cdp/Node.zig
@@ -44,50 +44,8 @@ const CompatibilityMode = enum {
NoQuirksMode,
};
-pub fn jsonStringify(self: *const Node, writer: anytype) !void {
- try writer.beginObject();
- try writer.objectField("nodeId");
- try writer.write(self.id);
-
- try writer.objectField("parentId");
- try writer.write(self.parent_id);
-
- try writer.objectField("backendNodeId");
- try writer.write(self.backend_node_id);
-
- try writer.objectField("nodeType");
- try writer.write(self.node_type);
-
- try writer.objectField("nodeName");
- try writer.write(self.node_name);
-
- try writer.objectField("localName");
- try writer.write(self.local_name);
-
- try writer.objectField("nodeValue");
- try writer.write(self.node_value);
-
- try writer.objectField("childNodeCount");
- try writer.write(self.child_node_count);
-
- try writer.objectField("children");
- try writer.write(self.children);
-
- try writer.objectField("documentURL");
- try writer.write(self.document_url);
-
- try writer.objectField("baseURL");
- try writer.write(self.base_url);
-
- try writer.objectField("xmlVersion");
- try writer.write(self.xml_version);
-
- try writer.objectField("compatibilityMode");
- try writer.write(self.compatibility_mode);
-
- try writer.objectField("isScrollable");
- try writer.write(self.is_scrollable);
- try writer.endObject();
+pub fn writer(self: *const Node, opts: Writer.Opts) Writer {
+ return .{ .node = self, .opts = opts };
}
// Whenever we send a node to the client, we register it here for future lookup.
@@ -95,6 +53,7 @@ pub fn jsonStringify(self: *const Node, writer: anytype) !void {
pub const Registry = struct {
node_id: u32,
allocator: Allocator,
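+ // Backs the `children` slices handed out by registerChildNodes; reclaimed on
+ // reset() and deinit().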
+ arena: std.heap.ArenaAllocator,
node_pool: std.heap.MemoryPool(Node),
lookup_by_id: std.AutoHashMapUnmanaged(Id, *Node),
lookup_by_node: std.HashMapUnmanaged(*parser.Node, *Node, NodeContext, std.hash_map.default_max_load_percentage),
@@ -102,9 +61,10 @@ pub const Registry = struct {
pub fn init(allocator: Allocator) Registry {
return .{
.node_id = 0,
- .allocator = allocator,
.lookup_by_id = .{},
.lookup_by_node = .{},
+ .allocator = allocator,
+ .arena = std.heap.ArenaAllocator.init(allocator),
.node_pool = std.heap.MemoryPool(Node).init(allocator),
};
}
@@ -114,12 +74,14 @@ pub const Registry = struct {
self.lookup_by_id.deinit(allocator);
self.lookup_by_node.deinit(allocator);
self.node_pool.deinit();
+ self.arena.deinit();
}
pub fn reset(self: *Registry) void {
self.lookup_by_id.clearRetainingCapacity();
self.lookup_by_node.clearRetainingCapacity();
- _ = self.node_pool.reset(.{ .retain_capacity = {} });
+ _ = self.arena.reset(.{ .retain_with_limit = 1024 });
+ _ = self.node_pool.reset(.{ .retain_with_limit = 1024 });
}
pub fn register(self: *Registry, n: *parser.Node) !*Node {
@@ -132,11 +94,10 @@ pub const Registry = struct {
// but, just in case, let's try to keep things tidy.
errdefer _ = self.lookup_by_node.remove(n);
- const children = try parser.nodeGetChildNodes(n);
- const children_count = try parser.nodeListLength(children);
-
const id = self.node_id;
- defer self.node_id = id + 1;
+ self.node_id = id + 1;
+
+ const child_nodes = try self.registerChildNodes(n);
const node = try self.node_pool.create();
errdefer self.node_pool.destroy(node);
@@ -146,12 +107,12 @@ pub const Registry = struct {
.id = id,
.parent_id = null, // TODO
.backend_node_id = id, // ??
- .node_name = try parser.nodeName(n),
- .local_name = try parser.nodeLocalName(n),
- .node_value = try parser.nodeValue(n) orelse "",
- .node_type = @intFromEnum(try parser.nodeType(n)),
- .child_node_count = children_count,
- .children = &.{}, // TODO
+ .node_name = parser.nodeName(n) catch return error.NodeNameError,
+ .local_name = parser.nodeLocalName(n) catch return error.NodeLocalNameError,
+ .node_value = (parser.nodeValue(n) catch return error.NodeValueError) orelse "",
+ .node_type = @intFromEnum(parser.nodeType(n) catch return error.NodeTypeError),
+ .child_node_count = @intCast(child_nodes.len),
+ .children = child_nodes,
.document_url = null,
.base_url = null,
.xml_version = "",
@@ -168,6 +129,31 @@ pub const Registry = struct {
try self.lookup_by_id.putNoClobber(self.allocator, id, node);
return node;
}
+
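+ // Registers every direct child of `n` (and, through register(), each child's
+ // own subtree) and returns the registered children. The returned slice is
+ // allocated in the registry's arena, so it stays valid until reset/deinit.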
+ pub fn registerChildNodes(self: *Registry, n: *parser.Node) RegisterError![]*Node {
+ const node_list = parser.nodeGetChildNodes(n) catch return error.GetChildNodeError;
+ const count = parser.nodeListLength(node_list) catch return error.NodeListLengthError;
+
+ const arr = try self.arena.allocator().alloc(*Node, count);
+ var i: usize = 0;
+ for (0..count) |index| {
+ const child = (parser.nodeListItem(node_list, @intCast(index)) catch return error.NodeListItemError) orelse continue;
+ arr[i] = try self.register(child);
+ i += 1;
+ }
+ return arr[0..i];
+ }
+};
+
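+// register() and registerChildNodes() call each other, and Zig can't infer an
+// error set across that recursion, so registerChildNodes spells its set out
+// explicitly; parser failures are mapped onto these named errors.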
+const RegisterError = error{
+ OutOfMemory,
+ GetChildNodeError,
+ NodeListLengthError,
+ NodeListItemError,
+ NodeNameError,
+ NodeLocalNameError,
+ NodeValueError,
+ NodeTypeError,
};
const NodeContext = struct {
@@ -271,8 +257,76 @@ pub const Search = struct {
};
};
+// We need a custom writer because we can't always serialize a node as-is:
+// sometimes we want to serialize the node without children, sometimes with just
+// its direct children, and sometimes the entire tree.
+// (For now, only direct children are supported.)
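+//
+// A node is wrapped via `node.writer(.{})` and the wrapper is handed to
+// std.json (see dom.zig's getDocument and the Writer test below).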
+pub const Writer = struct {
+ opts: Opts,
+ node: *const Node,
+
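+ // Empty for now; presumably this is where options such as serialization depth will live.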
+ pub const Opts = struct {};
+
+ pub fn jsonStringify(self: *const Writer, w: anytype) !void {
+ try w.beginObject();
+ try writeCommon(self.node, w);
+ try w.objectField("children");
+ try w.beginArray();
+ for (self.node.children) |node| {
+ try w.beginObject();
+ try writeCommon(node, w);
+ try w.endObject();
+ }
+ try w.endArray();
+ try w.endObject();
+ }
+
+ fn writeCommon(node: *const Node, w: anytype) !void {
+ try w.objectField("nodeId");
+ try w.write(node.id);
+
+ if (node.parent_id) |pid| {
+ try w.objectField("parentId");
+ try w.write(pid);
+ }
+
+ try w.objectField("backendNodeId");
+ try w.write(node.backend_node_id);
+
+ try w.objectField("nodeType");
+ try w.write(node.node_type);
+
+ try w.objectField("nodeName");
+ try w.write(node.node_name);
+
+ try w.objectField("localName");
+ try w.write(node.local_name);
+
+ try w.objectField("nodeValue");
+ try w.write(node.node_value);
+
+ try w.objectField("childNodeCount");
+ try w.write(node.child_node_count);
+
+ try w.objectField("documentURL");
+ try w.write(node.document_url);
+
+ try w.objectField("baseURL");
+ try w.write(node.base_url);
+
+ try w.objectField("xmlVersion");
+ try w.write(node.xml_version);
+
+ try w.objectField("compatibilityMode");
+ try w.write(node.compatibility_mode);
+
+ try w.objectField("isScrollable");
+ try w.write(node.is_scrollable);
+ }
+};
+
const testing = @import("testing.zig");
-test "CDP Node: Registry register" {
+test "cdp Node: Registry register" {
var registry = Registry.init(testing.allocator);
defer registry.deinit();
@@ -298,7 +352,8 @@ test "CDP Node: Registry register" {
try testing.expectEqual("a", node.local_name);
try testing.expectEqual("", node.node_value);
try testing.expectEqual(1, node.child_node_count);
- try testing.expectEqual(0, node.children.len);
+ try testing.expectEqual(1, node.children.len);
+ try testing.expectEqual(1, node.children[0].id);
try testing.expectEqual(null, node.document_url);
try testing.expectEqual(null, node.base_url);
try testing.expectEqual("", node.xml_version);
@@ -310,20 +365,21 @@ test "CDP Node: Registry register" {
{
const n = (try doc.querySelector("p")).?;
const node = try registry.register(n);
- const n1b = registry.lookup_by_id.get(1).?;
+ const n1b = registry.lookup_by_id.get(2).?;
const n1c = registry.lookup_by_node.get(node._node).?;
try testing.expectEqual(node, n1b);
try testing.expectEqual(node, n1c);
- try testing.expectEqual(1, node.id);
+ try testing.expectEqual(2, node.id);
try testing.expectEqual(null, node.parent_id);
try testing.expectEqual(1, node.node_type);
- try testing.expectEqual(1, node.backend_node_id);
+ try testing.expectEqual(2, node.backend_node_id);
try testing.expectEqual("P", node.node_name);
try testing.expectEqual("p", node.local_name);
try testing.expectEqual("", node.node_value);
try testing.expectEqual(1, node.child_node_count);
- try testing.expectEqual(0, node.children.len);
+ try testing.expectEqual(1, node.children.len);
+ try testing.expectEqual(3, node.children[0].id);
try testing.expectEqual(null, node.document_url);
try testing.expectEqual(null, node.base_url);
try testing.expectEqual("", node.xml_version);
@@ -333,7 +389,7 @@ test "CDP Node: Registry register" {
}
}
-test "CDP Node: search list" {
+test "cdp Node: search list" {
var registry = Registry.init(testing.allocator);
defer registry.deinit();
@@ -383,3 +439,27 @@ test "CDP Node: search list" {
try testing.expectEqual(2, registry.lookup_by_node.count());
}
}
+
+test "cdp Node: Writer" {
+ var registry = Registry.init(testing.allocator);
+ defer registry.deinit();
+
+ var doc = try testing.Document.init("");
+ defer doc.deinit();
+
+ {
+ const node = try registry.register(doc.asNode());
+ const json = try std.json.stringifyAlloc(testing.allocator, node.writer(.{}), .{});
+ defer testing.allocator.free(json);
+
+ try testing.expectJson(.{ .nodeId = 0, .backendNodeId = 0, .nodeType = 9, .nodeName = "#document", .localName = "", .nodeValue = "", .documentURL = null, .baseURL = null, .xmlVersion = "", .isScrollable = false, .compatibilityMode = "NoQuirksMode", .childNodeCount = 1, .children = &.{.{ .nodeId = 1, .backendNodeId = 1, .nodeType = 1, .nodeName = "HTML", .localName = "html", .nodeValue = "", .childNodeCount = 2, .documentURL = null, .baseURL = null, .xmlVersion = "", .compatibilityMode = "NoQuirksMode", .isScrollable = false }} }, json);
+ }
+
+ {
+ const node = registry.lookup_by_id.get(1).?;
+ const json = try std.json.stringifyAlloc(testing.allocator, node.writer(.{}), .{});
+ defer testing.allocator.free(json);
+
+ try testing.expectJson(.{ .nodeId = 1, .backendNodeId = 1, .nodeType = 1, .nodeName = "HTML", .localName = "html", .nodeValue = "", .childNodeCount = 2, .documentURL = null, .baseURL = null, .xmlVersion = "", .compatibilityMode = "NoQuirksMode", .isScrollable = false, .children = &.{ .{ .nodeId = 2, .backendNodeId = 2, .nodeType = 1, .nodeName = "HEAD", .localName = "head", .nodeValue = "", .childNodeCount = 0, .documentURL = null, .baseURL = null, .xmlVersion = "", .compatibilityMode = "NoQuirksMode", .isScrollable = false }, .{ .nodeId = 3, .backendNodeId = 3, .nodeType = 1, .nodeName = "BODY", .localName = "body", .nodeValue = "", .childNodeCount = 2, .documentURL = null, .baseURL = null, .xmlVersion = "", .compatibilityMode = "NoQuirksMode", .isScrollable = false } } }, json);
+ }
+}
diff --git a/src/cdp/domains/dom.zig b/src/cdp/domains/dom.zig
index 3c97b078..fedecf99 100644
--- a/src/cdp/domains/dom.zig
+++ b/src/cdp/domains/dom.zig
@@ -51,9 +51,7 @@ fn getDocument(cmd: anytype) !void {
const doc = page.doc orelse return error.DocumentNotLoaded;
const node = try bc.node_registry.register(parser.documentToNode(doc));
- return cmd.sendResult(.{
- .root = node,
- }, .{});
+ return cmd.sendResult(.{ .root = node.writer(.{}) }, .{});
}
// https://chromedevtools.github.io/devtools-protocol/tot/DOM/#method-performSearch
@@ -118,6 +116,7 @@ fn getSearchResults(cmd: anytype) !void {
}
const testing = @import("../testing.zig");
+
test "cdp.dom: getSearchResults unknown search id" {
var ctx = testing.context();
defer ctx.deinit();
@@ -149,7 +148,7 @@ test "cdp.dom: search flow" {
.method = "DOM.getSearchResults",
.params = .{ .searchId = "0", .fromIndex = 0, .toIndex = 2 },
});
- try ctx.expectSentResult(.{ .nodeIds = &.{ 0, 1 } }, .{ .id = 13 });
+ try ctx.expectSentResult(.{ .nodeIds = &.{ 0, 2 } }, .{ .id = 13 });
// different fromIndex
try ctx.processMessage(.{
@@ -157,7 +156,7 @@ test "cdp.dom: search flow" {
.method = "DOM.getSearchResults",
.params = .{ .searchId = "0", .fromIndex = 1, .toIndex = 2 },
});
- try ctx.expectSentResult(.{ .nodeIds = &.{1} }, .{ .id = 14 });
+ try ctx.expectSentResult(.{ .nodeIds = &.{2} }, .{ .id = 14 });
// different toIndex
try ctx.processMessage(.{
diff --git a/src/cdp/testing.zig b/src/cdp/testing.zig
index 759c3a0e..d559ab78 100644
--- a/src/cdp/testing.zig
+++ b/src/cdp/testing.zig
@@ -26,11 +26,12 @@ const main = @import("cdp.zig");
const parser = @import("netsurf");
const App = @import("../app.zig").App;
-pub const allocator = @import("../testing.zig").allocator;
-
-pub const expectEqual = @import("../testing.zig").expectEqual;
-pub const expectError = @import("../testing.zig").expectError;
-pub const expectEqualSlices = @import("../testing.zig").expectEqualSlices;
+const base = @import("../testing.zig");
+pub const allocator = base.allocator;
+pub const expectJson = base.expectJson;
+pub const expectEqual = base.expectEqual;
+pub const expectError = base.expectError;
+pub const expectEqualSlices = base.expectEqualSlices;
pub const Document = @import("../testing.zig").Document;
@@ -310,47 +311,5 @@ pub fn context() TestContext {
fn compareExpectedToSent(expected: []const u8, actual: json.Value) !bool {
const expected_value = try std.json.parseFromSlice(json.Value, std.testing.allocator, expected, .{});
defer expected_value.deinit();
- return compareJsonValues(expected_value.value, actual);
-}
-
-fn compareJsonValues(a: std.json.Value, b: std.json.Value) bool {
- if (!std.mem.eql(u8, @tagName(a), @tagName(b))) {
- return false;
- }
-
- switch (a) {
- .null => return true,
- .bool => return a.bool == b.bool,
- .integer => return a.integer == b.integer,
- .float => return a.float == b.float,
- .number_string => return std.mem.eql(u8, a.number_string, b.number_string),
- .string => return std.mem.eql(u8, a.string, b.string),
- .array => {
- const a_len = a.array.items.len;
- const b_len = b.array.items.len;
- if (a_len != b_len) {
- return false;
- }
- for (a.array.items, b.array.items) |a_item, b_item| {
- if (compareJsonValues(a_item, b_item) == false) {
- return false;
- }
- }
- return true;
- },
- .object => {
- var it = a.object.iterator();
- while (it.next()) |entry| {
- const key = entry.key_ptr.*;
- if (b.object.get(key)) |b_item| {
- if (compareJsonValues(entry.value_ptr.*, b_item) == false) {
- return false;
- }
- } else {
- return false;
- }
- }
- return true;
- },
- }
+ return base.isEqualJson(expected_value.value, actual);
}
diff --git a/src/testing.zig b/src/testing.zig
index 309938c6..c8c77503 100644
--- a/src/testing.zig
+++ b/src/testing.zig
@@ -17,14 +17,15 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
-const parser = @import("netsurf");
+const parser = @import("netsurf");
pub const allocator = std.testing.allocator;
pub const expectError = std.testing.expectError;
pub const expectString = std.testing.expectEqualStrings;
pub const expectEqualSlices = std.testing.expectEqualSlices;
const App = @import("app.zig").App;
+const Allocator = std.mem.Allocator;
// Merged std.testing.expectEqual and std.testing.expectString
// can be useful when testing fields of an anytype an you don't know
@@ -217,12 +218,93 @@ pub const Document = struct {
pub fn querySelectorAll(self: *Document, selector: []const u8) ![]const *parser.Node {
const css = @import("dom/css.zig");
- const node_list = try css.querySelectorAll(self.arena.allocator(), parser.documentToNode(self.doc), selector);
+ const node_list = try css.querySelectorAll(self.arena.allocator(), self.asNode(), selector);
return node_list.nodes.items;
}
pub fn querySelector(self: *Document, selector: []const u8) !?*parser.Node {
const css = @import("dom/css.zig");
- return css.querySelector(self.arena.allocator(), parser.documentToNode(self.doc), selector);
+ return css.querySelector(self.arena.allocator(), self.asNode(), selector);
+ }
+
+ pub fn asNode(self: *const Document) *parser.Node {
+ return parser.documentToNode(self.doc);
}
};
+
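+// Compares two values as JSON. Each argument may be a std.json.Value, a JSON
+// string, or any value that std.json.stringifyAlloc can serialize. Object
+// comparison is one-directional: every key in `a` must exist in `b` with an
+// equal value, but `b` may carry extra keys.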
+pub fn expectJson(a: anytype, b: anytype) !void {
+ var arena = std.heap.ArenaAllocator.init(allocator);
+ defer arena.deinit();
+
+ const aa = arena.allocator();
+
+ const a_value = try convertToJson(aa, a);
+ const b_value = try convertToJson(aa, b);
+
+ errdefer {
+ const a_json = std.json.stringifyAlloc(aa, a_value, .{ .whitespace = .indent_2 }) catch unreachable;
+ const b_json = std.json.stringifyAlloc(aa, b_value, .{ .whitespace = .indent_2 }) catch unreachable;
+ std.debug.print("== Expected ==\n{s}\n\n== Actual ==\n{s}", .{ a_json, b_json });
+ }
+
+ try expectJsonValue(a_value, b_value);
+}
+
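+// Same comparison as expectJson, but reports the result as a bool instead of
+// failing the test (used by cdp/testing.zig's compareExpectedToSent).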
+pub fn isEqualJson(a: anytype, b: anytype) !bool {
+ var arena = std.heap.ArenaAllocator.init(allocator);
+ defer arena.deinit();
+
+ const aa = arena.allocator();
+ const a_value = try convertToJson(aa, a);
+ const b_value = try convertToJson(aa, b);
+ expectJsonValue(a_value, b_value) catch return false;
+ return true;
+}
+
+fn convertToJson(arena: Allocator, value: anytype) !std.json.Value {
+ const T = @TypeOf(value);
+ if (T == std.json.Value) {
+ return value;
+ }
+
+ var str: []const u8 = undefined;
+ if (T == []u8 or T == []const u8 or comptime isStringArray(T)) {
+ str = value;
+ } else {
+ str = try std.json.stringifyAlloc(arena, value, .{});
+ }
+ return std.json.parseFromSliceLeaky(std.json.Value, arena, str, .{});
+}
+
+fn expectJsonValue(a: std.json.Value, b: std.json.Value) !void {
+ try expectEqual(@tagName(a), @tagName(b));
+
+ // the tag check above guarantees a and b are the same variant (e.g. if a is an integer, so is b)
+ switch (a) {
+ .null => return,
+ .bool => try expectEqual(a.bool, b.bool),
+ .integer => try expectEqual(a.integer, b.integer),
+ .float => try expectEqual(a.float, b.float),
+ .number_string => try expectEqual(a.number_string, b.number_string),
+ .string => try expectEqual(a.string, b.string),
+ .array => {
+ const a_len = a.array.items.len;
+ const b_len = b.array.items.len;
+ try expectEqual(a_len, b_len);
+ for (a.array.items, b.array.items) |a_item, b_item| {
+ try expectJsonValue(a_item, b_item);
+ }
+ },
+ .object => {
+ var it = a.object.iterator();
+ while (it.next()) |entry| {
+ const key = entry.key_ptr.*;
+ if (b.object.get(key)) |b_item| {
+ try expectJsonValue(entry.value_ptr.*, b_item);
+ } else {
+ return error.MissingKey;
+ }
+ }
+ },
+ }
+}