43 Commits

Author SHA1 Message Date
Muki Kiboigo
7d96a25c55 try setting to weak instead of deinit 2025-09-17 06:39:27 -07:00
Muki Kiboigo
4d1e416299 use fetch logging scope, clean some comments 2025-09-16 12:41:35 -07:00
Muki Kiboigo
3badcdbdbd stop using destructor callback for fetch 2025-09-16 12:38:50 -07:00
Muki Kiboigo
fcd82b2c14 htmlRunner for ReadableStream tests, fix ReadableStream enqueue 2025-09-16 12:18:44 -07:00
Muki Kiboigo
d0621510cc use Env.PersistentPromiseResolver 2025-09-16 12:09:54 -07:00
Karl Seguin
2a7a8bc2a6 remove meaningless text from test 2025-09-16 11:08:28 -07:00
Karl Seguin
af916dea1d fix arena, add fetch test 2025-09-16 11:08:27 -07:00
Karl Seguin
31335fc4fb Start working on HTMLSlotElement 2025-09-16 11:08:27 -07:00
Muki Kiboigo
c84634093d use content length to reserve body size 2025-09-16 11:08:27 -07:00
Muki Kiboigo
37d8d2642d copy our Request headers into the HTTP client 2025-09-16 11:08:27 -07:00
Muki Kiboigo
0423a178e9 migrate fetch tests to htmlRunner 2025-09-16 11:08:27 -07:00
Muki Kiboigo
7acf67d668 properly handle closed for ReadableStream 2025-09-16 11:08:27 -07:00
Muki Kiboigo
ef1fece40c deinit persistent promise resolver 2025-09-16 11:08:27 -07:00
Muki Kiboigo
ebb590250f simplify cloning of Req/Resp 2025-09-16 11:08:26 -07:00
Muki Kiboigo
03130a95d8 use call arena for json in Req/Resp 2025-09-16 11:08:26 -07:00
Muki Kiboigo
e133717f7f simplify Headers 2025-09-16 11:08:26 -07:00
Muki Kiboigo
968c695da1 headers iterators should not allocate 2025-09-16 11:08:26 -07:00
Muki Kiboigo
707116a030 use destructor callback for FetchContext 2025-09-16 11:08:26 -07:00
Muki Kiboigo
01966f41ff support object as HeadersInit 2025-09-16 11:08:26 -07:00
Muki Kiboigo
141d17dd55 add logging on fetch error callback 2025-09-16 11:08:26 -07:00
sjorsdonkers
a3c2daf306 retain value, avoid str alloc 2025-09-16 11:08:25 -07:00
sjorsdonkers
dc60fac90d avoid explicit memcpy 2025-09-16 11:08:25 -07:00
sjorsdonkers
a5e2e8ea15 remove length check of fixed size 2025-09-16 11:08:25 -07:00
sjorsdonkers
8295c2abe5 jsValueToZig for fixed sized arrays 2025-09-16 11:08:25 -07:00
Muki Kiboigo
5997be89f6 implement remaining ReadableStream functionality 2025-09-16 11:08:25 -07:00
Muki Kiboigo
1c89cfe5d4 working Header iterators 2025-09-16 11:08:25 -07:00
Muki Kiboigo
b5021bd9fa TypeError when Stream is locked 2025-09-16 11:08:25 -07:00
Muki Kiboigo
4fd365b520 cleaning up various Headers routines 2025-09-16 11:08:25 -07:00
Muki Kiboigo
479cd5ab1a use proper Headers in fetch() 2025-09-16 11:08:24 -07:00
Muki Kiboigo
8285cbcaa9 expand Request/Response interfaces 2025-09-16 11:08:24 -07:00
Muki Kiboigo
545d97b5c0 expand Headers interface 2025-09-16 11:08:24 -07:00
Muki Kiboigo
11016abdd3 remove debug logging in ReadableStream 2025-09-16 11:08:24 -07:00
Muki Kiboigo
066df87dd4 move fetch() into fetch.zig 2025-09-16 11:08:24 -07:00
Muki Kiboigo
91899912d8 add bodyUsed checks on Request and Response 2025-09-16 11:08:24 -07:00
Muki Kiboigo
4ceca6b90b more Headers compatibility 2025-09-16 11:08:24 -07:00
Muki Kiboigo
ec936417c6 add fetch to cdp domain 2025-09-16 11:08:23 -07:00
Muki Kiboigo
4b75b33eb3 add json response method 2025-09-16 11:08:23 -07:00
Muki Kiboigo
1d7e731034 basic readable stream working 2025-09-16 11:08:23 -07:00
Muki Kiboigo
ab60f64452 proper fetch method and body setting 2025-09-16 11:08:23 -07:00
Muki Kiboigo
9757ea7b0f fetch callback logging 2025-09-16 11:08:23 -07:00
Muki Kiboigo
855583874f request url as null terminated 2025-09-16 11:08:23 -07:00
Muki Kiboigo
9efc27c2bb initial fetch in zig 2025-09-16 11:08:23 -07:00
Muki Kiboigo
cab5117d85 remove polyfill and add req/resp 2025-09-16 11:08:19 -07:00
147 changed files with 1105 additions and 1331 deletions

View File

@@ -36,7 +36,6 @@ pub const App = struct {
http_connect_timeout_ms: ?u31 = null,
http_max_host_open: ?u8 = null,
http_max_concurrent: ?u8 = null,
user_agent: [:0]const u8,
};
pub fn init(allocator: Allocator, config: Config) !*App {
@@ -54,7 +53,6 @@ pub const App = struct {
.http_proxy = config.http_proxy,
.tls_verify_host = config.tls_verify_host,
.proxy_bearer_token = config.proxy_bearer_token,
.user_agent = config.user_agent,
});
errdefer http.deinit();

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const log = @import("../log.zig");
const Allocator = std.mem.Allocator;
const Scheduler = @This();

View File

@@ -161,7 +161,7 @@ pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
remote_url = try URL.stitch(page.arena, src, page.url.raw, .{ .null_terminated = true });
source = .{ .remote = .{} };
} else {
const inline_source = parser.nodeTextContent(@ptrCast(element)) orelse return;
const inline_source = try parser.nodeTextContent(@ptrCast(element)) orelse return;
source = .{ .@"inline" = inline_source };
}
@@ -205,7 +205,7 @@ pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
errdefer pending_script.deinit();
var headers = try self.client.newHeaders();
var headers = try Http.Headers.init();
try page.requestCookie(.{}).headersForRequest(page.arena, remote_url.?, &headers);
try self.client.request(.{
@@ -273,7 +273,7 @@ pub fn blockingGet(self: *ScriptManager, url: [:0]const u8) !BlockingResult {
.buffer_pool = &self.buffer_pool,
};
var headers = try self.client.newHeaders();
var headers = try Http.Headers.init();
try self.page.requestCookie(.{}).headersForRequest(self.page.arena, url, &headers);
var client = self.client;

View File

@@ -31,6 +31,7 @@ const parser = @import("netsurf.zig");
const DataSet = @import("html/DataSet.zig");
const ShadowRoot = @import("dom/shadow_root.zig").ShadowRoot;
const StyleSheet = @import("cssom/StyleSheet.zig");
const CSSStyleSheet = @import("cssom/CSSStyleSheet.zig");
const CSSStyleDeclaration = @import("cssom/CSSStyleDeclaration.zig");
// for HTMLScript (but probably needs to be added to more)

View File

@@ -20,6 +20,7 @@ const std = @import("std");
const builtin = @import("builtin");
const log = @import("../../log.zig");
const Allocator = std.mem.Allocator;
const Page = @import("../page.zig").Page;
const JsObject = @import("../env.zig").Env.JsObject;

View File

@@ -46,15 +46,17 @@ pub fn parse(alloc: std.mem.Allocator, s: []const u8, opts: parser.ParseOptions)
// matchFirst calls m.match with the first node that matches the selector s, from the
// descendants of n, and returns true. If none matches, it returns false.
pub fn matchFirst(s: *const Selector, node: anytype, m: anytype) !bool {
var child = node.firstChild();
while (child) |c| {
if (try s.match(c)) {
try m.match(c);
var c = try node.firstChild();
while (true) {
if (c == null) break;
if (try s.match(c.?)) {
try m.match(c.?);
return true;
}
if (try matchFirst(s, c, m)) return true;
child = c.nextSibling();
if (try matchFirst(s, c.?, m)) return true;
c = try c.?.nextSibling();
}
return false;
}
@@ -62,11 +64,13 @@ pub fn matchFirst(s: *const Selector, node: anytype, m: anytype) !bool {
// matchAll calls m.match with all the nodes that match the selector s, from the
// descendants of n.
pub fn matchAll(s: *const Selector, node: anytype, m: anytype) !void {
var child = node.firstChild();
while (child) |c| {
if (try s.match(c)) try m.match(c);
try matchAll(s, c, m);
child = c.nextSibling();
var c = try node.firstChild();
while (true) {
if (c == null) break;
if (try s.match(c.?)) try m.match(c.?);
try matchAll(s, c.?, m);
c = try c.?.nextSibling();
}
}
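
A minimal, self-contained sketch of the loop shape used above (the FakeNode type, names, and test are hypothetical, not part of this change): once firstChild()/nextSibling() return error unions, the old `while (child) |c|` capture can no longer wrap the `try`, so iteration keeps an explicit optional and unwraps it with `.?`.

const std = @import("std");

const DomError = error{NoMemory};

// Hypothetical sibling-linked node standing in for the libdom-backed wrapper;
// its accessors return error unions (!?T), like the patched API above.
const FakeNode = struct {
    first: ?*const FakeNode = null,
    next: ?*const FakeNode = null,

    fn firstChild(n: *const FakeNode) DomError!?*const FakeNode {
        return n.first;
    }

    fn nextSibling(n: *const FakeNode) DomError!?*const FakeNode {
        return n.next;
    }
};

// Same traversal shape as the patched matchAll: explicit optional plus `.?`
// instead of `while (child) |c|`, because each accessor call now needs `try`.
fn countDescendants(node: *const FakeNode) DomError!usize {
    var total: usize = 0;
    var c = try node.firstChild();
    while (c != null) {
        total += 1;
        total += try countDescendants(c.?);
        c = try c.?.nextSibling();
    }
    return total;
}

test "traversal sketch" {
    const leaf = FakeNode{};
    const p = FakeNode{ .first = &leaf };
    const body = FakeNode{ .first = &p };
    try std.testing.expectEqual(@as(usize, 2), try countDescendants(&body));
}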

View File

@@ -26,67 +26,71 @@ const Allocator = std.mem.Allocator;
pub const Node = struct {
node: *parser.Node,
pub fn firstChild(n: Node) ?Node {
const c = parser.nodeFirstChild(n.node);
pub fn firstChild(n: Node) !?Node {
const c = try parser.nodeFirstChild(n.node);
if (c) |cc| return .{ .node = cc };
return null;
}
pub fn lastChild(n: Node) ?Node {
const c = parser.nodeLastChild(n.node);
pub fn lastChild(n: Node) !?Node {
const c = try parser.nodeLastChild(n.node);
if (c) |cc| return .{ .node = cc };
return null;
}
pub fn nextSibling(n: Node) ?Node {
const c = parser.nodeNextSibling(n.node);
pub fn nextSibling(n: Node) !?Node {
const c = try parser.nodeNextSibling(n.node);
if (c) |cc| return .{ .node = cc };
return null;
}
pub fn prevSibling(n: Node) ?Node {
const c = parser.nodePreviousSibling(n.node);
pub fn prevSibling(n: Node) !?Node {
const c = try parser.nodePreviousSibling(n.node);
if (c) |cc| return .{ .node = cc };
return null;
}
pub fn parent(n: Node) ?Node {
const c = parser.nodeParentNode(n.node);
pub fn parent(n: Node) !?Node {
const c = try parser.nodeParentNode(n.node);
if (c) |cc| return .{ .node = cc };
return null;
}
pub fn isElement(n: Node) bool {
return parser.nodeType(n.node) == .element;
const t = parser.nodeType(n.node) catch return false;
return t == .element;
}
pub fn isDocument(n: Node) bool {
return parser.nodeType(n.node) == .document;
const t = parser.nodeType(n.node) catch return false;
return t == .document;
}
pub fn isComment(n: Node) bool {
return parser.nodeType(n.node) == .comment;
const t = parser.nodeType(n.node) catch return false;
return t == .comment;
}
pub fn isText(n: Node) bool {
return parser.nodeType(n.node) == .text;
const t = parser.nodeType(n.node) catch return false;
return t == .text;
}
pub fn text(n: Node) ?[]const u8 {
const data = parser.nodeTextContent(n.node);
pub fn text(n: Node) !?[]const u8 {
const data = try parser.nodeTextContent(n.node);
if (data == null) return null;
if (data.?.len == 0) return null;
return std.mem.trim(u8, data.?, &std.ascii.whitespace);
}
pub fn isEmptyText(n: Node) bool {
const data = parser.nodeTextContent(n.node);
pub fn isEmptyText(n: Node) !bool {
const data = try parser.nodeTextContent(n.node);
if (data == null) return true;
if (data.?.len == 0) return true;
@@ -94,7 +98,7 @@ pub const Node = struct {
}
pub fn tag(n: Node) ![]const u8 {
return parser.nodeName(n.node);
return try parser.nodeName(n.node);
}
pub fn attr(n: Node, key: []const u8) !?[]const u8 {
@@ -136,7 +140,7 @@ const MatcherTest = struct {
test "Browser.CSS.Libdom: matchFirst" {
const alloc = std.testing.allocator;
parser.init();
try parser.init();
defer parser.deinit();
var matcher = MatcherTest.init(alloc);
@@ -281,7 +285,7 @@ test "Browser.CSS.Libdom: matchFirst" {
test "Browser.CSS.Libdom: matchAll" {
const alloc = std.testing.allocator;
parser.init();
try parser.init();
defer parser.deinit();
var matcher = MatcherTest.init(alloc);
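
The wrapper above mixes two error-handling styles; here is a small, hypothetical sketch (the names and the failing stub are illustrative only): boolean predicates such as isElement() swallow the libdom error with `catch return false`, while accessors such as text() and tag() propagate it with `try`.

const std = @import("std");

const DomError = error{NoMemory};
const NodeType = enum { element, text };

// Hypothetical stand-in for a libdom call that may fail.
fn nodeTypeRaw(fail: bool) DomError!NodeType {
    if (fail) return error.NoMemory;
    return .element;
}

// Predicate style: a failure simply means "not an element".
fn isElement(fail: bool) bool {
    const t = nodeTypeRaw(fail) catch return false;
    return t == .element;
}

// Accessor style: the error is propagated to the caller.
fn kindName(fail: bool) DomError![]const u8 {
    return switch (try nodeTypeRaw(fail)) {
        .element => "element",
        .text => "text",
    };
}

test "catch-vs-try sketch" {
    try std.testing.expect(isElement(false));
    try std.testing.expect(!isElement(true));
    try std.testing.expectEqualStrings("element", try kindName(false));
    try std.testing.expectError(error.NoMemory, kindName(true));
}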

View File

@@ -821,8 +821,7 @@ pub const Parser = struct {
// nameStart returns whether c can be the first character of an identifier
// (not counting an initial hyphen, or an escape sequence).
fn nameStart(c: u8) bool {
return 'a' <= c and c <= 'z' or 'A' <= c and c <= 'Z' or c == '_' or c > 127 or
'0' <= c and c <= '9';
return 'a' <= c and c <= 'z' or 'A' <= c and c <= 'Z' or c == '_' or c > 127;
}
// nameChar returns whether c can be a character within an identifier
@@ -891,7 +890,7 @@ test "parser.parseIdentifier" {
err: bool = false,
}{
.{ .s = "x", .exp = "x" },
.{ .s = "96", .exp = "96", .err = false },
.{ .s = "96", .exp = "", .err = true },
.{ .s = "-x", .exp = "-x" },
.{ .s = "r\\e9 sumé", .exp = "résumé" },
.{ .s = "r\\0000e9 sumé", .exp = "résumé" },
@@ -976,7 +975,6 @@ test "parser.parse" {
.{ .s = ":root", .exp = .{ .pseudo_class = .root } },
.{ .s = ".\\:bar", .exp = .{ .class = ":bar" } },
.{ .s = ".foo\\:bar", .exp = .{ .class = "foo:bar" } },
.{ .s = "[class=75c0fa18a94b9e3a6b8e14d6cbe688a27f5da10a]", .exp = .{ .attribute = .{ .key = "class", .val = "75c0fa18a94b9e3a6b8e14d6cbe688a27f5da10a", .op = .eql } } },
};
for (testcases) |tc| {

View File

@@ -334,39 +334,41 @@ pub const Selector = union(enum) {
if (!try v.second.match(n)) return false;
// The first must match an ancestor.
var parent = n.parent();
while (parent) |p| {
if (try v.first.match(p)) {
var p = try n.parent();
while (p != null) {
if (try v.first.match(p.?)) {
return true;
}
parent = p.parent();
p = try p.?.parent();
}
return false;
},
.child => {
const p = n.parent() orelse return false;
return try v.second.match(n) and try v.first.match(p);
const p = try n.parent();
if (p == null) return false;
return try v.second.match(n) and try v.first.match(p.?);
},
.next_sibling => {
if (!try v.second.match(n)) return false;
var child = n.prevSibling();
while (child) |c| {
if (c.isText() or c.isComment()) {
child = c.prevSibling();
var c = try n.prevSibling();
while (c != null) {
if (c.?.isText() or c.?.isComment()) {
c = try c.?.prevSibling();
continue;
}
return try v.first.match(c);
return try v.first.match(c.?);
}
return false;
},
.subsequent_sibling => {
if (!try v.second.match(n)) return false;
var child = n.prevSibling();
while (child) |c| {
if (try v.first.match(c)) return true;
child = c.prevSibling();
var c = try n.prevSibling();
while (c != null) {
if (try v.first.match(c.?)) return true;
c = try c.?.prevSibling();
}
return false;
},
@@ -436,10 +438,10 @@ pub const Selector = union(enum) {
// Only containsOwn is implemented.
if (v.own == false) return Error.UnsupportedContainsPseudoClass;
var child = n.firstChild();
while (child) |c| {
if (c.isText()) {
const text = c.text();
var c = try n.firstChild();
while (c != null) {
if (c.?.isText()) {
const text = try c.?.text();
if (text) |_text| {
if (contains(_text, v.val, false)) { // we are case sensitive. Is this correct behavior?
return true;
@@ -447,7 +449,7 @@ pub const Selector = union(enum) {
}
}
child = c.nextSibling();
c = try c.?.nextSibling();
}
return false;
},
@@ -475,16 +477,16 @@ pub const Selector = union(enum) {
.empty => {
if (!n.isElement()) return false;
var child = n.firstChild();
while (child) |c| {
if (c.isElement()) return false;
var c = try n.firstChild();
while (c != null) {
if (c.?.isElement()) return false;
if (c.isText()) {
if (c.isEmptyText()) continue;
if (c.?.isText()) {
if (try c.?.isEmptyText()) continue;
return false;
}
child = c.nextSibling();
c = try c.?.nextSibling();
}
return true;
@@ -492,7 +494,7 @@ pub const Selector = union(enum) {
.root => {
if (!n.isElement()) return false;
const p = n.parent();
const p = try n.parent();
return (p != null and p.?.isDocument());
},
.link => {
@@ -607,23 +609,24 @@ pub const Selector = union(enum) {
}
fn hasLegendInPreviousSiblings(n: anytype) anyerror!bool {
var child = n.prevSibling();
while (child) |c| {
const ctag = try c.tag();
var c = try n.prevSibling();
while (c != null) {
const ctag = try c.?.tag();
if (std.ascii.eqlIgnoreCase("legend", ctag)) return true;
child = c.prevSibling();
c = try c.?.prevSibling();
}
return false;
}
fn inDisabledFieldset(n: anytype) anyerror!bool {
const p = n.parent() orelse return false;
const p = try n.parent();
if (p == null) return false;
const ntag = try n.tag();
const ptag = try p.tag();
const ptag = try p.?.tag();
if (std.ascii.eqlIgnoreCase("fieldset", ptag) and
try p.attr("disabled") != null and
try p.?.attr("disabled") != null and
(!std.ascii.eqlIgnoreCase("legend", ntag) or try hasLegendInPreviousSiblings(n)))
{
return true;
@@ -639,7 +642,7 @@ pub const Selector = union(enum) {
// ```
// https://github.com/andybalholm/cascadia/blob/master/pseudo_classes.go#L434
return try inDisabledFieldset(p);
return try inDisabledFieldset(p.?);
}
fn langMatch(lang: []const u8, n: anytype) anyerror!bool {
@@ -653,8 +656,10 @@ pub const Selector = union(enum) {
}
// if the tag doesn't match, try the parent.
const p = n.parent() orelse return false;
return langMatch(lang, p);
const p = try n.parent();
if (p == null) return false;
return langMatch(lang, p.?);
}
// onlyChildMatch implements :only-child
@@ -662,24 +667,25 @@ pub const Selector = union(enum) {
fn onlyChildMatch(of_type: bool, n: anytype) anyerror!bool {
if (!n.isElement()) return false;
const p = n.parent() orelse return false;
const p = try n.parent();
if (p == null) return false;
const ntag = try n.tag();
var count: usize = 0;
var child = p.firstChild();
var c = try p.?.firstChild();
// loop over all of n's siblings.
while (child) |c| {
while (c != null) {
// ignore non elements or others tags if of-type is true.
if (!c.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.tag()))) {
child = c.nextSibling();
if (!c.?.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.?.tag()))) {
c = try c.?.nextSibling();
continue;
}
count += 1;
if (count > 1) return false;
child = c.nextSibling();
c = try c.?.nextSibling();
}
return count == 1;
@@ -690,25 +696,27 @@ pub const Selector = union(enum) {
fn simpleNthLastChildMatch(b: isize, of_type: bool, n: anytype) anyerror!bool {
if (!n.isElement()) return false;
const p = n.parent() orelse return false;
const p = try n.parent();
if (p == null) return false;
const ntag = try n.tag();
var count: isize = 0;
var child = p.lastChild();
var c = try p.?.lastChild();
// loop over all of n's siblings.
while (child) |c| {
while (c != null) {
// ignore non elements or others tags if of-type is true.
if (!c.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.tag()))) {
child = c.prevSibling();
if (!c.?.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.?.tag()))) {
c = try c.?.prevSibling();
continue;
}
count += 1;
if (n.eql(c)) return count == b;
if (n.eql(c.?)) return count == b;
if (count >= b) return false;
child = c.prevSibling();
c = try c.?.prevSibling();
}
return false;
@@ -719,25 +727,27 @@ pub const Selector = union(enum) {
fn simpleNthChildMatch(b: isize, of_type: bool, n: anytype) anyerror!bool {
if (!n.isElement()) return false;
const p = n.parent() orelse return false;
const p = try n.parent();
if (p == null) return false;
const ntag = try n.tag();
var count: isize = 0;
var child = p.firstChild();
var c = try p.?.firstChild();
// loop over all of n's siblings.
while (child) |c| {
while (c != null) {
// ignore non elements or others tags if of-type is true.
if (!c.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.tag()))) {
child = c.nextSibling();
if (!c.?.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.?.tag()))) {
c = try c.?.nextSibling();
continue;
}
count += 1;
if (n.eql(c)) return count == b;
if (n.eql(c.?)) return count == b;
if (count >= b) return false;
child = c.nextSibling();
c = try c.?.nextSibling();
}
return false;
@@ -749,27 +759,29 @@ pub const Selector = union(enum) {
fn nthChildMatch(a: isize, b: isize, last: bool, of_type: bool, n: anytype) anyerror!bool {
if (!n.isElement()) return false;
const p = n.parent() orelse return false;
const p = try n.parent();
if (p == null) return false;
const ntag = try n.tag();
var i: isize = -1;
var count: isize = 0;
var child = p.firstChild();
var c = try p.?.firstChild();
// loop over all of n's siblings.
while (child) |c| {
while (c != null) {
// ignore non elements or others tags if of-type is true.
if (!c.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.tag()))) {
child = c.nextSibling();
if (!c.?.isElement() or (of_type and !std.mem.eql(u8, ntag, try c.?.tag()))) {
c = try c.?.nextSibling();
continue;
}
count += 1;
if (n.eql(c)) {
if (n.eql(c.?)) {
i = count;
if (!last) break;
}
child = c.nextSibling();
c = try c.?.nextSibling();
}
if (i == -1) return false;
@@ -782,21 +794,21 @@ pub const Selector = union(enum) {
}
fn hasDescendantMatch(s: *const Selector, n: anytype) anyerror!bool {
var child = n.firstChild();
while (child) |c| {
if (try s.match(c)) return true;
if (c.isElement() and try hasDescendantMatch(s, c)) return true;
child = c.nextSibling();
var c = try n.firstChild();
while (c != null) {
if (try s.match(c.?)) return true;
if (c.?.isElement() and try hasDescendantMatch(s, c.?)) return true;
c = try c.?.nextSibling();
}
return false;
}
fn hasChildMatch(s: *const Selector, n: anytype) anyerror!bool {
var child = n.firstChild();
while (child) |c| {
if (try s.match(c)) return true;
child = c.nextSibling();
var c = try n.firstChild();
while (c != null) {
if (try s.match(c.?)) return true;
c = try c.?.nextSibling();
}
return false;
@@ -847,23 +859,23 @@ pub const NodeTest = struct {
name: []const u8 = "",
att: ?[]const u8 = null,
pub fn firstChild(n: *const NodeTest) ?*const NodeTest {
pub fn firstChild(n: *const NodeTest) !?*const NodeTest {
return n.child;
}
pub fn lastChild(n: *const NodeTest) ?*const NodeTest {
pub fn lastChild(n: *const NodeTest) !?*const NodeTest {
return n.last;
}
pub fn nextSibling(n: *const NodeTest) ?*const NodeTest {
pub fn nextSibling(n: *const NodeTest) !?*const NodeTest {
return n.sibling;
}
pub fn prevSibling(n: *const NodeTest) ?*const NodeTest {
pub fn prevSibling(n: *const NodeTest) !?*const NodeTest {
return n.prev;
}
pub fn parent(n: *const NodeTest) ?*const NodeTest {
pub fn parent(n: *const NodeTest) !?*const NodeTest {
return n.par;
}
@@ -879,7 +891,7 @@ pub const NodeTest = struct {
return false;
}
pub fn text(_: *const NodeTest) ?[]const u8 {
pub fn text(_: *const NodeTest) !?[]const u8 {
return null;
}
@@ -887,7 +899,7 @@ pub const NodeTest = struct {
return false;
}
pub fn isEmptyText(_: *const NodeTest) bool {
pub fn isEmptyText(_: *const NodeTest) !bool {
return false;
}
@@ -981,11 +993,6 @@ test "Browser.CSS.Selector: matchFirst" {
.n = .{ .child = &.{ .name = "p", .sibling = &.{ .name = "p", .att = "bar" } } },
.exp = 0,
},
.{
.q = "[foo=1baz]",
.n = .{ .child = &.{ .name = "p", .sibling = &.{ .name = "p", .att = "bar" } } },
.exp = 0,
},
.{
.q = "[foo!=bar]",
.n = .{ .child = &.{ .name = "p", .sibling = &.{ .name = "p", .att = "bar" } } },

View File

@@ -19,6 +19,7 @@
const std = @import("std");
const CSSRule = @import("CSSRule.zig");
const StyleSheet = @import("StyleSheet.zig").StyleSheet;
const CSSImportRule = CSSRule.CSSImportRule;

View File

@@ -94,6 +94,7 @@ pub const MessagePort = struct {
if (opts_ != null) {
log.warn(.web_api, "not implemented", .{ .feature = "MessagePort postMessage options" });
return error.NotImplemented;
}
try self.pair.dispatchOrQueue(obj, page.arena);

View File

@@ -25,24 +25,24 @@ pub const Attr = struct {
pub const prototype = *Node;
pub const subtype = .node;
pub fn get_namespaceURI(self: *parser.Attribute) ?[]const u8 {
return parser.nodeGetNamespace(parser.attributeToNode(self));
pub fn get_namespaceURI(self: *parser.Attribute) !?[]const u8 {
return try parser.nodeGetNamespace(parser.attributeToNode(self));
}
pub fn get_prefix(self: *parser.Attribute) ?[]const u8 {
return parser.nodeGetPrefix(parser.attributeToNode(self));
pub fn get_prefix(self: *parser.Attribute) !?[]const u8 {
return try parser.nodeGetPrefix(parser.attributeToNode(self));
}
pub fn get_localName(self: *parser.Attribute) ![]const u8 {
return parser.nodeLocalName(parser.attributeToNode(self));
return try parser.nodeLocalName(parser.attributeToNode(self));
}
pub fn get_name(self: *parser.Attribute) ![]const u8 {
return parser.attributeGetName(self);
return try parser.attributeGetName(self);
}
pub fn get_value(self: *parser.Attribute) !?[]const u8 {
return parser.attributeGetValue(self);
return try parser.attributeGetValue(self);
}
pub fn set_value(self: *parser.Attribute, v: []const u8) !?[]const u8 {

View File

@@ -51,7 +51,7 @@ pub const CharacterData = struct {
}
pub fn get_nextElementSibling(self: *parser.CharacterData) !?ElementUnion {
const res = parser.nodeNextElementSibling(parser.characterDataToNode(self));
const res = try parser.nodeNextElementSibling(parser.characterDataToNode(self));
if (res == null) {
return null;
}
@@ -59,7 +59,7 @@ pub const CharacterData = struct {
}
pub fn get_previousElementSibling(self: *parser.CharacterData) !?ElementUnion {
const res = parser.nodePreviousElementSibling(parser.characterDataToNode(self));
const res = try parser.nodePreviousElementSibling(parser.characterDataToNode(self));
if (res == null) {
return null;
}
@@ -68,8 +68,8 @@ pub const CharacterData = struct {
// Read/Write attributes
pub fn get_data(self: *parser.CharacterData) []const u8 {
return parser.characterDataData(self);
pub fn get_data(self: *parser.CharacterData) ![]const u8 {
return try parser.characterDataData(self);
}
pub fn set_data(self: *parser.CharacterData, data: []const u8) !void {
@@ -96,18 +96,18 @@ pub const CharacterData = struct {
}
pub fn _substringData(self: *parser.CharacterData, offset: u32, count: u32) ![]const u8 {
return parser.characterDataSubstringData(self, offset, count);
return try parser.characterDataSubstringData(self, offset, count);
}
// netsurf's CharacterData (text, comment) doesn't implement the
// dom_node_get_attributes and thus will crash if we try to call nodeIsEqualNode.
pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) bool {
if (parser.nodeType(@ptrCast(@alignCast(self))) != parser.nodeType(other_node)) {
pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) !bool {
if (try parser.nodeType(@ptrCast(@alignCast(self))) != try parser.nodeType(other_node)) {
return false;
}
const other: *parser.CharacterData = @ptrCast(other_node);
if (std.mem.eql(u8, get_data(self), get_data(other)) == false) {
if (std.mem.eql(u8, try get_data(self), try get_data(other)) == false) {
return false;
}

View File

@@ -18,6 +18,7 @@
const std = @import("std");
const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
@@ -155,14 +156,22 @@ pub const Document = struct {
// the spec changed to return an HTMLCollection instead.
// That's why we reimplemented getElementsByTagName by using an
// HTMLCollection in zig here.
pub fn _getElementsByTagName(self: *parser.Document, tag_name: Env.String) !collection.HTMLCollection {
return collection.HTMLCollectionByTagName(parser.documentToNode(self), tag_name.string, .{
pub fn _getElementsByTagName(
self: *parser.Document,
tag_name: []const u8,
page: *Page,
) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(page.arena, parser.documentToNode(self), tag_name, .{
.include_root = true,
});
}
pub fn _getElementsByClassName(self: *parser.Document, class_names: Env.String) !collection.HTMLCollection {
return collection.HTMLCollectionByClassName(parser.documentToNode(self), class_names.string, .{
pub fn _getElementsByClassName(
self: *parser.Document,
classNames: []const u8,
page: *Page,
) !collection.HTMLCollection {
return try collection.HTMLCollectionByClassName(page.arena, parser.documentToNode(self), classNames, .{
.include_root = true,
});
}

View File

@@ -38,8 +38,8 @@ pub const DocumentFragment = struct {
);
}
pub fn _isEqualNode(self: *parser.DocumentFragment, other_node: *parser.Node) bool {
const other_type = parser.nodeType(other_node);
pub fn _isEqualNode(self: *parser.DocumentFragment, other_node: *parser.Node) !bool {
const other_type = try parser.nodeType(other_node);
if (other_type != .document_fragment) {
return false;
}

View File

@@ -29,21 +29,21 @@ pub const DocumentType = struct {
pub const subtype = .node;
pub fn get_name(self: *parser.DocumentType) ![]const u8 {
return parser.documentTypeGetName(self);
return try parser.documentTypeGetName(self);
}
pub fn get_publicId(self: *parser.DocumentType) []const u8 {
return parser.documentTypeGetPublicId(self);
pub fn get_publicId(self: *parser.DocumentType) ![]const u8 {
return try parser.documentTypeGetPublicId(self);
}
pub fn get_systemId(self: *parser.DocumentType) []const u8 {
return parser.documentTypeGetSystemId(self);
pub fn get_systemId(self: *parser.DocumentType) ![]const u8 {
return try parser.documentTypeGetSystemId(self);
}
// netsurf's DocumentType doesn't implement the dom_node_get_attributes
// and thus will crash if we try to call nodeIsEqualNode.
pub fn _isEqualNode(self: *parser.DocumentType, other_node: *parser.Node) !bool {
if (parser.nodeType(other_node) != .document_type) {
if (try parser.nodeType(other_node) != .document_type) {
return false;
}
@@ -51,10 +51,10 @@ pub const DocumentType = struct {
if (std.mem.eql(u8, try get_name(self), try get_name(other)) == false) {
return false;
}
if (std.mem.eql(u8, get_publicId(self), get_publicId(other)) == false) {
if (std.mem.eql(u8, try get_publicId(self), try get_publicId(other)) == false) {
return false;
}
if (std.mem.eql(u8, get_systemId(self), get_systemId(other)) == false) {
if (std.mem.eql(u8, try get_systemId(self), try get_systemId(other)) == false) {
return false;
}
return true;

View File

@@ -19,7 +19,6 @@
const std = @import("std");
const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const css = @import("css.zig");
@@ -62,7 +61,7 @@ pub const Element = struct {
pub fn toInterfaceT(comptime T: type, e: *parser.Element) !T {
const tagname = try parser.elementGetTagName(e) orelse {
// If the owner's document is HTML, assume we have an HTMLElement.
const doc = parser.nodeOwnerDocument(parser.elementToNode(e));
const doc = try parser.nodeOwnerDocument(parser.elementToNode(e));
if (doc != null and !doc.?.is_html) {
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
}
@@ -74,7 +73,7 @@ pub const Element = struct {
const tag = parser.Tag.fromString(tagname) catch {
// If the owner's document is HTML, assume we have an HTMLElement.
const doc = parser.nodeOwnerDocument(parser.elementToNode(e));
const doc = try parser.nodeOwnerDocument(parser.elementToNode(e));
if (doc != null and doc.?.is_html) {
return .{ .HTMLElement = @as(*parser.ElementHTML, @ptrCast(e)) };
}
@@ -88,12 +87,12 @@ pub const Element = struct {
// JS funcs
// --------
pub fn get_namespaceURI(self: *parser.Element) ?[]const u8 {
return parser.nodeGetNamespace(parser.elementToNode(self));
pub fn get_namespaceURI(self: *parser.Element) !?[]const u8 {
return try parser.nodeGetNamespace(parser.elementToNode(self));
}
pub fn get_prefix(self: *parser.Element) ?[]const u8 {
return parser.nodeGetPrefix(parser.elementToNode(self));
pub fn get_prefix(self: *parser.Element) !?[]const u8 {
return try parser.nodeGetPrefix(parser.elementToNode(self));
}
pub fn get_localName(self: *parser.Element) ![]const u8 {
@@ -104,14 +103,6 @@ pub const Element = struct {
return try parser.nodeName(parser.elementToNode(self));
}
pub fn get_dir(self: *parser.Element) ![]const u8 {
return try parser.elementGetAttribute(self, "dir") orelse "";
}
pub fn set_dir(self: *parser.Element, dir: []const u8) !void {
return parser.elementSetAttribute(self, "dir", dir);
}
pub fn get_id(self: *parser.Element) ![]const u8 {
return try parser.elementGetAttribute(self, "id") orelse "";
}
@@ -159,7 +150,7 @@ pub const Element = struct {
pub fn set_innerHTML(self: *parser.Element, str: []const u8, page: *Page) !void {
const node = parser.elementToNode(self);
const doc = parser.nodeOwnerDocument(node) orelse return parser.DOMError.WrongDocument;
const doc = try parser.nodeOwnerDocument(node) orelse return parser.DOMError.WrongDocument;
// parse the fragment
const fragment = try parser.documentParseFragmentFromStr(doc, str);
@@ -177,9 +168,9 @@ pub const Element = struct {
// or an actual document. In a blank page, something like:
// x.innerHTML = '<script></script>';
// does _not_ create an empty script, but in a real page, it does. Weird.
const html = parser.nodeFirstChild(fragment_node) orelse return;
const head = parser.nodeFirstChild(html) orelse return;
const body = parser.nodeNextSibling(head) orelse return;
const html = try parser.nodeFirstChild(fragment_node) orelse return;
const head = try parser.nodeFirstChild(html) orelse return;
const body = try parser.nodeNextSibling(head) orelse return;
if (try parser.elementTag(self) == .template) {
// HTMLElementTemplate is special. We don't append these as children
@@ -188,11 +179,11 @@ pub const Element = struct {
// a new fragment
const clean = try parser.documentCreateDocumentFragment(doc);
const children = try parser.nodeGetChildNodes(body);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
for (0..ln) |_| {
// always index 0, because nodeAppendChild moves the node out of
// the nodeList and into the new tree
const child = parser.nodeListItem(children, 0) orelse continue;
const child = try parser.nodeListItem(children, 0) orelse continue;
_ = try parser.nodeAppendChild(@ptrCast(@alignCast(clean)), child);
}
@@ -206,22 +197,22 @@ pub const Element = struct {
{
// First, copy some of the head element
const children = try parser.nodeGetChildNodes(head);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
for (0..ln) |_| {
// always index 0, because nodeAppendChild moves the node out of
// the nodeList and into the new tree
const child = parser.nodeListItem(children, 0) orelse continue;
const child = try parser.nodeListItem(children, 0) orelse continue;
_ = try parser.nodeAppendChild(node, child);
}
}
{
const children = try parser.nodeGetChildNodes(body);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
for (0..ln) |_| {
// always index 0, because nodeAppendChild moves the node out of
// the nodeList and into the new tree
const child = parser.nodeListItem(children, 0) orelse continue;
const child = try parser.nodeListItem(children, 0) orelse continue;
_ = try parser.nodeAppendChild(node, child);
}
}
@@ -243,7 +234,7 @@ pub const Element = struct {
}
return parser.nodeToElement(current.node);
}
current = current.parent() orelse return null;
current = try current.parent() orelse return null;
}
}
@@ -359,18 +350,28 @@ pub const Element = struct {
return try parser.elementRemoveAttributeNode(self, attr);
}
pub fn _getElementsByTagName(self: *parser.Element, tag_name: Env.String) !collection.HTMLCollection {
return collection.HTMLCollectionByTagName(
pub fn _getElementsByTagName(
self: *parser.Element,
tag_name: []const u8,
page: *Page,
) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(
page.arena,
parser.elementToNode(self),
tag_name.string,
tag_name,
.{ .include_root = false },
);
}
pub fn _getElementsByClassName(self: *parser.Element, class_names: Env.String) !collection.HTMLCollection {
pub fn _getElementsByClassName(
self: *parser.Element,
classNames: []const u8,
page: *Page,
) !collection.HTMLCollection {
return try collection.HTMLCollectionByClassName(
page.arena,
parser.elementToNode(self),
class_names.string,
classNames,
.{ .include_root = false },
);
}
@@ -406,13 +407,13 @@ pub const Element = struct {
// NonDocumentTypeChildNode
// https://dom.spec.whatwg.org/#interface-nondocumenttypechildnode
pub fn get_previousElementSibling(self: *parser.Element) !?Union {
const res = parser.nodePreviousElementSibling(parser.elementToNode(self));
const res = try parser.nodePreviousElementSibling(parser.elementToNode(self));
if (res == null) return null;
return try toInterface(res.?);
}
pub fn get_nextElementSibling(self: *parser.Element) !?Union {
const res = parser.nodeNextElementSibling(parser.elementToNode(self));
const res = try parser.nodeNextElementSibling(parser.elementToNode(self));
if (res == null) return null;
return try toInterface(res.?);
}
@@ -425,7 +426,7 @@ pub const Element = struct {
while (true) {
next = try walker.get_next(root, next) orelse return null;
// ignore non-element nodes.
if (parser.nodeType(next.?) != .element) {
if (try parser.nodeType(next.?) != .element) {
continue;
}
const e = parser.nodeToElement(next.?);
@@ -473,7 +474,7 @@ pub const Element = struct {
// Returns a 0 DOMRect object if the element is eventually detached from the main window
pub fn _getBoundingClientRect(self: *parser.Element, page: *Page) !DOMRect {
// Since we are lazy rendering we need to do this check. We could store the renderer in a viewport such that it could cache these, but it would require tracking changes.
if (!page.isNodeAttached(parser.elementToNode(self))) {
if (!try page.isNodeAttached(parser.elementToNode(self))) {
return DOMRect{
.x = 0,
.y = 0,
@@ -492,7 +493,7 @@ pub const Element = struct {
// We do not render so it only always return the element's bounding rect.
// Returns an empty array if the element is eventually detached from the main window
pub fn _getClientRects(self: *parser.Element, page: *Page) ![]DOMRect {
if (!page.isNodeAttached(parser.elementToNode(self))) {
if (!try page.isNodeAttached(parser.elementToNode(self))) {
return &.{};
}
const heap_ptr = try page.call_arena.create(DOMRect);
@@ -548,7 +549,7 @@ pub const Element = struct {
}
// Not sure what to do if there is no owner document
const doc = parser.nodeOwnerDocument(@ptrCast(self)) orelse return error.InvalidArgument;
const doc = try parser.nodeOwnerDocument(@ptrCast(self)) orelse return error.InvalidArgument;
const fragment = try parser.documentCreateDocumentFragment(doc);
const sr = try page.arena.create(ShadowRoot);
sr.* = .{
@@ -594,7 +595,7 @@ pub const Element = struct {
// for related elements JIT by walking the tree, but there could be
// cases in libdom or the Zig WebAPI where this reference is kept
const as_node: *parser.Node = @ptrCast(self);
const parent = parser.nodeParentNode(as_node) orelse return;
const parent = try parser.nodeParentNode(as_node) orelse return;
_ = try Node._removeChild(parent, as_node);
}
};

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Env = @import("../env.zig").Env;
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
@@ -47,7 +48,7 @@ pub const EventTarget = struct {
pub fn toInterface(et: *parser.EventTarget, page: *Page) !Union {
// libdom assumes that all event targets are libdom nodes. They are not.
switch (parser.eventTargetInternalType(et)) {
switch (try parser.eventTargetInternalType(et)) {
.libdom_node => {
return .{ .node = try nod.Node.toInterface(@as(*parser.Node, @ptrCast(et))) };
},

View File

@@ -52,13 +52,13 @@ pub const MatchByTagName = struct {
tag: []const u8,
is_wildcard: bool,
fn init(tag_name: []const u8) MatchByTagName {
fn init(arena: Allocator, tag_name: []const u8) !MatchByTagName {
if (std.mem.eql(u8, tag_name, "*")) {
return .{ .tag = "*", .is_wildcard = true };
}
return .{
.tag = tag_name,
.tag = try arena.dupe(u8, tag_name),
.is_wildcard = false,
};
}
@@ -69,14 +69,15 @@ pub const MatchByTagName = struct {
};
pub fn HTMLCollectionByTagName(
arena: Allocator,
root: ?*parser.Node,
tag_name: []const u8,
opts: Opts,
) HTMLCollection {
return .{
) !HTMLCollection {
return HTMLCollection{
.root = root,
.walker = .{ .walkerDepthFirst = .{} },
.matcher = .{ .matchByTagName = MatchByTagName.init(tag_name) },
.matcher = .{ .matchByTagName = try MatchByTagName.init(arena, tag_name) },
.mutable = opts.mutable,
.include_root = opts.include_root,
};
@@ -85,9 +86,9 @@ pub fn HTMLCollectionByTagName(
pub const MatchByClassName = struct {
class_names: []const u8,
fn init(class_names: []const u8) !MatchByClassName {
fn init(arena: Allocator, class_names: []const u8) !MatchByClassName {
return .{
.class_names = class_names,
.class_names = try arena.dupe(u8, class_names),
};
}
@@ -106,14 +107,15 @@ pub const MatchByClassName = struct {
};
pub fn HTMLCollectionByClassName(
arena: Allocator,
root: ?*parser.Node,
class_names: []const u8,
classNames: []const u8,
opts: Opts,
) !HTMLCollection {
return HTMLCollection{
.root = root,
.walker = .{ .walkerDepthFirst = .{} },
.matcher = .{ .matchByClassName = try MatchByClassName.init(class_names) },
.matcher = .{ .matchByClassName = try MatchByClassName.init(arena, classNames) },
.mutable = opts.mutable,
.include_root = opts.include_root,
};
@@ -122,8 +124,10 @@ pub fn HTMLCollectionByClassName(
pub const MatchByName = struct {
name: []const u8,
fn init(name: []const u8) !MatchByName {
return .{ .name = name };
fn init(arena: Allocator, name: []const u8) !MatchByName {
return .{
.name = try arena.dupe(u8, name),
};
}
pub fn match(self: MatchByName, node: *parser.Node) !bool {
@@ -134,6 +138,7 @@ pub const MatchByName = struct {
};
pub fn HTMLCollectionByName(
arena: Allocator,
root: ?*parser.Node,
name: []const u8,
opts: Opts,
@@ -141,7 +146,7 @@ pub fn HTMLCollectionByName(
return HTMLCollection{
.root = root,
.walker = .{ .walkerDepthFirst = .{} },
.matcher = .{ .matchByName = try MatchByName.init(name) },
.matcher = .{ .matchByName = try MatchByName.init(arena, name) },
.mutable = opts.mutable,
.include_root = opts.include_root,
};
@@ -198,8 +203,8 @@ pub fn HTMLCollectionChildren(
};
}
pub fn HTMLCollectionEmpty() HTMLCollection {
return .{
pub fn HTMLCollectionEmpty() !HTMLCollection {
return HTMLCollection{
.root = null,
.walker = .{ .walkerNone = .{} },
.matcher = .{ .matchFalse = .{} },
@@ -221,11 +226,14 @@ pub const MatchByLinks = struct {
}
};
pub fn HTMLCollectionByLinks(root: ?*parser.Node, opts: Opts) HTMLCollection {
return .{
pub fn HTMLCollectionByLinks(
root: ?*parser.Node,
opts: Opts,
) !HTMLCollection {
return HTMLCollection{
.root = root,
.walker = .{ .walkerDepthFirst = .{} },
.matcher = .{ .matchByLinks = .{} },
.matcher = .{ .matchByLinks = MatchByLinks{} },
.mutable = opts.mutable,
.include_root = opts.include_root,
};
@@ -244,11 +252,14 @@ pub const MatchByAnchors = struct {
}
};
pub fn HTMLCollectionByAnchors(root: ?*parser.Node, opts: Opts) HTMLCollection {
return .{
pub fn HTMLCollectionByAnchors(
root: ?*parser.Node,
opts: Opts,
) !HTMLCollection {
return HTMLCollection{
.root = root,
.walker = .{ .walkerDepthFirst = .{} },
.matcher = .{ .matchByAnchors = .{} },
.matcher = .{ .matchByAnchors = MatchByAnchors{} },
.mutable = opts.mutable,
.include_root = opts.include_root,
};
@@ -333,7 +344,7 @@ pub const HTMLCollection = struct {
var node = try self.start() orelse return 0;
while (true) {
if (parser.nodeType(node) == .element) {
if (try parser.nodeType(node) == .element) {
if (try self.matcher.match(node)) {
len += 1;
}
@@ -360,7 +371,7 @@ pub const HTMLCollection = struct {
}
while (true) {
if (parser.nodeType(node) == .element) {
if (try parser.nodeType(node) == .element) {
if (try self.matcher.match(node)) {
// check if we found the searched element.
if (i == index) {
@@ -394,7 +405,7 @@ pub const HTMLCollection = struct {
var node = try self.start() orelse return null;
while (true) {
if (parser.nodeType(node) == .element) {
if (try parser.nodeType(node) == .element) {
if (try self.matcher.match(node)) {
const elem = @as(*parser.Element, @ptrCast(node));
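
A short sketch of the arena-dupe pattern the matchers above now follow (the ArenaAllocator setup and test are illustrative assumptions, not part of the diff): since an HTMLCollection is evaluated lazily, the tag or class string must be copied into page-arena memory rather than borrowing the caller's buffer.

const std = @import("std");
const Allocator = std.mem.Allocator;

const MatchByTagName = struct {
    tag: []const u8,
    is_wildcard: bool,

    // Copy the caller-owned tag name into the arena so the live collection
    // can keep matching after the caller's buffer is gone or reused.
    fn init(arena: Allocator, tag_name: []const u8) !MatchByTagName {
        if (std.mem.eql(u8, tag_name, "*")) {
            return .{ .tag = "*", .is_wildcard = true };
        }
        return .{ .tag = try arena.dupe(u8, tag_name), .is_wildcard = false };
    }
};

test "tag name is duplicated into the arena" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    var buf = [_]u8{ 'd', 'i', 'v' };
    const m = try MatchByTagName.init(arena, &buf);
    buf[0] = 'X'; // the caller reuses its buffer
    try std.testing.expectEqualStrings("div", m.tag);
}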

View File

@@ -157,7 +157,7 @@ pub const IntersectionObserverEntry = struct {
return self.page.renderer.boundingRect();
}
const root_type = parser.nodeType(root);
const root_type = try parser.nodeType(root);
var element: *parser.Element = undefined;
switch (root_type) {

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
@@ -283,7 +284,7 @@ const Observer = struct {
const mutation_event = parser.eventToMutationEvent(event);
const event_type = blk: {
const t = parser.eventType(event);
const t = try parser.eventType(event);
break :blk std.meta.stringToEnum(MutationEventType, t) orelse return;
};
@@ -301,12 +302,12 @@ const Observer = struct {
.DOMAttrModified => {
record.attribute_name = parser.mutationEventAttributeName(mutation_event) catch null;
if (self.options.attributeOldValue) {
record.old_value = parser.mutationEventPrevValue(mutation_event);
record.old_value = parser.mutationEventPrevValue(mutation_event) catch null;
}
},
.DOMCharacterDataModified => {
if (self.options.characterDataOldValue) {
record.old_value = parser.mutationEventPrevValue(mutation_event);
record.old_value = parser.mutationEventPrevValue(mutation_event) catch null;
}
},
.DOMNodeInserted => {

View File

@@ -67,7 +67,7 @@ pub const Node = struct {
pub const subtype = .node;
pub fn toInterface(node: *parser.Node) !Union {
return switch (parser.nodeType(node)) {
return switch (try parser.nodeType(node)) {
.element => try Element.toInterfaceT(
Union,
@as(*parser.Element, @ptrCast(node)),
@@ -124,7 +124,7 @@ pub const Node = struct {
}
pub fn get_firstChild(self: *parser.Node) !?Union {
const res = parser.nodeFirstChild(self);
const res = try parser.nodeFirstChild(self);
if (res == null) {
return null;
}
@@ -132,7 +132,7 @@ pub const Node = struct {
}
pub fn get_lastChild(self: *parser.Node) !?Union {
const res = parser.nodeLastChild(self);
const res = try parser.nodeLastChild(self);
if (res == null) {
return null;
}
@@ -140,7 +140,7 @@ pub const Node = struct {
}
pub fn get_nextSibling(self: *parser.Node) !?Union {
const res = parser.nodeNextSibling(self);
const res = try parser.nodeNextSibling(self);
if (res == null) {
return null;
}
@@ -148,7 +148,7 @@ pub const Node = struct {
}
pub fn get_previousSibling(self: *parser.Node) !?Union {
const res = parser.nodePreviousSibling(self);
const res = try parser.nodePreviousSibling(self);
if (res == null) {
return null;
}
@@ -156,7 +156,7 @@ pub const Node = struct {
}
pub fn get_parentNode(self: *parser.Node) !?Union {
const res = parser.nodeParentNode(self);
const res = try parser.nodeParentNode(self);
if (res == null) {
return null;
}
@@ -164,7 +164,7 @@ pub const Node = struct {
}
pub fn get_parentElement(self: *parser.Node) !?ElementUnion {
const res = parser.nodeParentElement(self);
const res = try parser.nodeParentElement(self);
if (res == null) {
return null;
}
@@ -176,11 +176,11 @@ pub const Node = struct {
}
pub fn get_nodeType(self: *parser.Node) !u8 {
return @intFromEnum(parser.nodeType(self));
return @intFromEnum(try parser.nodeType(self));
}
pub fn get_ownerDocument(self: *parser.Node) !?*parser.DocumentHTML {
const res = parser.nodeOwnerDocument(self);
const res = try parser.nodeOwnerDocument(self);
if (res == null) {
return null;
}
@@ -190,12 +190,12 @@ pub const Node = struct {
pub fn get_isConnected(self: *parser.Node) !bool {
var node = self;
while (true) {
const node_type = parser.nodeType(node);
const node_type = try parser.nodeType(node);
if (node_type == .document) {
return true;
}
if (parser.nodeParentNode(node)) |parent| {
if (try parser.nodeParentNode(node)) |parent| {
// didn't find a document, but node has a parent, let's see
// if it's connected;
node = parent;
@@ -222,15 +222,15 @@ pub const Node = struct {
// Read/Write attributes
pub fn get_nodeValue(self: *parser.Node) !?[]const u8 {
return parser.nodeValue(self);
return try parser.nodeValue(self);
}
pub fn set_nodeValue(self: *parser.Node, data: []u8) !void {
try parser.nodeSetValue(self, data);
}
pub fn get_textContent(self: *parser.Node) ?[]const u8 {
return parser.nodeTextContent(self);
pub fn get_textContent(self: *parser.Node) !?[]const u8 {
return try parser.nodeTextContent(self);
}
pub fn set_textContent(self: *parser.Node, data: []u8) !void {
@@ -240,8 +240,8 @@ pub const Node = struct {
// Methods
pub fn _appendChild(self: *parser.Node, child: *parser.Node) !Union {
const self_owner = parser.nodeOwnerDocument(self);
const child_owner = parser.nodeOwnerDocument(child);
const self_owner = try parser.nodeOwnerDocument(self);
const child_owner = try parser.nodeOwnerDocument(child);
// If the node to be inserted has a different ownerDocument than the parent node,
// modern browsers automatically adopt the node and its descendants into
@@ -272,14 +272,14 @@ pub const Node = struct {
return 0;
}
const docself = parser.nodeOwnerDocument(self) orelse blk: {
if (parser.nodeType(self) == .document) {
const docself = try parser.nodeOwnerDocument(self) orelse blk: {
if (try parser.nodeType(self) == .document) {
break :blk @as(*parser.Document, @ptrCast(self));
}
break :blk null;
};
const docother = parser.nodeOwnerDocument(other) orelse blk: {
if (parser.nodeType(other) == .document) {
const docother = try parser.nodeOwnerDocument(other) orelse blk: {
if (try parser.nodeType(other) == .document) {
break :blk @as(*parser.Document, @ptrCast(other));
}
break :blk null;
@@ -299,8 +299,8 @@ pub const Node = struct {
@intFromEnum(parser.DocumentPosition.contained_by);
}
const rootself = parser.nodeGetRootNode(self);
const rootother = parser.nodeGetRootNode(other);
const rootself = try parser.nodeGetRootNode(self);
const rootother = try parser.nodeGetRootNode(other);
if (rootself != rootother) {
return @intFromEnum(parser.DocumentPosition.disconnected) +
@intFromEnum(parser.DocumentPosition.implementation_specific) +
@@ -347,8 +347,8 @@ pub const Node = struct {
return 0;
}
pub fn _contains(self: *parser.Node, other: *parser.Node) bool {
return parser.nodeContains(self, other);
pub fn _contains(self: *parser.Node, other: *parser.Node) !bool {
return try parser.nodeContains(self, other);
}
// Returns itself or ancestor object inheriting from Node.
@@ -364,7 +364,7 @@ pub const Node = struct {
log.warn(.web_api, "not implemented", .{ .feature = "getRootNode composed" });
};
const root = parser.nodeGetRootNode(self);
const root = try parser.nodeGetRootNode(self);
if (page.getNodeState(root)) |state| {
if (state.shadow_root) |sr| {
return .{ .shadow_root = sr };
@@ -374,18 +374,18 @@ pub const Node = struct {
return .{ .node = try Node.toInterface(root) };
}
pub fn _hasChildNodes(self: *parser.Node) bool {
return parser.nodeHasChildNodes(self);
pub fn _hasChildNodes(self: *parser.Node) !bool {
return try parser.nodeHasChildNodes(self);
}
pub fn get_childNodes(self: *parser.Node, page: *Page) !NodeList {
const allocator = page.arena;
var list: NodeList = .{};
var n = parser.nodeFirstChild(self) orelse return list;
var n = try parser.nodeFirstChild(self) orelse return list;
while (true) {
try list.append(allocator, n);
n = parser.nodeNextSibling(n) orelse return list;
n = try parser.nodeNextSibling(n) orelse return list;
}
}
@@ -394,8 +394,8 @@ pub const Node = struct {
return _appendChild(self, new_node);
}
const self_owner = parser.nodeOwnerDocument(self);
const new_node_owner = parser.nodeOwnerDocument(new_node);
const self_owner = try parser.nodeOwnerDocument(self);
const new_node_owner = try parser.nodeOwnerDocument(new_node);
// If the node to be inserted has a different ownerDocument than the parent node,
// modern browsers automatically adopt the node and its descendants into
@@ -415,7 +415,7 @@ pub const Node = struct {
}
pub fn _isDefaultNamespace(self: *parser.Node, namespace: ?[]const u8) !bool {
return parser.nodeIsDefaultNamespace(self, namespace);
return try parser.nodeIsDefaultNamespace(self, namespace);
}
pub fn _isEqualNode(self: *parser.Node, other: *parser.Node) !bool {
@@ -423,10 +423,10 @@ pub const Node = struct {
return try parser.nodeIsEqualNode(self, other);
}
pub fn _isSameNode(self: *parser.Node, other: *parser.Node) bool {
pub fn _isSameNode(self: *parser.Node, other: *parser.Node) !bool {
// TODO: other is not an optional parameter, but can be null.
// NOTE: there is no need to use isSameNode(); instead use the === strict equality operator
return parser.nodeIsSameNode(self, other);
return try parser.nodeIsSameNode(self, other);
}
pub fn _lookupPrefix(self: *parser.Node, namespace: ?[]const u8) !?[]const u8 {
@@ -482,9 +482,9 @@ pub const Node = struct {
return parser.DOMError.HierarchyRequest;
}
const doc = (parser.nodeOwnerDocument(self)) orelse return;
const doc = (try parser.nodeOwnerDocument(self)) orelse return;
if (parser.nodeFirstChild(self)) |first| {
if (try parser.nodeFirstChild(self)) |first| {
for (nodes) |node| {
_ = try parser.nodeInsertBefore(self, try node.toNode(doc), first);
}
@@ -506,7 +506,7 @@ pub const Node = struct {
return parser.DOMError.HierarchyRequest;
}
const doc = (parser.nodeOwnerDocument(self)) orelse return;
const doc = (try parser.nodeOwnerDocument(self)) orelse return;
for (nodes) |node| {
_ = try parser.nodeAppendChild(self, try node.toNode(doc));
}
@@ -525,7 +525,7 @@ pub const Node = struct {
// remove existing children
try removeChildren(self);
const doc = (parser.nodeOwnerDocument(self)) orelse return;
const doc = (try parser.nodeOwnerDocument(self)) orelse return;
// add new children
for (nodes) |node| {
_ = try parser.nodeAppendChild(self, try node.toNode(doc));
@@ -533,30 +533,30 @@ pub const Node = struct {
}
pub fn removeChildren(self: *parser.Node) !void {
if (!parser.nodeHasChildNodes(self)) return;
if (!try parser.nodeHasChildNodes(self)) return;
const children = try parser.nodeGetChildNodes(self);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
var i: u32 = 0;
while (i < ln) {
defer i += 1;
// we always retrieve the 0 index child on purpose: libdom nodelists
// are dynamic. So the next child to remove is always at pos 0.
const child = parser.nodeListItem(children, 0) orelse continue;
const child = try parser.nodeListItem(children, 0) orelse continue;
_ = try parser.nodeRemoveChild(self, child);
}
}
pub fn before(self: *parser.Node, nodes: []const NodeOrText) !void {
const parent = parser.nodeParentNode(self) orelse return;
const doc = (parser.nodeOwnerDocument(parent)) orelse return;
const parent = try parser.nodeParentNode(self) orelse return;
const doc = (try parser.nodeOwnerDocument(parent)) orelse return;
var sibling: ?*parser.Node = self;
// have to find the first sibling that isn't in nodes
CHECK: while (sibling) |s| {
for (nodes) |n| {
if (n.is(s)) {
sibling = parser.nodePreviousSibling(s);
sibling = try parser.nodePreviousSibling(s);
continue :CHECK;
}
}
@@ -564,7 +564,7 @@ pub const Node = struct {
}
if (sibling == null) {
sibling = parser.nodeFirstChild(parent);
sibling = try parser.nodeFirstChild(parent);
}
if (sibling) |ref_node| {
@@ -578,15 +578,15 @@ pub const Node = struct {
}
pub fn after(self: *parser.Node, nodes: []const NodeOrText) !void {
const parent = parser.nodeParentNode(self) orelse return;
const doc = (parser.nodeOwnerDocument(parent)) orelse return;
const parent = try parser.nodeParentNode(self) orelse return;
const doc = (try parser.nodeOwnerDocument(parent)) orelse return;
// have to find the first sibling that isn't in nodes
var sibling = parser.nodeNextSibling(self);
var sibling = try parser.nodeNextSibling(self);
CHECK: while (sibling) |s| {
for (nodes) |n| {
if (n.is(s)) {
sibling = parser.nodeNextSibling(s);
sibling = try parser.nodeNextSibling(s);
continue :CHECK;
}
}
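
A self-contained sketch of the labeled-continue search used by before()/after() above (the data and helper name are hypothetical): walk the siblings and return the first one that is not in the excluded set, restarting the outer loop whenever an excluded value is hit.

const std = @import("std");

// Mirrors the `CHECK: while ...` loop in Node.before/after: find the first
// value in `siblings` that does not appear in `excluded`.
fn firstNotExcluded(siblings: []const u8, excluded: []const u8) ?u8 {
    var i: usize = 0;
    CHECK: while (i < siblings.len) {
        const s = siblings[i];
        for (excluded) |e| {
            if (e == s) {
                i += 1;
                continue :CHECK;
            }
        }
        return s;
    }
    return null;
}

test "labeled-continue sibling search" {
    try std.testing.expectEqual(@as(?u8, 'c'), firstNotExcluded("abc", "ab"));
    try std.testing.expectEqual(@as(?u8, null), firstNotExcluded("ab", "ab"));
}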

View File

@@ -44,7 +44,7 @@ pub const NodeFilter = struct {
const VerifyResult = enum { accept, skip, reject };
pub fn verify(what_to_show: u32, filter: ?Env.Function, node: *parser.Node) !VerifyResult {
const node_type = parser.nodeType(node);
const node_type = try parser.nodeType(node);
// Verify that we can show this node type.
if (!switch (node_type) {

View File

@@ -125,7 +125,7 @@ pub const NodeIterator = struct {
return try Node.toInterface(sibling);
}
current = (parser.nodeParentNode(current)) orelse break;
current = (try parser.nodeParentNode(current)) orelse break;
}
return null;
@@ -147,7 +147,7 @@ pub const NodeIterator = struct {
}
var current = self.reference_node;
while (parser.nodePreviousSibling(current)) |previous| {
while (try parser.nodePreviousSibling(current)) |previous| {
current = previous;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
@@ -189,11 +189,11 @@ pub const NodeIterator = struct {
fn firstChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
const children = try parser.nodeGetChildNodes(node);
const child_count = parser.nodeListLength(children);
const child_count = try parser.nodeListLength(children);
for (0..child_count) |i| {
const index: u32 = @intCast(i);
const child = (parser.nodeListItem(children, index)) orelse return null;
const child = (try parser.nodeListItem(children, index)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
.accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
@@ -206,12 +206,12 @@ pub const NodeIterator = struct {
fn lastChild(self: *const NodeIterator, node: *parser.Node) !?*parser.Node {
const children = try parser.nodeGetChildNodes(node);
const child_count = parser.nodeListLength(children);
const child_count = try parser.nodeListLength(children);
var index: u32 = child_count;
while (index > 0) {
index -= 1;
const child = (parser.nodeListItem(children, index)) orelse return null;
const child = (try parser.nodeListItem(children, index)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
.accept => return child, // NOTE: Skip and reject are equivalent for NodeIterator, this is different from TreeWalker
@@ -229,7 +229,7 @@ pub const NodeIterator = struct {
var current = node;
while (true) {
if (current == self.root) return null;
current = (parser.nodeParentNode(current)) orelse return null;
current = (try parser.nodeParentNode(current)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
.accept => return current,
@@ -243,7 +243,7 @@ pub const NodeIterator = struct {
var current = node;
while (true) {
current = (parser.nodeNextSibling(current)) orelse return null;
current = (try parser.nodeNextSibling(current)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
.accept => return current,

View File

@@ -17,7 +17,6 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
@@ -102,20 +101,13 @@ pub const NodeList = struct {
nodes: NodesArrayList = .{},
pub fn deinit(self: *NodeList, allocator: Allocator) void {
self.nodes.deinit(allocator);
pub fn deinit(self: *NodeList, alloc: std.mem.Allocator) void {
// TODO unref all nodes
self.nodes.deinit(alloc);
}
pub fn ensureTotalCapacity(self: *NodeList, allocator: Allocator, n: usize) !void {
return self.nodes.ensureTotalCapacity(allocator, n);
}
pub fn append(self: *NodeList, allocator: Allocator, node: *parser.Node) !void {
try self.nodes.append(allocator, node);
}
pub fn appendAssumeCapacity(self: *NodeList, node: *parser.Node) void {
self.nodes.appendAssumeCapacity(node);
pub fn append(self: *NodeList, alloc: std.mem.Allocator, node: *parser.Node) !void {
try self.nodes.append(alloc, node);
}
pub fn get_length(self: *const NodeList) u32 {

View File

@@ -61,7 +61,7 @@ pub const Performance = struct {
return milliTimestamp() - self.time_origin;
}
pub fn _mark(_: *Performance, name: Env.String, _options: ?PerformanceMark.Options, page: *Page) !PerformanceMark {
pub fn _mark(_: *Performance, name: []const u8, _options: ?PerformanceMark.Options, page: *Page) !PerformanceMark {
const mark: PerformanceMark = try .constructor(name, _options, page);
// TODO: Should store this in an entries list
return mark;
@@ -155,7 +155,7 @@ pub const PerformanceMark = struct {
startTime: ?f64 = null,
};
pub fn constructor(name: Env.String, _options: ?Options, page: *Page) !PerformanceMark {
pub fn constructor(name: []const u8, _options: ?Options, page: *Page) !PerformanceMark {
const perf = &page.window.performance;
const options = _options orelse Options{};
@@ -166,7 +166,9 @@ pub const PerformanceMark = struct {
}
const detail = if (options.detail) |d| try d.persist() else null;
const proto = PerformanceEntry{ .name = name.string, .entry_type = .mark, .start_time = start_time };
const duped_name = try page.arena.dupe(u8, name);
const proto = PerformanceEntry{ .name = duped_name, .entry_type = .mark, .start_time = start_time };
return .{ .proto = proto, .detail = detail };
}
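
One side of this hunk dupes the mark's name into the page arena so the stored PerformanceEntry does not point at memory owned by a shorter-lived caller. Below is a minimal, self-contained sketch of that dupe-into-a-longer-lived-arena pattern using only the standard library; the buffer and names are made up and only stand in for page.arena and the transient name.

```zig
const std = @import("std");

test "copy a short-lived name into a longer-lived arena" {
    // Stand-in for page.arena; the duplicated name must outlive the call.
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    var scratch = [_]u8{ 'm', 'a', 'r', 'k' };
    const transient: []const u8 = &scratch;

    // Same idea as `try page.arena.dupe(u8, name)` in the hunk above.
    const duped = try arena.dupe(u8, transient);

    // Clobber the original buffer; the arena copy is unaffected.
    scratch[0] = 'X';
    try std.testing.expectEqualStrings("mark", duped);
    try std.testing.expectEqualStrings("Xark", transient);
}
```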

View File

@@ -48,7 +48,7 @@ pub const ProcessingInstruction = struct {
}
pub fn get_data(self: *parser.ProcessingInstruction) !?[]const u8 {
return parser.nodeValue(parser.processingInstructionToNode(self));
return try parser.nodeValue(parser.processingInstructionToNode(self));
}
pub fn set_data(self: *parser.ProcessingInstruction, data: []u8) !void {
@@ -58,7 +58,7 @@ pub const ProcessingInstruction = struct {
// netsurf's ProcessingInstruction doesn't implement the dom_node_get_attributes
// and thus will crash if we try to call nodeIsEqualNode.
pub fn _isEqualNode(self: *parser.ProcessingInstruction, other_node: *parser.Node) !bool {
if (parser.nodeType(other_node) != .processing_instruction) {
if (try parser.nodeType(other_node) != .processing_instruction) {
return false;
}

View File

@@ -176,10 +176,10 @@ pub const Range = struct {
self.proto.end_node = node;
// Set end_offset
switch (parser.nodeType(node)) {
switch (try parser.nodeType(node)) {
.text, .cdata_section, .comment, .processing_instruction => {
// For text-like nodes, end_offset should be the length of the text data
if (parser.nodeValue(node)) |text_data| {
if (try parser.nodeValue(node)) |text_data| {
self.proto.end_offset = @intCast(text_data.len);
} else {
self.proto.end_offset = 0;
@@ -188,7 +188,7 @@ pub const Range = struct {
else => {
// For element and other nodes, end_offset is the number of children
const child_nodes = try parser.nodeGetChildNodes(node);
const child_count = parser.nodeListLength(child_nodes);
const child_count = try parser.nodeListLength(child_nodes);
self.proto.end_offset = @intCast(child_count);
},
}
@@ -211,7 +211,7 @@ pub const Range = struct {
pub fn _comparePoint(self: *const Range, node: *parser.Node, offset_: i32) !i32 {
const start = self.proto.start_node;
if (parser.nodeGetRootNode(start) != parser.nodeGetRootNode(node)) {
if (try parser.nodeGetRootNode(start) != try parser.nodeGetRootNode(node)) {
// WPT really wants this error to be first. Later, when we check
// if the relative position is 'disconnected', it'll also catch this
// case, but WPT will complain because it sometimes also sends
@@ -219,7 +219,7 @@ pub const Range = struct {
return error.WrongDocument;
}
if (parser.nodeType(node) == .document_type) {
if (try parser.nodeType(node) == .document_type) {
return error.InvalidNodeType;
}
@@ -245,8 +245,8 @@ pub const Range = struct {
}
pub fn _intersectsNode(self: *const Range, node: *parser.Node) !bool {
const start_root = parser.nodeGetRootNode(self.proto.start_node);
const node_root = parser.nodeGetRootNode(node);
const start_root = try parser.nodeGetRootNode(self.proto.start_node);
const node_root = try parser.nodeGetRootNode(node);
if (start_root != node_root) {
return false;
}
@@ -299,29 +299,29 @@ fn ensureValidOffset(node: *parser.Node, offset: i32) !void {
fn nodeLength(node: *parser.Node) !usize {
switch (try isTextual(node)) {
true => return ((parser.nodeTextContent(node)) orelse "").len,
true => return ((try parser.nodeTextContent(node)) orelse "").len,
false => {
const children = try parser.nodeGetChildNodes(node);
return @intCast(parser.nodeListLength(children));
return @intCast(try parser.nodeListLength(children));
},
}
}
fn isTextual(node: *parser.Node) !bool {
return switch (parser.nodeType(node)) {
return switch (try parser.nodeType(node)) {
.text, .comment, .cdata_section => true,
else => false,
};
}
fn getParentAndIndex(child: *parser.Node) !struct { *parser.Node, u32 } {
const parent = (parser.nodeParentNode(child)) orelse return error.InvalidNodeType;
const parent = (try parser.nodeParentNode(child)) orelse return error.InvalidNodeType;
const children = try parser.nodeGetChildNodes(parent);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
var i: u32 = 0;
while (i < ln) {
defer i += 1;
const c = parser.nodeListItem(children, i) orelse continue;
const c = try parser.nodeListItem(children, i) orelse continue;
if (c == child) {
return .{ parent, i };
}
@@ -363,7 +363,7 @@ fn compare(node_a: *parser.Node, offset_a: u32, node_b: *parser.Node, offset_b:
if (position & @intFromEnum(parser.DocumentPosition.contains) == @intFromEnum(parser.DocumentPosition.contains)) {
// node_a contains node_b
var child = node_b;
while (parser.nodeParentNode(child)) |parent| {
while (try parser.nodeParentNode(child)) |parent| {
if (parent == node_a) {
// child.parentNode == node_a
break;

View File

@@ -67,7 +67,7 @@ pub const ShadowRoot = struct {
pub fn set_innerHTML(self: *ShadowRoot, str_: ?[]const u8) !void {
const sr_doc = parser.documentFragmentToNode(self.proto);
const doc = parser.nodeOwnerDocument(sr_doc) orelse return parser.DOMError.WrongDocument;
const doc = try parser.nodeOwnerDocument(sr_doc) orelse return parser.DOMError.WrongDocument;
try Node.removeChildren(sr_doc);
const str = str_ orelse return;
@@ -80,16 +80,16 @@ pub const ShadowRoot = struct {
// element.
// For ShadowRoot, it appears that only the children within the body should
// be set.
const html = parser.nodeFirstChild(fragment_node) orelse return;
const head = parser.nodeFirstChild(html) orelse return;
const body = parser.nodeNextSibling(head) orelse return;
const html = try parser.nodeFirstChild(fragment_node) orelse return;
const head = try parser.nodeFirstChild(html) orelse return;
const body = try parser.nodeNextSibling(head) orelse return;
const children = try parser.nodeGetChildNodes(body);
const ln = parser.nodeListLength(children);
const ln = try parser.nodeListLength(children);
for (0..ln) |_| {
// always index 0, because nodeAppendChild moves the node out of
// the nodeList and into the new tree
const child = parser.nodeListItem(children, 0) orelse continue;
const child = try parser.nodeListItem(children, 0) orelse continue;
_ = try parser.nodeAppendChild(sr_doc, child);
}
}

View File

@@ -21,6 +21,7 @@ const parser = @import("../netsurf.zig");
const NodeFilter = @import("node_filter.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const Node = @import("node.zig").Node;
const NodeUnion = @import("node.zig").Union;
@@ -94,11 +95,11 @@ pub const TreeWalker = struct {
fn firstChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
const children = try parser.nodeGetChildNodes(node);
const child_count = parser.nodeListLength(children);
const child_count = try parser.nodeListLength(children);
for (0..child_count) |i| {
const index: u32 = @intCast(i);
const child = (parser.nodeListItem(children, index)) orelse return null;
const child = (try parser.nodeListItem(children, index)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
.accept => return child,
@@ -112,12 +113,12 @@ pub const TreeWalker = struct {
fn lastChild(self: *const TreeWalker, node: *parser.Node) !?*parser.Node {
const children = try parser.nodeGetChildNodes(node);
const child_count = parser.nodeListLength(children);
const child_count = try parser.nodeListLength(children);
var index: u32 = child_count;
while (index > 0) {
index -= 1;
const child = (parser.nodeListItem(children, index)) orelse return null;
const child = (try parser.nodeListItem(children, index)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, child)) {
.accept => return child,
@@ -133,7 +134,7 @@ pub const TreeWalker = struct {
var current = node;
while (true) {
current = (parser.nodeNextSibling(current)) orelse return null;
current = (try parser.nodeNextSibling(current)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
.accept => return current,
@@ -148,7 +149,7 @@ pub const TreeWalker = struct {
var current = node;
while (true) {
current = (parser.nodePreviousSibling(current)) orelse return null;
current = (try parser.nodePreviousSibling(current)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
.accept => return current,
@@ -165,7 +166,7 @@ pub const TreeWalker = struct {
var current = node;
while (true) {
if (current == self.root) return null;
current = (parser.nodeParentNode(current)) orelse return null;
current = (try parser.nodeParentNode(current)) orelse return null;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {
.accept => return current,
@@ -205,7 +206,7 @@ pub const TreeWalker = struct {
return try Node.toInterface(sibling);
}
current = (parser.nodeParentNode(current)) orelse break;
current = (try parser.nodeParentNode(current)) orelse break;
}
return null;
@@ -233,7 +234,7 @@ pub const TreeWalker = struct {
if (self.current_node == self.root) return null;
var current = self.current_node;
while (parser.nodePreviousSibling(current)) |previous| {
while (try parser.nodePreviousSibling(current)) |previous| {
current = previous;
switch (try NodeFilter.verify(self.what_to_show, self.filter_func, current)) {

View File

@@ -44,39 +44,39 @@ pub const WalkerDepthFirst = struct {
var n = cur orelse root;
// TODO deinit next
if (parser.nodeFirstChild(n)) |next| {
if (try parser.nodeFirstChild(n)) |next| {
return next;
}
// TODO deinit next
if (parser.nodeNextSibling(n)) |next| {
if (try parser.nodeNextSibling(n)) |next| {
return next;
}
// TODO deinit parent
// Back to the parent of cur.
// If cur has no parent, then the iteration is over.
var parent = parser.nodeParentNode(n) orelse return null;
var parent = try parser.nodeParentNode(n) orelse return null;
// TODO deinit lastchild
var lastchild = parser.nodeLastChild(parent);
var lastchild = try parser.nodeLastChild(parent);
while (n != root and n == lastchild) {
n = parent;
// TODO deinit parent
// Back to the prev's parent.
// If prev has no parent, then the loop must stop.
parent = parser.nodeParentNode(n) orelse break;
parent = try parser.nodeParentNode(n) orelse break;
// TODO deinit lastchild
lastchild = parser.nodeLastChild(parent);
lastchild = try parser.nodeLastChild(parent);
}
if (n == root) {
return null;
}
return parser.nodeNextSibling(n);
return try parser.nodeNextSibling(n);
}
};
@@ -84,14 +84,14 @@ pub const WalkerDepthFirst = struct {
pub const WalkerChildren = struct {
pub fn get_next(_: WalkerChildren, root: *parser.Node, cur: ?*parser.Node) !?*parser.Node {
// On walk start, we return the root's first child.
if (cur == null) return parser.nodeFirstChild(root);
if (cur == null) return try parser.nodeFirstChild(root);
// If cur is root, then return null.
// This is a special case: if the root is included in the walk, we
// don't want to go further to find children.
if (root == cur.?) return null;
return parser.nodeNextSibling(cur.?);
return try parser.nodeNextSibling(cur.?);
}
};
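
WalkerDepthFirst.get_next steps through a subtree using only first-child / next-sibling / parent links, now with `try` because those libdom calls can fail. The following is a simplified, self-contained sketch of the same pre-order stepping idea over a toy tree; ToyNode and depthFirstNext are illustrative stand-ins, not the project's exact walker logic.

```zig
const std = @import("std");

// Toy node with the three links the walker needs. In the real code these are
// libdom calls (nodeFirstChild / nodeNextSibling / nodeParentNode) that can
// fail, hence the `try` added throughout the hunks above.
const ToyNode = struct {
    name: []const u8,
    parent: ?*ToyNode = null,
    first_child: ?*ToyNode = null,
    next_sibling: ?*ToyNode = null,
};

// Simplified pre-order "next node" step over the subtree rooted at `root`,
// excluding the root itself.
fn depthFirstNext(root: *ToyNode, cur: ?*ToyNode) ?*ToyNode {
    var n = cur orelse return root.first_child;
    if (n.first_child) |child| return child;
    while (n != root) {
        if (n.next_sibling) |sibling| return sibling;
        n = n.parent orelse return null;
    }
    return null;
}

test "walk a toy tree depth-first" {
    var a = ToyNode{ .name = "a" };
    var b = ToyNode{ .name = "b" };
    var c = ToyNode{ .name = "c" };
    var d = ToyNode{ .name = "d" };
    // a -> (b -> d), c
    a.first_child = &b;
    b.parent = &a;
    b.first_child = &d;
    d.parent = &b;
    b.next_sibling = &c;
    c.parent = &a;

    var cur = depthFirstNext(&a, null);
    try std.testing.expectEqualStrings("b", cur.?.name);
    cur = depthFirstNext(&a, cur);
    try std.testing.expectEqualStrings("d", cur.?.name);
    cur = depthFirstNext(&a, cur);
    try std.testing.expectEqualStrings("c", cur.?.name);
    try std.testing.expect(depthFirstNext(&a, cur) == null);
}
```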

View File

@@ -26,13 +26,7 @@ pub const Opts = struct {
// set to include element shadowroots in the dump
page: ?*const Page = null,
strip_mode: StripMode = .{},
pub const StripMode = struct {
js: bool = false,
ui: bool = false,
css: bool = false,
};
exclude_scripts: bool = false,
};
// writer must be a std.io.Writer
@@ -47,8 +41,8 @@ pub fn writeDocType(doc_type: *parser.DocumentType, writer: *std.Io.Writer) !voi
try writer.writeAll("<!DOCTYPE ");
try writer.writeAll(try parser.documentTypeGetName(doc_type));
const public_id = parser.documentTypeGetPublicId(doc_type);
const system_id = parser.documentTypeGetSystemId(doc_type);
const public_id = try parser.documentTypeGetPublicId(doc_type);
const system_id = try parser.documentTypeGetSystemId(doc_type);
if (public_id.len != 0 and system_id.len != 0) {
try writer.writeAll(" PUBLIC \"");
try writeEscapedAttributeValue(writer, public_id);
@@ -69,11 +63,11 @@ pub fn writeDocType(doc_type: *parser.DocumentType, writer: *std.Io.Writer) !voi
}
pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerror!void {
switch (parser.nodeType(node)) {
switch (try parser.nodeType(node)) {
.element => {
// open the tag
const tag_type = try parser.nodeHTMLGetTagType(node) orelse .undef;
if (try isStripped(tag_type, node, opts.strip_mode)) {
if (opts.exclude_scripts and try isScriptOrRelated(tag_type, node)) {
return;
}
@@ -110,7 +104,7 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerro
if (try isVoid(parser.nodeToElement(node))) return;
if (tag_type == .script) {
try writer.writeAll(parser.nodeTextContent(node) orelse "");
try writer.writeAll(try parser.nodeTextContent(node) orelse "");
} else {
// write the children
// TODO avoid recursion
@@ -123,17 +117,17 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerro
try writer.writeAll(">");
},
.text => {
const v = parser.nodeValue(node) orelse return;
const v = try parser.nodeValue(node) orelse return;
try writeEscapedTextNode(writer, v);
},
.cdata_section => {
const v = parser.nodeValue(node) orelse return;
const v = try parser.nodeValue(node) orelse return;
try writer.writeAll("<![CDATA[");
try writer.writeAll(v);
try writer.writeAll("]]>");
},
.comment => {
const v = parser.nodeValue(node) orelse return;
const v = try parser.nodeValue(node) orelse return;
try writer.writeAll("<!--");
try writer.writeAll(v);
try writer.writeAll("-->");
@@ -165,22 +159,9 @@ pub fn writeChildren(root: *parser.Node, opts: Opts, writer: *std.Io.Writer) !vo
}
}
fn isStripped(tag_type: parser.Tag, node: *parser.Node, strip_mode: Opts.StripMode) !bool {
if (strip_mode.js and try isJsRelated(tag_type, node)) {
return true;
}
if (strip_mode.css and try isCssRelated(tag_type, node)) {
return true;
}
if (strip_mode.ui and try isUIRelated(tag_type, node)) {
return true;
}
return false;
}
fn isJsRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
// When `exclude_scripts` is passed to dump, we don't include <script> tags.
// We also want to omit <link rel=preload as=script>
fn isScriptOrRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
if (tag_type == .script) {
return true;
}
@@ -197,34 +178,6 @@ fn isJsRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
return false;
}
fn isCssRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
if (tag_type == .style) {
return true;
}
if (tag_type == .link) {
const el = parser.nodeToElement(node);
const rel = try parser.elementGetAttribute(el, "rel") orelse return false;
return std.ascii.eqlIgnoreCase(rel, "stylesheet");
}
return false;
}
fn isUIRelated(tag_type: parser.Tag, node: *parser.Node) !bool {
if (try isCssRelated(tag_type, node)) {
return true;
}
if (tag_type == .img or tag_type == .picture or tag_type == .video) {
return true;
}
if (tag_type == .undef) {
const name = try parser.nodeLocalName(node);
if (std.mem.eql(u8, name, "svg")) {
return true;
}
}
return false;
}
// area, base, br, col, embed, hr, img, input, link, meta, source, track, wbr
// https://html.spec.whatwg.org/#void-elements
fn isVoid(elem: *parser.Element) !bool {
@@ -273,7 +226,7 @@ fn writeEscapedAttributeValue(writer: anytype, value: []const u8) !void {
const testing = std.testing;
test "dump.writeHTML" {
parser.init();
try parser.init();
defer parser.deinit();
try testWriteHTML(

View File

@@ -19,6 +19,7 @@
const std = @import("std");
const log = @import("../../log.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
// https://encoding.spec.whatwg.org/#interface-textdecoder

View File

@@ -84,8 +84,8 @@ pub const Event = struct {
// Getters
pub fn get_type(self: *parser.Event) []const u8 {
return parser.eventType(self);
pub fn get_type(self: *parser.Event) ![]const u8 {
return try parser.eventType(self);
}
pub fn get_target(self: *parser.Event, page: *Page) !?EventTargetUnion {
@@ -158,7 +158,7 @@ pub const Event = struct {
const et_ = parser.eventTarget(self);
const et = et_ orelse return &.{};
var node: ?*parser.Node = switch (parser.eventTargetInternalType(et)) {
var node: ?*parser.Node = switch (try parser.eventTargetInternalType(et)) {
.libdom_node => @as(*parser.Node, @ptrCast(et)),
.plain => parser.eventTargetToNode(et),
else => {
@@ -174,8 +174,8 @@ pub const Event = struct {
.node = try Node.toInterface(n),
});
node = parser.nodeParentNode(n);
if (node == null and parser.nodeType(n) == .document_fragment) {
node = try parser.nodeParentNode(n);
if (node == null and try parser.nodeType(n) == .document_fragment) {
// we have a non-continuous hook from a shadowroot to its host (
// its parent element). libdom doesn't really support ShadowRoots
// and, for the most part, that works out well since it naturally
@@ -339,7 +339,7 @@ pub const EventHandler = struct {
if (self.once) {
const target = parser.eventTarget(event).?;
const typ = parser.eventType(event);
const typ = parser.eventType(event) catch return;
parser.eventTargetRemoveEventListener(
target,
typ,

View File

@@ -22,6 +22,7 @@ const builtin = @import("builtin");
const parser = @import("../netsurf.zig");
const Event = @import("event.zig").Event;
const JsObject = @import("../env.zig").JsObject;
// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent

View File

@@ -21,6 +21,7 @@ const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Event = @import("event.zig").Event;
const JsObject = @import("../env.zig").JsObject;
// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent

View File

@@ -80,27 +80,6 @@ pub const RequestCredentials = enum {
}
};
pub const RequestMode = enum {
cors,
@"no-cors",
@"same-origin",
navigate,
pub fn fromString(str: []const u8) ?RequestMode {
for (std.enums.values(RequestMode)) |cache| {
if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
return cache;
}
} else {
return null;
}
}
pub fn toString(self: RequestMode) []const u8 {
return @tagName(self);
}
};
// https://developer.mozilla.org/en-US/docs/Web/API/RequestInit
pub const RequestInit = struct {
body: ?[]const u8 = null,
@@ -109,7 +88,6 @@ pub const RequestInit = struct {
headers: ?HeadersInit = null,
integrity: ?[]const u8 = null,
method: ?[]const u8 = null,
mode: ?[]const u8 = null,
};
// https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
@@ -119,8 +97,6 @@ method: Http.Method,
url: [:0]const u8,
cache: RequestCache,
credentials: RequestCredentials,
// no-cors is the default if not built with the constructor.
mode: RequestMode = .@"no-cors",
headers: Headers,
body: ?[]const u8,
body_used: bool = false,
@@ -139,11 +115,11 @@ pub fn constructor(input: RequestInput, _options: ?RequestInit, page: *Page) !Re
},
};
const body = if (options.body) |body| try arena.dupe(u8, body) else null;
const cache = (if (options.cache) |cache| RequestCache.fromString(cache) else null) orelse RequestCache.default;
const credentials = (if (options.credentials) |creds| RequestCredentials.fromString(creds) else null) orelse RequestCredentials.@"same-origin";
const integrity = if (options.integrity) |integ| try arena.dupe(u8, integ) else "";
const headers: Headers = if (options.headers) |hdrs| try Headers.constructor(hdrs, page) else .{};
const mode = (if (options.mode) |mode| RequestMode.fromString(mode) else null) orelse RequestMode.cors;
const method: Http.Method = blk: {
if (options.method) |given_method| {
@@ -159,19 +135,11 @@ pub fn constructor(input: RequestInput, _options: ?RequestInit, page: *Page) !Re
}
};
// Can't have a body on .GET or .HEAD.
const body: ?[]const u8 = blk: {
if (method == .GET or method == .HEAD) {
break :blk null;
} else break :blk if (options.body) |body| try arena.dupe(u8, body) else null;
};
return .{
.method = method,
.url = url,
.cache = cache,
.credentials = credentials,
.mode = mode,
.headers = headers,
.body = body,
.integrity = integrity,
@@ -213,10 +181,6 @@ pub fn get_method(self: *const Request) []const u8 {
return @tagName(self.method);
}
pub fn get_mode(self: *const Request) RequestMode {
return self.mode;
}
pub fn get_url(self: *const Request) []const u8 {
return self.url;
}
@@ -246,7 +210,10 @@ pub fn _bytes(self: *Response, page: *Page) !Env.Promise {
return error.TypeError;
}
const resolver = page.main_context.createPromiseResolver();
const resolver = Env.PromiseResolver{
.js_context = page.main_context,
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
};
try resolver.resolve(self.body);
self.body_used = true;
@@ -258,24 +225,22 @@ pub fn _json(self: *Response, page: *Page) !Env.Promise {
return error.TypeError;
}
const resolver = page.main_context.createPromiseResolver();
const resolver = Env.PromiseResolver{
.js_context = page.main_context,
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
};
if (self.body) |body| {
const p = std.json.parseFromSliceLeaky(
std.json.Value,
page.call_arena,
body,
.{},
) catch |e| {
log.info(.browser, "invalid json", .{ .err = e, .source = "Request" });
return error.SyntaxError;
};
try resolver.resolve(p);
} else {
try resolver.resolve(null);
}
const p = std.json.parseFromSliceLeaky(
std.json.Value,
page.call_arena,
self.body,
.{},
) catch |e| {
log.info(.browser, "invalid json", .{ .err = e, .source = "Request" });
return error.SyntaxError;
};
try resolver.resolve(p);
self.body_used = true;
return resolver.promise();
}
@@ -285,7 +250,10 @@ pub fn _text(self: *Response, page: *Page) !Env.Promise {
return error.TypeError;
}
const resolver = page.main_context.createPromiseResolver();
const resolver = Env.PromiseResolver{
.js_context = page.main_context,
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
};
try resolver.resolve(self.body);
self.body_used = true;
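
RequestCache, RequestCredentials and the removed RequestMode all resolve option strings with the same case-insensitive scan over std.enums.values. A small, self-contained sketch of that lookup; ExampleMode is a made-up stand-in for those enums.

```zig
const std = @import("std");

// Hypothetical enum standing in for RequestCache / RequestCredentials /
// RequestMode; matching is case-insensitive over the tag names.
const ExampleMode = enum {
    cors,
    @"no-cors",
    @"same-origin",
    navigate,

    pub fn fromString(str: []const u8) ?ExampleMode {
        for (std.enums.values(ExampleMode)) |mode| {
            if (std.ascii.eqlIgnoreCase(str, @tagName(mode))) {
                return mode;
            }
        }
        return null;
    }
};

test "case-insensitive enum lookup" {
    try std.testing.expectEqual(ExampleMode.@"no-cors", ExampleMode.fromString("No-Cors").?);
    try std.testing.expect(ExampleMode.fromString("bogus") == null);
}
```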

View File

@@ -41,10 +41,9 @@ status_text: []const u8 = "",
headers: Headers,
mime: ?Mime = null,
url: []const u8 = "",
body: ?[]const u8 = null,
body: []const u8 = "",
body_used: bool = false,
redirected: bool = false,
type: ResponseType = .basic,
const ResponseBody = union(enum) {
string: []const u8,
@@ -56,28 +55,6 @@ const ResponseOptions = struct {
headers: ?HeadersInit = null,
};
pub const ResponseType = enum {
basic,
cors,
@"error",
@"opaque",
opaqueredirect,
pub fn fromString(str: []const u8) ?ResponseType {
for (std.enums.values(ResponseType)) |cache| {
if (std.ascii.eqlIgnoreCase(str, @tagName(cache))) {
return cache;
}
} else {
return null;
}
}
pub fn toString(self: ResponseType) []const u8 {
return @tagName(self);
}
};
pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Page) !Response {
const arena = page.arena;
@@ -91,7 +68,7 @@ pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Pag
},
}
} else {
break :blk null;
break :blk "";
}
};
@@ -108,9 +85,7 @@ pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Pag
pub fn get_body(self: *const Response, page: *Page) !*ReadableStream {
const stream = try ReadableStream.constructor(null, null, page);
if (self.body) |body| {
try stream.queue.append(page.arena, body);
}
try stream.queue.append(page.arena, self.body);
return stream;
}
@@ -138,10 +113,6 @@ pub fn get_statusText(self: *const Response) []const u8 {
return self.status_text;
}
pub fn get_type(self: *const Response) ResponseType {
return self.type;
}
pub fn get_url(self: *const Response) []const u8 {
return self.url;
}
@@ -161,7 +132,6 @@ pub fn _clone(self: *const Response) !Response {
.redirected = self.redirected,
.status = self.status,
.url = self.url,
.type = self.type,
};
}
@@ -185,24 +155,22 @@ pub fn _json(self: *Response, page: *Page) !Env.Promise {
return error.TypeError;
}
const resolver = page.main_context.createPromiseResolver();
const resolver = Env.PromiseResolver{
.js_context = page.main_context,
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
};
if (self.body) |body| {
const p = std.json.parseFromSliceLeaky(
std.json.Value,
page.call_arena,
body,
.{},
) catch |e| {
log.info(.browser, "invalid json", .{ .err = e, .source = "Response" });
return error.SyntaxError;
};
try resolver.resolve(p);
} else {
try resolver.resolve(null);
}
const p = std.json.parseFromSliceLeaky(
std.json.Value,
page.call_arena,
self.body,
.{},
) catch |e| {
log.info(.browser, "invalid json", .{ .err = e, .source = "Response" });
return error.SyntaxError;
};
try resolver.resolve(p);
self.body_used = true;
return resolver.promise();
}
@@ -212,7 +180,10 @@ pub fn _text(self: *Response, page: *Page) !Env.Promise {
return error.TypeError;
}
const resolver = page.main_context.createPromiseResolver();
const resolver = Env.PromiseResolver{
.js_context = page.main_context,
.resolver = v8.PromiseResolver.init(page.main_context.v8_context),
};
try resolver.resolve(self.body);
self.body_used = true;
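
_json parses the body into a std.json.Value on the call arena and maps a parse failure to a JS SyntaxError. Here is a self-contained sketch of that parseFromSliceLeaky pattern; the body string and arena below are stand-ins for self.body and page.call_arena.

```zig
const std = @import("std");

test "parse a response body into a std.json.Value on an arena" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    // Hypothetical body; the real one comes from the HTTP response.
    const body = "{\"ok\": true, \"count\": 2}";

    // parseFromSliceLeaky allocates everything on the given allocator and
    // returns the value directly; freeing the arena frees the parsed tree.
    const value = try std.json.parseFromSliceLeaky(std.json.Value, arena, body, .{});
    try std.testing.expect(value.object.get("ok").?.bool);
    try std.testing.expectEqual(@as(i64, 2), value.object.get("count").?.integer);

    // A malformed body surfaces as a parse error the caller can map to a
    // JS SyntaxError.
    const bad = std.json.parseFromSliceLeaky(std.json.Value, arena, "{oops", .{}) catch null;
    try std.testing.expect(bad == null);
}
```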

View File

@@ -53,7 +53,6 @@ pub const FetchContext = struct {
headers: std.ArrayListUnmanaged([]const u8) = .empty,
status: u16 = 0,
mime: ?Mime = null,
mode: Request.RequestMode,
transfer: ?*HttpClient.Transfer = null,
/// This effectively takes ownership of the FetchContext.
@@ -63,19 +62,6 @@ pub const FetchContext = struct {
pub fn toResponse(self: *const FetchContext) !Response {
var headers: Headers = .{};
// If the mode is "no-cors", we need to return this opaque/stripped Response.
// https://developer.mozilla.org/en-US/docs/Web/API/Response/type
if (self.mode == .@"no-cors") {
return Response{
.status = 0,
.headers = headers,
.mime = self.mime,
.body = null,
.url = self.url,
.type = .@"opaque",
};
}
// convert into Headers
for (self.headers.items) |hdr| {
var iter = std.mem.splitScalar(u8, hdr, ':');
@@ -84,25 +70,12 @@ pub const FetchContext = struct {
try headers.append(name, value, self.arena);
}
const resp_type: Response.ResponseType = blk: {
if (std.mem.startsWith(u8, self.url, "data:")) {
break :blk .basic;
}
break :blk switch (self.mode) {
.cors => .cors,
.@"same-origin", .navigate => .basic,
.@"no-cors" => unreachable,
};
};
return Response{
.status = self.status,
.headers = headers,
.mime = self.mime,
.body = self.body.items,
.url = self.url,
.type = resp_type,
};
}
};
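
toResponse splits each raw "Name: value" header line on the first ':' before appending it to Headers; the exact name/value extraction lines are elided between the hunks above, so the trimming shown here is an assumption. A minimal sketch with std.mem.splitScalar:

```zig
const std = @import("std");

test "split a raw header line into name and value" {
    // Hypothetical raw header line; the real ones come from the HTTP client.
    const hdr = "Content-Type: text/html; charset=utf-8";

    var iter = std.mem.splitScalar(u8, hdr, ':');
    const name = iter.first();
    // rest() keeps any further ':' characters intact (e.g. in URLs).
    const value = std.mem.trim(u8, iter.rest(), " ");

    try std.testing.expectEqualStrings("Content-Type", name);
    try std.testing.expectEqualStrings("text/html; charset=utf-8", value);
}
```
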
@@ -112,7 +85,7 @@ pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promi
const arena = page.arena;
const req = try Request.constructor(input, options, page);
var headers = try page.http_client.newHeaders();
var headers = try Http.Headers.init();
// Copy our headers into the HTTP headers.
var header_iter = req.headers.headers.iterator();
@@ -128,7 +101,7 @@ pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promi
try page.requestCookie(.{}).headersForRequest(arena, req.url, &headers);
const resolver = try page.main_context.createPersistentPromiseResolver(.page);
const resolver = page.main_context.createPersistentPromiseResolver();
const fetch_ctx = try arena.create(FetchContext);
fetch_ctx.* = .{
@@ -137,7 +110,6 @@ pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promi
.promise_resolver = resolver,
.method = req.method,
.url = req.url,
.mode = req.mode,
};
try page.http_client.request(.{
@@ -198,6 +170,7 @@ pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promi
.done_callback = struct {
fn doneCallback(ctx: *anyopaque) !void {
const self: *FetchContext = @ptrCast(@alignCast(ctx));
defer self.promise_resolver.setWeak();
self.transfer = null;
log.info(.fetch, "request complete", .{
@@ -214,6 +187,7 @@ pub fn fetch(input: RequestInput, options: ?RequestInit, page: *Page) !Env.Promi
.error_callback = struct {
fn errorCallback(ctx: *anyopaque, err: anyerror) void {
const self: *FetchContext = @ptrCast(@alignCast(ctx));
defer self.promise_resolver.setWeak();
self.transfer = null;
log.err(.fetch, "error", .{

View File

@@ -115,69 +115,67 @@ pub const HTMLDocument = struct {
}
pub fn _getElementsByName(self: *parser.DocumentHTML, name: []const u8, page: *Page) !NodeList {
const arena = page.arena;
var list: NodeList = .{};
if (name.len == 0) {
return list;
}
if (name.len == 0) return list;
const root = parser.documentHTMLToNode(self);
var c = try collection.HTMLCollectionByName(root, name, .{
var c = try collection.HTMLCollectionByName(arena, root, name, .{
.include_root = false,
});
const ln = try c.get_length();
try list.ensureTotalCapacity(page.arena, ln);
var i: u32 = 0;
while (i < ln) : (i += 1) {
while (i < ln) {
const n = try c.item(i) orelse break;
list.appendAssumeCapacity(n);
try list.append(arena, n);
i += 1;
}
return list;
}
pub fn get_images(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "img", .{
pub fn get_images(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "img", .{
.include_root = false,
});
}
pub fn get_embeds(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "embed", .{
pub fn get_embeds(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "embed", .{
.include_root = false,
});
}
pub fn get_plugins(self: *parser.DocumentHTML) collection.HTMLCollection {
return get_embeds(self);
pub fn get_plugins(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
return get_embeds(self, page);
}
pub fn get_forms(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "form", .{
pub fn get_forms(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "form", .{
.include_root = false,
});
}
pub fn get_scripts(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByTagName(parser.documentHTMLToNode(self), "script", .{
pub fn get_scripts(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "script", .{
.include_root = false,
});
}
pub fn get_applets(_: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionEmpty();
pub fn get_applets(_: *parser.DocumentHTML) !collection.HTMLCollection {
return try collection.HTMLCollectionEmpty();
}
pub fn get_links(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByLinks(parser.documentHTMLToNode(self), .{
pub fn get_links(self: *parser.DocumentHTML) !collection.HTMLCollection {
return try collection.HTMLCollectionByLinks(parser.documentHTMLToNode(self), .{
.include_root = false,
});
}
pub fn get_anchors(self: *parser.DocumentHTML) collection.HTMLCollection {
return collection.HTMLCollectionByAnchors(parser.documentHTMLToNode(self), .{
pub fn get_anchors(self: *parser.DocumentHTML) !collection.HTMLCollection {
return try collection.HTMLCollectionByAnchors(parser.documentHTMLToNode(self), .{
.include_root = false,
});
}

View File

@@ -133,14 +133,14 @@ pub const HTMLElement = struct {
pub fn get_innerText(e: *parser.ElementHTML) ![]const u8 {
const n = @as(*parser.Node, @ptrCast(e));
return parser.nodeTextContent(n) orelse "";
return try parser.nodeTextContent(n) orelse "";
}
pub fn set_innerText(e: *parser.ElementHTML, s: []const u8) !void {
const n = @as(*parser.Node, @ptrCast(e));
// create text node.
const doc = parser.nodeOwnerDocument(n) orelse return error.NoDocument;
const doc = try parser.nodeOwnerDocument(n) orelse return error.NoDocument;
const t = try parser.documentCreateTextNode(doc, s);
// remove existing children.
@@ -167,12 +167,12 @@ pub const HTMLElement = struct {
focusVisible: bool,
};
pub fn _focus(e: *parser.ElementHTML, _: ?FocusOpts, page: *Page) !void {
if (!page.isNodeAttached(@ptrCast(e))) {
if (!try page.isNodeAttached(@ptrCast(e))) {
return;
}
const Document = @import("../dom/document.zig").Document;
const root_node = parser.nodeGetRootNode(@ptrCast(e));
const root_node = try parser.nodeGetRootNode(@ptrCast(e));
try Document.setFocus(@ptrCast(root_node), e, page);
}
};
@@ -251,7 +251,7 @@ pub const HTMLAnchorElement = struct {
}
pub fn get_text(self: *parser.Anchor) !?[]const u8 {
return parser.nodeTextContent(parser.anchorToNode(self));
return try parser.nodeTextContent(parser.anchorToNode(self));
}
pub fn set_text(self: *parser.Anchor, v: []const u8) !void {
@@ -757,21 +757,13 @@ pub const HTMLLinkElement = struct {
pub const prototype = *HTMLElement;
pub const subtype = .node;
pub fn get_rel(self: *parser.Link) ![]const u8 {
return parser.linkGetRel(self);
}
pub fn set_rel(self: *parser.Link, rel: []const u8) !void {
return parser.linkSetRel(self, rel);
}
pub fn get_href(self: *parser.Link) ![]const u8 {
return parser.linkGetHref(self);
return try parser.linkGetHref(self);
}
pub fn set_href(self: *parser.Link, href: []const u8, page: *const Page) !void {
const full = try urlStitch(page.call_arena, href, page.url.raw, .{});
return parser.linkSetHref(self, full);
return try parser.linkSetHref(self, full);
}
};
@@ -1050,84 +1042,68 @@ pub const HTMLSlotElement = struct {
flatten: bool = false,
};
pub fn _assignedNodes(self: *parser.Slot, opts_: ?AssignedNodesOpts, page: *Page) ![]NodeUnion {
return findAssignedSlotNodes(self, opts_, false, page);
}
// This should return Union, instead of NodeUnion, but we want to re-use
// findAssignedSlotNodes. Returning NodeUnion is fine, as long as every element
// within is an Element. This could be more efficient
pub fn _assignedElements(self: *parser.Slot, opts_: ?AssignedNodesOpts, page: *Page) ![]NodeUnion {
return findAssignedSlotNodes(self, opts_, true, page);
}
fn findAssignedSlotNodes(self: *parser.Slot, opts_: ?AssignedNodesOpts, element_only: bool, page: *Page) ![]NodeUnion {
const opts = opts_ orelse AssignedNodesOpts{ .flatten = false };
if (opts.flatten) {
log.debug(.web_api, "not implemented", .{ .feature = "HTMLSlotElement flatten assignedNodes" });
if (try findAssignedSlotNodes(self, opts, page)) |nodes| {
return nodes;
}
if (!opts.flatten) {
return &.{};
}
const node: *parser.Node = @ptrCast(@alignCast(self));
const nl = try parser.nodeGetChildNodes(node);
const len = try parser.nodeListLength(nl);
if (len == 0) {
return &.{};
}
// First we look for any explicitly assigned nodes (via the slot attribute)
{
const slot_name = try parser.elementGetAttribute(@ptrCast(@alignCast(self)), "name");
var root = parser.nodeGetRootNode(node);
if (page.getNodeState(root)) |state| {
if (state.shadow_root) |sr| {
root = @ptrCast(@alignCast(sr.host));
}
var assigned = try page.call_arena.alloc(NodeUnion, len);
var i: usize = 0;
while (true) : (i += 1) {
const child = try parser.nodeListItem(nl, @intCast(i)) orelse break;
assigned[i] = try Node.toInterface(child);
}
return assigned[0..i];
}
fn findAssignedSlotNodes(self: *parser.Slot, opts: AssignedNodesOpts, page: *Page) !?[]NodeUnion {
if (opts.flatten) {
log.warn(.web_api, "not implemented", .{ .feature = "HTMLSlotElement flatten assignedNodes" });
}
const slot_name = try parser.elementGetAttribute(@ptrCast(@alignCast(self)), "name");
const node: *parser.Node = @ptrCast(@alignCast(self));
var root = try parser.nodeGetRootNode(node);
if (page.getNodeState(root)) |state| {
if (state.shadow_root) |sr| {
root = @ptrCast(@alignCast(sr.host));
}
}
var arr: std.ArrayList(NodeUnion) = .empty;
const w = @import("../dom/walker.zig").WalkerChildren{};
var next: ?*parser.Node = null;
while (true) {
next = try w.get_next(root, next) orelse break;
if (parser.nodeType(next.?) != .element) {
if (slot_name == null and !element_only) {
// default slot (with no name) takes everything
try arr.append(page.call_arena, try Node.toInterface(next.?));
}
continue;
}
const el: *parser.Element = @ptrCast(@alignCast(next.?));
const element_slot = try parser.elementGetAttribute(el, "slot");
if (nullableStringsAreEqual(slot_name, element_slot)) {
// either they're the same string or they are both null
var arr: std.ArrayList(NodeUnion) = .empty;
const w = @import("../dom/walker.zig").WalkerChildren{};
var next: ?*parser.Node = null;
while (true) {
next = try w.get_next(root, next) orelse break;
if (try parser.nodeType(next.?) != .element) {
if (slot_name == null) {
// default slot (with no name) takes everything
try arr.append(page.call_arena, try Node.toInterface(next.?));
continue;
}
continue;
}
if (arr.items.len > 0) {
return arr.items;
}
const el: *parser.Element = @ptrCast(@alignCast(next.?));
const element_slot = try parser.elementGetAttribute(el, "slot");
if (!opts.flatten) {
return &.{};
if (nullableStringsAreEqual(slot_name, element_slot)) {
// either they're the same string or they are both null
try arr.append(page.call_arena, try Node.toInterface(next.?));
continue;
}
}
// Since we have no explicitly assigned nodes and flatten == false,
// we'll collect the children of the slot - the defaults.
{
const nl = try parser.nodeGetChildNodes(node);
const len = parser.nodeListLength(nl);
if (len == 0) {
return &.{};
}
var assigned = try page.call_arena.alloc(NodeUnion, len);
var i: usize = 0;
while (true) : (i += 1) {
const child = parser.nodeListItem(nl, @intCast(i)) orelse break;
if (!element_only or parser.nodeType(child) == .element) {
assigned[i] = try Node.toInterface(child);
}
}
return assigned[0..i];
}
return if (arr.items.len == 0) null else arr.items;
}
fn nullableStringsAreEqual(a: ?[]const u8, b: ?[]const u8) bool {
@@ -1353,6 +1329,39 @@ test "Browser: HTML.HtmlScriptElement" {
try testing.htmlRunner("html/script/inline_defer.html");
}
test "Browser: HTML.HtmlSlotElement" {
try testing.htmlRunner("html/slot.html");
test "Browser: HTML.HTMLSlotElement" {
try testing.htmlRunner("html/html_slot_element.html");
}
const Check = struct {
input: []const u8,
expected: ?[]const u8 = null, // Needed when input != expected
};
const bool_valids = [_]Check{
.{ .input = "true" },
.{ .input = "''", .expected = "false" },
.{ .input = "13.5", .expected = "true" },
};
const str_valids = [_]Check{
.{ .input = "'foo'", .expected = "foo" },
.{ .input = "5", .expected = "5" },
.{ .input = "''", .expected = "" },
.{ .input = "document", .expected = "[object HTMLDocument]" },
};
// .{ "elem.type = '5'", "5" },
// .{ "elem.type", "text" },
fn testProperty(
arena: std.mem.Allocator,
runner: *testing.JsRunner,
elem_dot_prop: []const u8,
always: ?[]const u8, // Ignores checks' expected if set
checks: []const Check,
) !void {
for (checks) |check| {
try runner.testCases(&.{
.{ try std.mem.concat(arena, u8, &.{ elem_dot_prop, " = ", check.input }), null },
.{ elem_dot_prop, always orelse check.expected orelse check.input },
}, .{});
}
}

View File

@@ -21,6 +21,7 @@ const Allocator = std.mem.Allocator;
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const HTMLElement = @import("elements.zig").HTMLElement;
const FormData = @import("../xhr/form_data.zig").FormData;
pub const HTMLFormElement = struct {
pub const Self = parser.Form;

View File

@@ -16,8 +16,10 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const HTMLElement = @import("elements.zig").HTMLElement;
// https://html.spec.whatwg.org/multipage/iframe-embed-object.html#htmliframeelement

View File

@@ -37,6 +37,7 @@ const domcss = @import("../dom/css.zig");
const Css = @import("../css/css.zig").Css;
const Function = Env.Function;
const JsObject = Env.JsObject;
const v8 = @import("v8");
const Request = @import("../fetch/Request.zig");
@@ -253,18 +254,10 @@ pub const Window = struct {
return self.createTimeout(cbk, 0, page, .{ .name = "queueMicrotask" });
}
pub fn _setImmediate(self: *Window, cbk: Function, page: *Page) !u32 {
return self.createTimeout(cbk, 0, page, .{ .name = "setImmediate" });
}
pub fn _clearImmediate(self: *Window, id: u32) void {
_ = self.timers.remove(id);
}
pub fn _matchMedia(_: *const Window, media: Env.String) !MediaQueryList {
pub fn _matchMedia(_: *const Window, media: []const u8, page: *Page) !MediaQueryList {
return .{
.matches = false, // TODO?
.media = media.string,
.media = try page.arena.dupe(u8, media),
};
}

View File

@@ -26,9 +26,14 @@ const c = @cImport({
@cInclude("mimalloc.h");
});
const Error = error{
HeapNotNull,
HeapNull,
};
var heap: ?*c.mi_heap_t = null;
pub fn create() void {
pub fn create() Error!void {
std.debug.assert(heap == null);
heap = c.mi_heap_new();
std.debug.assert(heap != null);
@@ -40,45 +45,6 @@ pub fn destroy() void {
heap = null;
}
pub fn getRSS() i64 {
if (@import("builtin").mode != .Debug) {
// just don't trust my implementation, plus a caller might not know
// that this requires parsing some unstructured data
@compileError("Only available in debug builds");
}
var buf: [1024 * 8]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&buf);
var writer = std.Io.Writer.Allocating.init(fba.allocator());
c.mi_stats_print_out(struct {
fn print(msg: [*c]const u8, data: ?*anyopaque) callconv(.c) void {
const w: *std.Io.Writer = @ptrCast(@alignCast(data.?));
w.writeAll(std.mem.span(msg)) catch |err| {
std.debug.print("Failed to write mimalloc data: {}\n", .{err});
};
}
}.print, &writer.writer);
const data = writer.written();
const index = std.mem.indexOf(u8, data, "rss: ") orelse return -1;
const sep = std.mem.indexOfScalarPos(u8, data, index + 5, ' ') orelse return -2;
const value = std.fmt.parseFloat(f64, data[index + 5 .. sep]) catch return -3;
const unit = data[sep + 1 ..];
if (std.mem.startsWith(u8, unit, "KiB,")) {
return @as(i64, @intFromFloat(value)) * 1024;
}
if (std.mem.startsWith(u8, unit, "MiB,")) {
return @as(i64, @intFromFloat(value)) * 1024 * 1024;
}
if (std.mem.startsWith(u8, unit, "GiB,")) {
return @as(i64, @intFromFloat(value)) * 1024 * 1024 * 1024;
}
return -4;
}
pub export fn m_alloc(size: usize) callconv(.c) ?*anyopaque {
std.debug.assert(heap != null);
return c.mi_heap_malloc(heap.?, size);

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
pub const Mime = struct {
content_type: ContentType,
@@ -54,7 +55,7 @@ pub const Mime = struct {
};
/// Returns the null-terminated charset value.
pub fn charsetString(mime: *const Mime) [:0]const u8 {
pub inline fn charsetString(mime: *const Mime) [:0]const u8 {
return @ptrCast(&mime.charset);
}

View File

@@ -36,8 +36,8 @@ const mimalloc = @import("mimalloc.zig");
// init initializes netsurf lib.
// init starts a mimalloc heap arena for the netsurf session. The caller must
// call deinit() to free the arena memory.
pub fn init() void {
mimalloc.create();
pub fn init() !void {
try mimalloc.create();
}
// deinit frees the mimalloc heap arena memory.
@@ -84,7 +84,7 @@ pub fn deinit() void {
// - VtableT: the type of the vtable (dom_node_vtable, dom_element_vtable, etc)
// - NodeT: the type of the node interface (dom_element, dom_document, etc)
// - node: the node interface instance
fn getVtable(comptime VtableT: type, comptime NodeT: type, node: anytype) VtableT {
inline fn getVtable(comptime VtableT: type, comptime NodeT: type, node: anytype) VtableT {
// first align correctly the node interface
const node_aligned: *align(@alignOf(NodeExternal)) NodeT = @alignCast(node);
// then convert the node interface to a base node
@@ -101,12 +101,12 @@ fn getVtable(comptime VtableT: type, comptime NodeT: type, node: anytype) Vtable
// Utils
const String = c.dom_string;
fn strToData(s: *String) []const u8 {
inline fn strToData(s: *String) []const u8 {
const data = c.dom_string_data(s);
return data[0..c.dom_string_byte_length(s)];
}
pub fn strFromData(data: []const u8) !*String {
pub inline fn strFromData(data: []const u8) !*String {
var s: ?*String = null;
const err = c.dom_string_create(data.ptr, data.len, &s);
try DOMErr(err);
@@ -116,10 +116,10 @@ pub fn strFromData(data: []const u8) !*String {
const LWCString = c.lwc_string;
// TODO implement lwcStringToData
// fn lwcStringToData(s: *LWCString) []const u8 {
// inline fn lwcStringToData(s: *LWCString) []const u8 {
// }
fn lwcStringFromData(data: []const u8) !*LWCString {
inline fn lwcStringFromData(data: []const u8) !*LWCString {
var s: ?*LWCString = null;
const err = c.lwc_intern_string(data.ptr, data.len, &s);
try DOMErr(err);
@@ -445,15 +445,13 @@ pub fn eventInit(evt: *Event, typ: []const u8, opts: EventInit) !void {
try DOMErr(err);
}
pub fn eventType(evt: *Event) []const u8 {
pub fn eventType(evt: *Event) ![]const u8 {
var s: ?*String = null;
const err = c._dom_event_get_type(evt, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
// if the event type is null, return an empty string.
if (s == null) {
return "";
}
if (s == null) return "";
return strToData(s.?);
}
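
The recurring change in this file swaps std.debug.assert(err == c.DOM_NO_ERR) for try DOMErr(err), so libdom failures propagate as Zig errors instead of tripping an assert. Below is a self-contained sketch of that pattern; StatusCode, check and the fake vtable call are made-up stand-ins for dom_exception, DOMErr and the real libdom bindings.

```zig
const std = @import("std");

// Made-up status codes standing in for libdom's dom_exception values.
const StatusCode = enum(u32) { ok = 0, no_memory = 5, not_found = 8 };

const DomError = error{ NoMemory, NotFound };

// Same shape as DOMErr(): turn a C-style status code into a Zig error union
// so callers can `try` instead of asserting on DOM_NO_ERR.
fn check(code: StatusCode) DomError!void {
    switch (code) {
        .ok => return,
        .no_memory => return error.NoMemory,
        .not_found => return error.NotFound,
    }
}

// Stand-in for a vtable call that reports success via its return code and
// writes its result through an out parameter.
fn fakeGetNodeType(out: *u32) StatusCode {
    out.* = 1;
    return .ok;
}

fn nodeTypeSketch() DomError!u32 {
    var node_type: u32 = undefined;
    // Before: std.debug.assert(err == c.DOM_NO_ERR); after: propagate.
    try check(fakeGetNodeType(&node_type));
    return node_type;
}

test "status codes become Zig errors" {
    try std.testing.expectEqual(@as(u32, 1), try nodeTypeSketch());
    try std.testing.expectError(error.NotFound, check(.not_found));
}
```
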
@@ -573,10 +571,10 @@ pub fn mutationEventAttributeName(evt: *MutationEvent) ![]const u8 {
return strToData(s.?);
}
pub fn mutationEventPrevValue(evt: *MutationEvent) ?[]const u8 {
pub fn mutationEventPrevValue(evt: *MutationEvent) !?[]const u8 {
var s: ?*String = null;
const err = c._dom_mutation_event_get_prev_value(evt, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) return null;
return strToData(s.?);
}
@@ -584,7 +582,7 @@ pub fn mutationEventPrevValue(evt: *MutationEvent) ?[]const u8 {
pub fn mutationEventRelatedNode(evt: *MutationEvent) !?*Node {
var n: NodeExternal = undefined;
const err = c._dom_mutation_event_get_related_node(evt, &n);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (n == null) return null;
return @as(*Node, @ptrCast(@alignCast(n)));
}
@@ -781,10 +779,10 @@ pub fn eventTargetDispatchEvent(et: *EventTarget, event: *Event) !bool {
return res;
}
pub fn eventTargetInternalType(et: *EventTarget) EventTargetTBase.InternalType {
pub fn eventTargetInternalType(et: *EventTarget) !EventTargetTBase.InternalType {
var res: u32 = undefined;
const err = eventTargetVtable(et).internal_type.?(et, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return @enumFromInt(res);
}
@@ -852,8 +850,10 @@ pub const EventTargetTBase = extern struct {
pub fn dispatch_event(et: [*c]c.dom_event_target, evt: ?*c.struct_dom_event, res: [*c]bool) callconv(.c) c.dom_exception {
const self = @as(*Self, @ptrCast(et));
// Set the event target to the target dispatched.
const err = c._dom_event_set_target(evt, et);
std.debug.assert(err == c.DOM_NO_ERR);
const e = c._dom_event_set_target(evt, et);
if (e != c.DOM_NO_ERR) {
return e;
}
return c._dom_event_target_dispatch(et, &self.eti, evt, c.DOM_AT_TARGET, res);
}
@@ -1046,17 +1046,17 @@ pub const NodeType = enum(u4) {
// NodeList
pub const NodeList = c.dom_nodelist;
pub fn nodeListLength(nodeList: *NodeList) u32 {
pub fn nodeListLength(nodeList: *NodeList) !u32 {
var ln: u32 = undefined;
const err = c.dom_nodelist_get_length(nodeList, &ln);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return ln;
}
pub fn nodeListItem(nodeList: *NodeList, index: u32) ?*Node {
pub fn nodeListItem(nodeList: *NodeList, index: u32) !?*Node {
var n: NodeExternal = undefined;
const err = c._dom_nodelist_item(nodeList, index, &n);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (n == null) return null;
return @as(*Node, @ptrCast(@alignCast(n)));
}
@@ -1173,86 +1173,88 @@ fn nodeVtable(node: *Node) c.dom_node_vtable {
pub fn nodeLocalName(node: *Node) ![]const u8 {
var s: ?*String = null;
const err = nodeVtable(node).dom_node_get_local_name.?(node, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) return "";
var s_lower: ?*String = null;
const errStr = c.dom_string_tolower(s, true, &s_lower);
try DOMErr(errStr);
return strToData(s_lower.?);
}
pub fn nodeType(node: *Node) NodeType {
pub fn nodeType(node: *Node) !NodeType {
var node_type: c.dom_node_type = undefined;
const err = nodeVtable(node).dom_node_get_node_type.?(node, &node_type);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return @as(NodeType, @enumFromInt(node_type));
}
pub fn nodeFirstChild(node: *Node) ?*Node {
pub fn nodeFirstChild(node: *Node) !?*Node {
var res: ?*Node = null;
const err = nodeVtable(node).dom_node_get_first_child.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodeLastChild(node: *Node) ?*Node {
pub fn nodeLastChild(node: *Node) !?*Node {
var res: ?*Node = null;
const err = nodeVtable(node).dom_node_get_last_child.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodeNextSibling(node: *Node) ?*Node {
pub fn nodeNextSibling(node: *Node) !?*Node {
var res: ?*Node = null;
const err = nodeVtable(node).dom_node_get_next_sibling.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodeNextElementSibling(node: *Node) ?*Element {
pub fn nodeNextElementSibling(node: *Node) !?*Element {
var n = node;
while (true) {
const res = nodeNextSibling(n) orelse return null;
const res = try nodeNextSibling(n);
if (res == null) return null;
if (nodeType(res) == .element) {
return @as(*Element, @ptrCast(res));
if (try nodeType(res.?) == .element) {
return @as(*Element, @ptrCast(res.?));
}
n = res;
n = res.?;
}
return null;
}
pub fn nodePreviousSibling(node: *Node) ?*Node {
pub fn nodePreviousSibling(node: *Node) !?*Node {
var res: ?*Node = null;
const err = nodeVtable(node).dom_node_get_previous_sibling.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodePreviousElementSibling(node: *Node) ?*Element {
pub fn nodePreviousElementSibling(node: *Node) !?*Element {
var n = node;
while (true) {
const res = nodePreviousSibling(n) orelse return null;
if (nodeType(res) == .element) {
return @as(*Element, @ptrCast(res));
const res = try nodePreviousSibling(n);
if (res == null) return null;
if (try nodeType(res.?) == .element) {
return @as(*Element, @ptrCast(res.?));
}
n = res;
n = res.?;
}
return null;
}
pub fn nodeParentNode(node: *Node) ?*Node {
pub fn nodeParentNode(node: *Node) !?*Node {
var res: ?*Node = null;
const err = nodeVtable(node).dom_node_get_parent_node.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodeParentElement(node: *Node) ?*Element {
const res = nodeParentNode(node);
pub fn nodeParentElement(node: *Node) !?*Element {
const res = try nodeParentNode(node);
if (res) |value| {
if (nodeType(value) == .element) {
if (try nodeType(value) == .element) {
return @as(*Element, @ptrCast(value));
}
}
@@ -1267,17 +1269,17 @@ pub fn nodeName(node: *Node) ![]const u8 {
return strToData(s.?);
}
pub fn nodeOwnerDocument(node: *Node) ?*Document {
pub fn nodeOwnerDocument(node: *Node) !?*Document {
var doc: ?*Document = null;
const err = nodeVtable(node).dom_node_get_owner_document.?(node, &doc);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return doc;
}
pub fn nodeValue(node: *Node) ?[]const u8 {
pub fn nodeValue(node: *Node) !?[]const u8 {
var s: ?*String = null;
const err = nodeVtable(node).dom_node_get_node_value.?(node, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) return null;
return strToData(s.?);
}
@@ -1288,14 +1290,14 @@ pub fn nodeSetValue(node: *Node, value: []const u8) !void {
try DOMErr(err);
}
pub fn nodeTextContent(node: *Node) ?[]const u8 {
pub fn nodeTextContent(node: *Node) !?[]const u8 {
var s: ?*String = null;
const err = nodeVtable(node).dom_node_get_text_content.?(node, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) {
// NOTE: it seems that there is a bug in the netsurf implementation:
// an empty Element should return an empty string and not null
if (nodeType(node) == .element) {
if (try nodeType(node) == .element) {
return "";
}
return null;
@@ -1316,10 +1318,10 @@ pub fn nodeGetChildNodes(node: *Node) !*NodeList {
return nlist.?;
}
pub fn nodeGetRootNode(node: *Node) *Node {
pub fn nodeGetRootNode(node: *Node) !*Node {
var root = node;
while (true) {
const parent = nodeParentNode(root);
const parent = try nodeParentNode(root);
if (parent) |parent_| {
root = parent_;
} else break;
@@ -1341,17 +1343,17 @@ pub fn nodeCloneNode(node: *Node, is_deep: bool) !*Node {
return res.?;
}
pub fn nodeContains(node: *Node, other: *Node) bool {
pub fn nodeContains(node: *Node, other: *Node) !bool {
var res: bool = undefined;
const err = c._dom_node_contains(node, other, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
pub fn nodeHasChildNodes(node: *Node) bool {
pub fn nodeHasChildNodes(node: *Node) !bool {
var res: bool = undefined;
const err = nodeVtable(node).dom_node_has_child_nodes.?(node, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
@@ -1366,7 +1368,7 @@ pub fn nodeIsDefaultNamespace(node: *Node, namespace_: ?[]const u8) !bool {
const s = if (namespace_) |n| try strFromData(n) else null;
var res: bool = undefined;
const err = nodeVtable(node).dom_node_is_default_namespace.?(node, s, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
@@ -1377,10 +1379,10 @@ pub fn nodeIsEqualNode(node: *Node, other: *Node) !bool {
return res;
}
pub fn nodeIsSameNode(node: *Node, other: *Node) bool {
pub fn nodeIsSameNode(node: *Node, other: *Node) !bool {
var res: bool = undefined;
const err = nodeVtable(node).dom_node_is_same.?(node, other, &res);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return res;
}
@@ -1420,6 +1422,13 @@ pub fn nodeReplaceChild(node: *Node, new_child: *Node, old_child: *Node) !*Node
return res.?;
}
pub fn nodeHasAttributes(node: *Node) !bool {
var res: bool = undefined;
const err = nodeVtable(node).dom_node_has_attributes.?(node, &res);
try DOMErr(err);
return res;
}
pub fn nodeGetAttributes(node: *Node) !?*NamedNodeMap {
var res: ?*NamedNodeMap = null;
const err = nodeVtable(node).dom_node_get_attributes.?(node, &res);
@@ -1427,18 +1436,18 @@ pub fn nodeGetAttributes(node: *Node) !?*NamedNodeMap {
return res;
}
pub fn nodeGetNamespace(node: *Node) ?[]const u8 {
pub fn nodeGetNamespace(node: *Node) !?[]const u8 {
var s: ?*String = null;
const err = nodeVtable(node).dom_node_get_namespace.?(node, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) return null;
return strToData(s.?);
}
pub fn nodeGetPrefix(node: *Node) ?[]const u8 {
pub fn nodeGetPrefix(node: *Node) !?[]const u8 {
var s: ?*String = null;
const err = nodeVtable(node).dom_node_get_prefix.?(node, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
if (s == null) return null;
return strToData(s.?);
}
@@ -1454,24 +1463,23 @@ pub fn nodeSetEmbedderData(node: *Node, data: *anyopaque) void {
pub fn nodeGetElementById(node: *Node, id: []const u8) !?*Element {
var el: ?*Element = null;
const str_id = try strFromData(id);
const err = c._dom_find_element_by_id(node, str_id, &el);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(c._dom_find_element_by_id(node, str_id, &el));
return el;
}
// nodeToElement is a helper to convert a node to an element.
pub fn nodeToElement(node: *Node) *Element {
pub inline fn nodeToElement(node: *Node) *Element {
return @as(*Element, @ptrCast(node));
}
// nodeToDocument is a helper to convert a node to a document.
pub fn nodeToDocument(node: *Node) *Document {
pub inline fn nodeToDocument(node: *Node) *Document {
return @as(*Document, @ptrCast(node));
}
// Combination of nodeToElement + elementTag
pub fn nodeHTMLGetTagType(node: *Node) !?Tag {
if (nodeType(node) != .element) {
if (try nodeType(node) != .element) {
return null;
}
@@ -1485,14 +1493,14 @@ fn characterDataVtable(data: *CharacterData) c.dom_characterdata_vtable {
return getVtable(c.dom_characterdata_vtable, CharacterData, data);
}
pub fn characterDataToNode(cdata: *CharacterData) *Node {
pub inline fn characterDataToNode(cdata: *CharacterData) *Node {
return @as(*Node, @ptrCast(@alignCast(cdata)));
}
pub fn characterDataData(cdata: *CharacterData) []const u8 {
pub fn characterDataData(cdata: *CharacterData) ![]const u8 {
var s: ?*String = null;
const err = characterDataVtable(cdata).dom_characterdata_get_data.?(cdata, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return strToData(s.?);
}
@@ -1505,7 +1513,7 @@ pub fn characterDataSetData(cdata: *CharacterData, data: []const u8) !void {
pub fn characterDataLength(cdata: *CharacterData) !u32 {
var n: u32 = undefined;
const err = characterDataVtable(cdata).dom_characterdata_get_length.?(cdata, &n);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return n;
}
@@ -1570,7 +1578,7 @@ pub const Comment = c.dom_comment;
pub const ProcessingInstruction = c.dom_processing_instruction;
// processingInstructionToNode is a helper to convert a ProcessingInstruction to a node.
pub fn processingInstructionToNode(pi: *ProcessingInstruction) *Node {
pub inline fn processingInstructionToNode(pi: *ProcessingInstruction) *Node {
return @as(*Node, @ptrCast(@alignCast(pi)));
}
@@ -1625,7 +1633,7 @@ pub fn attributeGetOwnerElement(a: *Attribute) !?*Element {
}
// attributeToNode is a helper to convert an attribute to a node.
pub fn attributeToNode(a: *Attribute) *Node {
pub inline fn attributeToNode(a: *Attribute) *Node {
return @as(*Node, @ptrCast(@alignCast(a)));
}
@@ -1787,7 +1795,7 @@ pub fn elementHasClass(elem: *Element, class: []const u8) !bool {
}
// elementToNode is a helper to convert an element to a node.
pub fn elementToNode(e: *Element) *Node {
pub inline fn elementToNode(e: *Element) *Node {
return @as(*Node, @ptrCast(@alignCast(e)));
}
@@ -1856,14 +1864,14 @@ fn elementHTMLVtable(elem_html: *ElementHTML) c.dom_html_element_vtable {
// HTMLScriptElement
// scriptToElt is a helper to convert a script to an element.
pub fn scriptToElt(s: *Script) *Element {
pub inline fn scriptToElt(s: *Script) *Element {
return @as(*Element, @ptrCast(@alignCast(s)));
}
// HTMLAnchorElement
// anchorToNode is a helper to convert an anchor to a node.
pub fn anchorToNode(a: *Anchor) *Node {
pub inline fn anchorToNode(a: *Anchor) *Node {
return @as(*Node, @ptrCast(@alignCast(a)));
}
@@ -1934,19 +1942,6 @@ pub fn anchorSetRel(a: *Anchor, rel: []const u8) !void {
// HTMLLinkElement
pub fn linkGetRel(link: *Link) ![]const u8 {
var res: ?*String = null;
const err = c.dom_html_link_element_get_rel(link, &res);
try DOMErr(err);
if (res == null) return "";
return strToData(res.?);
}
pub fn linkSetRel(link: *Link, rel: []const u8) !void {
const err = c.dom_html_link_element_set_rel(link, try strFromData(rel));
return DOMErr(err);
}
pub fn linkGetHref(link: *Link) ![]const u8 {
var res: ?*String = null;
const err = c.dom_html_link_element_get_href(link, &res);
@@ -2037,7 +2032,7 @@ pub const OptionCollection = c.dom_html_options_collection;
// Document Fragment
pub const DocumentFragment = c.dom_document_fragment;
pub fn documentFragmentToNode(doc: *DocumentFragment) *Node {
pub inline fn documentFragmentToNode(doc: *DocumentFragment) *Node {
return @as(*Node, @ptrCast(@alignCast(doc)));
}
@@ -2068,29 +2063,29 @@ fn documentTypeVtable(dt: *DocumentType) c.dom_document_type_vtable {
return getVtable(c.dom_document_type_vtable, DocumentType, dt);
}
pub fn documentTypeGetName(dt: *DocumentType) ![]const u8 {
pub inline fn documentTypeGetName(dt: *DocumentType) ![]const u8 {
var s: ?*String = null;
const err = documentTypeVtable(dt).dom_document_type_get_name.?(dt, &s);
try DOMErr(err);
return strToData(s.?);
}
pub fn documentTypeGetPublicId(dt: *DocumentType) []const u8 {
pub inline fn documentTypeGetPublicId(dt: *DocumentType) ![]const u8 {
var s: ?*String = null;
const err = documentTypeVtable(dt).dom_document_type_get_public_id.?(dt, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return strToData(s.?);
}
pub fn documentTypeGetSystemId(dt: *DocumentType) []const u8 {
pub inline fn documentTypeGetSystemId(dt: *DocumentType) ![]const u8 {
var s: ?*String = null;
const err = documentTypeVtable(dt).dom_document_type_get_system_id.?(dt, &s);
std.debug.assert(err == c.DOM_NO_ERR);
try DOMErr(err);
return strToData(s.?);
}
// DOMImplementation
pub fn domImplementationCreateDocument(
pub inline fn domImplementationCreateDocument(
namespace: ?[:0]const u8,
qname: ?[:0]const u8,
doctype: ?*DocumentType,
@@ -2120,7 +2115,7 @@ pub fn domImplementationCreateDocument(
return doc.?;
}
pub fn domImplementationCreateDocumentType(
pub inline fn domImplementationCreateDocumentType(
qname: [:0]const u8,
publicId: [:0]const u8,
systemId: [:0]const u8,
@@ -2131,7 +2126,7 @@ pub fn domImplementationCreateDocumentType(
return dt.?;
}
pub fn domImplementationCreateHTMLDocument(title: ?[]const u8) !*DocumentHTML {
pub inline fn domImplementationCreateHTMLDocument(title: ?[]const u8) !*DocumentHTML {
const doc_html = try documentCreateDocument(title);
const doc = documentHTMLToDocument(doc_html);
@@ -2162,18 +2157,18 @@ fn documentVtable(doc: *Document) c.dom_document_vtable {
return getVtable(c.dom_document_vtable, Document, doc);
}
pub fn documentToNode(doc: *Document) *Node {
pub inline fn documentToNode(doc: *Document) *Node {
return @as(*Node, @ptrCast(@alignCast(doc)));
}
pub fn documentGetElementById(doc: *Document, id: []const u8) !?*Element {
pub inline fn documentGetElementById(doc: *Document, id: []const u8) !?*Element {
var elem: ?*Element = null;
const err = documentVtable(doc).dom_document_get_element_by_id.?(doc, try strFromData(id), &elem);
try DOMErr(err);
return elem;
}
pub fn documentGetElementsByTagName(doc: *Document, tagname: []const u8) !*NodeList {
pub inline fn documentGetElementsByTagName(doc: *Document, tagname: []const u8) !*NodeList {
var nlist: ?*NodeList = null;
const err = documentVtable(doc).dom_document_get_elements_by_tag_name.?(doc, try strFromData(tagname), &nlist);
try DOMErr(err);
@@ -2181,7 +2176,7 @@ pub fn documentGetElementsByTagName(doc: *Document, tagname: []const u8) !*NodeL
}
// documentGetDocumentElement returns the root document element.
pub fn documentGetDocumentElement(doc: *Document) !?*Element {
pub inline fn documentGetDocumentElement(doc: *Document) !?*Element {
var elem: ?*Element = null;
const err = documentVtable(doc).dom_document_get_document_element.?(doc, &elem);
try DOMErr(err);
@@ -2189,7 +2184,7 @@ pub fn documentGetDocumentElement(doc: *Document) !?*Element {
return elem.?;
}
pub fn documentGetDocumentURI(doc: *Document) ![]const u8 {
pub inline fn documentGetDocumentURI(doc: *Document) ![]const u8 {
var s: ?*String = null;
const err = documentVtable(doc).dom_document_get_uri.?(doc, &s);
try DOMErr(err);
@@ -2201,19 +2196,19 @@ pub fn documentSetDocumentURI(doc: *Document, uri: []const u8) !void {
try DOMErr(err);
}
pub fn documentGetInputEncoding(doc: *Document) ![]const u8 {
pub inline fn documentGetInputEncoding(doc: *Document) ![]const u8 {
var s: ?*String = null;
const err = documentVtable(doc).dom_document_get_input_encoding.?(doc, &s);
try DOMErr(err);
return strToData(s.?);
}
pub fn documentSetInputEncoding(doc: *Document, enc: []const u8) !void {
pub inline fn documentSetInputEncoding(doc: *Document, enc: []const u8) !void {
const err = documentVtable(doc).dom_document_set_input_encoding.?(doc, try strFromData(enc));
try DOMErr(err);
}
pub fn documentCreateDocument(title: ?[]const u8) !*DocumentHTML {
pub inline fn documentCreateDocument(title: ?[]const u8) !*DocumentHTML {
var doc: ?*Document = null;
const err = c.dom_implementation_create_document(
c.DOM_IMPLEMENTATION_HTML,
@@ -2280,42 +2275,42 @@ pub fn documentCreateElementNS(doc: *Document, ns: []const u8, tag_name: []const
return elem.?;
}
pub fn documentGetDoctype(doc: *Document) !?*DocumentType {
pub inline fn documentGetDoctype(doc: *Document) !?*DocumentType {
var dt: ?*DocumentType = null;
const err = documentVtable(doc).dom_document_get_doctype.?(doc, &dt);
try DOMErr(err);
return dt;
}
pub fn documentCreateDocumentFragment(doc: *Document) !*DocumentFragment {
pub inline fn documentCreateDocumentFragment(doc: *Document) !*DocumentFragment {
var df: ?*DocumentFragment = null;
const err = documentVtable(doc).dom_document_create_document_fragment.?(doc, &df);
try DOMErr(err);
return df.?;
}
pub fn documentCreateTextNode(doc: *Document, s: []const u8) !*Text {
pub inline fn documentCreateTextNode(doc: *Document, s: []const u8) !*Text {
var txt: ?*Text = null;
const err = documentVtable(doc).dom_document_create_text_node.?(doc, try strFromData(s), &txt);
try DOMErr(err);
return txt.?;
}
pub fn documentCreateCDATASection(doc: *Document, s: []const u8) !*CDATASection {
pub inline fn documentCreateCDATASection(doc: *Document, s: []const u8) !*CDATASection {
var cdata: ?*CDATASection = null;
const err = documentVtable(doc).dom_document_create_cdata_section.?(doc, try strFromData(s), &cdata);
try DOMErr(err);
return cdata.?;
}
pub fn documentCreateComment(doc: *Document, s: []const u8) !*Comment {
pub inline fn documentCreateComment(doc: *Document, s: []const u8) !*Comment {
var com: ?*Comment = null;
const err = documentVtable(doc).dom_document_create_comment.?(doc, try strFromData(s), &com);
try DOMErr(err);
return com.?;
}
pub fn documentCreateProcessingInstruction(doc: *Document, target: []const u8, data: []const u8) !*ProcessingInstruction {
pub inline fn documentCreateProcessingInstruction(doc: *Document, target: []const u8, data: []const u8) !*ProcessingInstruction {
var pi: ?*ProcessingInstruction = null;
const err = documentVtable(doc).dom_document_create_processing_instruction.?(
doc,
@@ -2327,7 +2322,7 @@ pub fn documentCreateProcessingInstruction(doc: *Document, target: []const u8, d
return pi.?;
}
pub fn documentImportNode(doc: *Document, node: *Node, deep: bool) !*Node {
pub inline fn documentImportNode(doc: *Document, node: *Node, deep: bool) !*Node {
var res: NodeExternal = undefined;
const nodeext = toNodeExternal(Node, node);
const err = documentVtable(doc).dom_document_import_node.?(doc, nodeext, deep, &res);
@@ -2335,7 +2330,7 @@ pub fn documentImportNode(doc: *Document, node: *Node, deep: bool) !*Node {
return @as(*Node, @ptrCast(@alignCast(res)));
}
pub fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
pub inline fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
var res: NodeExternal = undefined;
const nodeext = toNodeExternal(Node, node);
const err = documentVtable(doc).dom_document_adopt_node.?(doc, nodeext, &res);
@@ -2343,14 +2338,14 @@ pub fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
return @as(*Node, @ptrCast(@alignCast(res)));
}
pub fn documentCreateAttribute(doc: *Document, name: []const u8) !*Attribute {
pub inline fn documentCreateAttribute(doc: *Document, name: []const u8) !*Attribute {
var attr: ?*Attribute = null;
const err = documentVtable(doc).dom_document_create_attribute.?(doc, try strFromData(name), &attr);
try DOMErr(err);
return attr.?;
}
pub fn documentCreateAttributeNS(doc: *Document, ns: []const u8, qname: []const u8) !*Attribute {
pub inline fn documentCreateAttributeNS(doc: *Document, ns: []const u8, qname: []const u8) !*Attribute {
var attr: ?*Attribute = null;
const err = documentVtable(doc).dom_document_create_attribute_ns.?(
doc,
@@ -2374,7 +2369,7 @@ pub fn documentSetScriptAddedCallback(
pub const DocumentHTML = c.dom_html_document;
// documentHTMLToNode is a helper to convert a documentHTML to a node.
pub fn documentHTMLToNode(doc: *DocumentHTML) *Node {
pub inline fn documentHTMLToNode(doc: *DocumentHTML) *Node {
return @as(*Node, @ptrCast(@alignCast(doc)));
}
@@ -2524,11 +2519,11 @@ pub fn documentHTMLClose(doc: *DocumentHTML) !void {
try DOMErr(err);
}
pub fn documentHTMLToDocument(doc_html: *DocumentHTML) *Document {
pub inline fn documentHTMLToDocument(doc_html: *DocumentHTML) *Document {
return @as(*Document, @ptrCast(doc_html));
}
pub fn documentHTMLBody(doc_html: *DocumentHTML) !?*Body {
pub inline fn documentHTMLBody(doc_html: *DocumentHTML) !?*Body {
var body: ?*ElementHTML = null;
const err = documentHTMLVtable(doc_html).get_body.?(doc_html, &body);
try DOMErr(err);
@@ -2536,16 +2531,16 @@ pub fn documentHTMLBody(doc_html: *DocumentHTML) !?*Body {
return @as(*Body, @ptrCast(body.?));
}
pub fn bodyToElement(body: *Body) *Element {
pub inline fn bodyToElement(body: *Body) *Element {
return @as(*Element, @ptrCast(@alignCast(body)));
}
pub fn documentHTMLSetBody(doc_html: *DocumentHTML, elt: ?*ElementHTML) !void {
pub inline fn documentHTMLSetBody(doc_html: *DocumentHTML, elt: ?*ElementHTML) !void {
const err = documentHTMLVtable(doc_html).set_body.?(doc_html, elt);
try DOMErr(err);
}
pub fn documentHTMLGetReferrer(doc: *DocumentHTML) ![]const u8 {
pub inline fn documentHTMLGetReferrer(doc: *DocumentHTML) ![]const u8 {
var s: ?*String = null;
const err = documentHTMLVtable(doc).get_referrer.?(doc, &s);
try DOMErr(err);
@@ -2553,7 +2548,7 @@ pub fn documentHTMLGetReferrer(doc: *DocumentHTML) ![]const u8 {
return strToData(s.?);
}
pub fn documentHTMLGetTitle(doc: *DocumentHTML) ![]const u8 {
pub inline fn documentHTMLGetTitle(doc: *DocumentHTML) ![]const u8 {
var s: ?*String = null;
const err = documentHTMLVtable(doc).get_title.?(doc, &s);
try DOMErr(err);
@@ -2561,7 +2556,7 @@ pub fn documentHTMLGetTitle(doc: *DocumentHTML) ![]const u8 {
return strToData(s.?);
}
pub fn documentHTMLSetTitle(doc: *DocumentHTML, v: []const u8) !void {
pub inline fn documentHTMLSetTitle(doc: *DocumentHTML, v: []const u8) !void {
const err = documentHTMLVtable(doc).set_title.?(doc, try strFromData(v));
try DOMErr(err);
}
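
The hunks above replace `std.debug.assert(err == c.DOM_NO_ERR)` with `try DOMErr(err)` across the netsurf bindings, so a bad status code surfaces to the caller as a Zig error instead of a debug-only assertion. A minimal, self-contained sketch of that pattern, with invented names (DomStatus, DomError, checkStatus, getNodeValue) standing in for the real bindings:

const std = @import("std");

// Illustrative status codes and error set; the real DOMErr maps many more
// c.DOM_* codes.
const DomStatus = enum(u8) { no_err = 0, not_found = 1, no_mem = 2 };
const DomError = error{ NotFound, OutOfMemory };

fn checkStatus(status: DomStatus) DomError!void {
    switch (status) {
        .no_err => {},
        .not_found => return error.NotFound,
        .no_mem => return error.OutOfMemory,
    }
}

// Before this change a bad status hit std.debug.assert (a panic in safe
// builds); now the function returns an error union and the caller decides.
fn getNodeValue(status_from_c: DomStatus, value: ?[]const u8) DomError!?[]const u8 {
    try checkStatus(status_from_c);
    return value;
}

test "status codes surface as Zig errors" {
    const v = try getNodeValue(.no_err, "x");
    try std.testing.expect(v != null);
    try std.testing.expectError(error.NotFound, getNodeValue(.not_found, null));
}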

View File

@@ -181,7 +181,7 @@ pub const Page = struct {
// set to include element shadow roots in the dump
page: ?*const Page = null,
with_base: bool = false,
strip_mode: Dump.Opts.StripMode = .{},
exclude_scripts: bool = false,
};
// dump writes the page content into the given file.
@@ -198,12 +198,12 @@ pub const Page = struct {
// returns the <pre> element from the HTML
const doc = parser.documentHTMLToDocument(self.window.document);
const list = try parser.documentGetElementsByTagName(doc, "pre");
const pre = parser.nodeListItem(list, 0) orelse return error.InvalidHTML;
const pre = try parser.nodeListItem(list, 0) orelse return error.InvalidHTML;
const walker = Walker{};
var next: ?*parser.Node = null;
while (true) {
next = try walker.get_next(pre, next) orelse break;
const v = parser.nodeTextContent(next.?) orelse return;
const v = try parser.nodeTextContent(next.?) orelse return;
try out.writeAll(v);
}
return;
@@ -228,7 +228,7 @@ pub const Page = struct {
try Dump.writeHTML(doc, .{
.page = opts.page,
.strip_mode = opts.strip_mode,
.exclude_scripts = opts.exclude_scripts,
}, out);
}
@@ -241,7 +241,7 @@ pub const Page = struct {
// find <head> tag
const list = try parser.documentGetElementsByTagName(doc, "head");
const head = parser.nodeListItem(list, 0) orelse return;
const head = try parser.nodeListItem(list, 0) orelse return;
const base = try parser.documentCreateElement(doc, "base");
try parser.elementSetAttribute(base, "href", self.url.raw);
@@ -312,12 +312,6 @@ pub const Page = struct {
// mode with an extra socket. Either way, we're waiting
// for http traffic
if (try http_client.tick(ms_remaining) == .extra_socket) {
// exit_when_done is explicitly set when there isn't
// an extra socket, so it should not be possible to
// get an extra_socket message when exit_when_done
// is true.
std.debug.assert(exit_when_done == false);
// data on a socket we aren't handling, return to caller
return .extra_socket;
}
@@ -353,8 +347,11 @@ pub const Page = struct {
std.debug.assert(http_client.intercepted == 0);
const ms = ms_to_next_task orelse blk: {
const min_wait = if (comptime builtin.is_test) 50 else 100;
if (wait_ms - ms_remaining < min_wait) {
// TODO: when jsRunner is fully replaced with the
// htmlRunner, we can remove the first part of this
// condition. jsRunner calls `page.wait` far too
// often to enforce this.
if (wait_ms > 100 and wait_ms - ms_remaining < 100) {
// Look, we want to exit ASAP, but we don't want
// to exit so fast that we've run none of the
// background jobs.
@@ -551,7 +548,7 @@ pub const Page = struct {
const owned_url = try self.arena.dupeZ(u8, request_url);
self.url = try URL.parse(owned_url, null);
var headers = try self.http_client.newHeaders();
var headers = try Http.Headers.init();
if (opts.header) |hdr| try headers.add(hdr);
try self.requestCookie(.{ .is_navigation = true }).headersForRequest(self.arena, owned_url, &headers);
@@ -1083,9 +1080,9 @@ pub const Page = struct {
try self.navigateFromWebAPI(action, opts);
}
pub fn isNodeAttached(self: *const Page, node: *parser.Node) bool {
pub fn isNodeAttached(self: *const Page, node: *parser.Node) !bool {
const root = parser.documentToNode(parser.documentHTMLToDocument(self.window.document));
return root == parser.nodeGetRootNode(node);
return root == try parser.nodeGetRootNode(node);
}
fn elementSubmitForm(self: *Page, element: *parser.Element) !void {

View File

@@ -22,6 +22,7 @@ const Allocator = std.mem.Allocator;
const Env = @import("env.zig").Env;
const Page = @import("page.zig").Page;
const URL = @import("../url.zig").URL;
const Browser = @import("browser.zig").Browser;
const NavigateOpts = @import("page.zig").NavigateOpts;
@@ -93,7 +94,7 @@ pub const Session = struct {
// Start netsurf memory arena.
// We need to init this early as JS event handlers may be registered through Runtime.evaluate before the first html doc is loaded
parser.init();
try parser.init();
const page_arena = &self.browser.page_arena;
_ = page_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });

View File

@@ -18,6 +18,8 @@
const std = @import("std");
const DOMError = @import("../netsurf.zig").DOMError;
pub const cookie = @import("cookie.zig");
pub const Cookie = cookie.Cookie;
pub const CookieJar = cookie.Jar;

View File

@@ -78,15 +78,11 @@ const QueueingStrategy = struct {
pub fn constructor(underlying: ?UnderlyingSource, _strategy: ?QueueingStrategy, page: *Page) !*ReadableStream {
const strategy: QueueingStrategy = _strategy orelse .{};
const cancel_resolver = try page.main_context.createPersistentPromiseResolver(.self);
const closed_resolver = try page.main_context.createPersistentPromiseResolver(.self);
const cancel_resolver = page.main_context.createPersistentPromiseResolver();
const closed_resolver = page.main_context.createPersistentPromiseResolver();
const stream = try page.arena.create(ReadableStream);
stream.* = ReadableStream{
.cancel_resolver = cancel_resolver,
.closed_resolver = closed_resolver,
.strategy = strategy,
};
stream.* = ReadableStream{ .cancel_resolver = cancel_resolver, .closed_resolver = closed_resolver, .strategy = strategy };
const controller = ReadableStreamDefaultController{ .stream = stream };
@@ -112,7 +108,10 @@ pub fn constructor(underlying: ?UnderlyingSource, _strategy: ?QueueingStrategy,
pub fn destructor(self: *ReadableStream) void {
self.cancel_resolver.deinit();
self.closed_resolver.deinit();
// reader resolver is scoped to the page lifetime and is cleaned up by it.
if (self.reader_resolver) |*rr| {
rr.deinit();
}
}
pub fn get_locked(self: *const ReadableStream) bool {

View File

@@ -40,6 +40,7 @@ pub fn _close(self: *ReadableStreamDefaultController, _reason: ?[]const u8, page
// Resolve the Reader Promise
if (self.stream.reader_resolver) |*rr| {
defer rr.deinit();
try rr.resolve(ReadableStreamReadResult{ .value = .empty, .done = true });
self.stream.reader_resolver = null;
}
@@ -61,6 +62,7 @@ pub fn _enqueue(self: *ReadableStreamDefaultController, chunk: []const u8, page:
const duped_chunk = try page.arena.dupe(u8, chunk);
if (self.stream.reader_resolver) |*rr| {
defer rr.deinit();
try rr.resolve(ReadableStreamReadResult{ .value = .{ .data = duped_chunk }, .done = false });
self.stream.reader_resolver = null;
}
@@ -73,6 +75,7 @@ pub fn _error(self: *ReadableStreamDefaultController, err: Env.JsObject) !void {
self.stream.state = .{ .errored = err };
if (self.stream.reader_resolver) |*rr| {
defer rr.deinit();
try rr.reject(err);
self.stream.reader_resolver = null;
}

View File

@@ -56,7 +56,7 @@ pub fn _read(self: *const ReadableStreamDefaultReader, page: *Page) !Env.Promise
if (self.stream.reader_resolver) |rr| {
return rr.promise();
} else {
const persistent_resolver = try page.main_context.createPersistentPromiseResolver(.page);
const persistent_resolver = page.main_context.createPersistentPromiseResolver();
self.stream.reader_resolver = persistent_resolver;
return persistent_resolver.promise();
}

View File

@@ -23,6 +23,7 @@ const parser = @import("../netsurf.zig");
const Env = @import("../env.zig").Env;
const Page = @import("../page.zig").Page;
const FormData = @import("../xhr/form_data.zig").FormData;
const HTMLElement = @import("../html/elements.zig").HTMLElement;
const kv = @import("../key_value.zig");
const iterator = @import("../iterator/iterator.zig");

View File

@@ -31,6 +31,7 @@ const Mime = @import("../mime.zig").Mime;
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const Http = @import("../../http/Http.zig");
const CookieJar = @import("../storage/storage.zig").CookieJar;
// XHR interfaces
// https://xhr.spec.whatwg.org/#interface-xmlhttprequest
@@ -369,7 +370,7 @@ pub const XMLHttpRequest = struct {
}
}
var headers = try page.http_client.newHeaders();
var headers = try Http.Headers.init();
for (self.headers.items) |hdr| {
try headers.add(hdr);
}

View File

@@ -35,7 +35,7 @@ pub const XMLSerializer = struct {
pub fn _serializeToString(_: *const XMLSerializer, root: *parser.Node, page: *Page) ![]const u8 {
var aw = std.Io.Writer.Allocating.init(page.call_arena);
switch (parser.nodeType(root)) {
switch (try parser.nodeType(root)) {
.document => try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), .{}, &aw.writer),
.document_type => try dump.writeDocType(@as(*parser.DocumentType, @ptrCast(root)), &aw.writer),
else => try dump.writeNode(root, .{}, &aw.writer),

View File

@@ -42,7 +42,7 @@ pub const Registry = struct {
pub fn init(allocator: Allocator) Registry {
return .{
.node_id = 1,
.node_id = 0,
.lookup_by_id = .{},
.lookup_by_node = .{},
.allocator = allocator,
@@ -243,13 +243,13 @@ pub const Writer = struct {
fn writeChildren(self: *const Writer, node: *const Node, depth: usize, w: anytype) anyerror!usize {
var registry = self.registry;
const child_nodes = try parser.nodeGetChildNodes(node._node);
const child_count = parser.nodeListLength(child_nodes);
const child_count = try parser.nodeListLength(child_nodes);
const full_child = self.depth < 0 or self.depth < depth;
var i: usize = 0;
try w.beginArray();
for (0..child_count) |_| {
const child = (parser.nodeListItem(child_nodes, @intCast(i))) orelse break;
const child = (try parser.nodeListItem(child_nodes, @intCast(i))) orelse break;
const child_node = try registry.register(child);
if (full_child) {
try self.toJSON(child_node, depth + 1, w);
@@ -275,7 +275,7 @@ pub const Writer = struct {
const n = node._node;
if (parser.nodeParentNode(n)) |p| {
if (try parser.nodeParentNode(n)) |p| {
const parent_node = try self.registry.register(p);
try w.objectField("parentId");
try w.write(parent_node.id);
@@ -295,7 +295,7 @@ pub const Writer = struct {
}
try w.objectField("nodeType");
try w.write(@intFromEnum(parser.nodeType(n)));
try w.write(@intFromEnum(try parser.nodeType(n)));
try w.objectField("nodeName");
try w.write(try parser.nodeName(n));
@@ -304,12 +304,12 @@ pub const Writer = struct {
try w.write(try parser.nodeLocalName(n));
try w.objectField("nodeValue");
try w.write((parser.nodeValue(n)) orelse "");
try w.write((try parser.nodeValue(n)) orelse "");
if (include_child_count) {
try w.objectField("childNodeCount");
const child_nodes = try parser.nodeGetChildNodes(n);
try w.write(parser.nodeListLength(child_nodes));
try w.write(try parser.nodeListLength(child_nodes));
}
try w.objectField("documentURL");
@@ -331,7 +331,7 @@ pub const Writer = struct {
const testing = @import("testing.zig");
test "cdp Node: Registry register" {
parser.init();
try parser.init();
defer parser.deinit();
var registry = Registry.init(testing.allocator);
@@ -346,6 +346,18 @@ test "cdp Node: Registry register" {
{
const n = (try doc.querySelector("#a1")).?;
const node = try registry.register(n);
const n1b = registry.lookup_by_id.get(0).?;
const n1c = registry.lookup_by_node.get(node._node).?;
try testing.expectEqual(node, n1b);
try testing.expectEqual(node, n1c);
try testing.expectEqual(0, node.id);
try testing.expectEqual(n, node._node);
}
{
const n = (try doc.querySelector("p")).?;
const node = try registry.register(n);
const n1b = registry.lookup_by_id.get(1).?;
const n1c = registry.lookup_by_node.get(node._node).?;
try testing.expectEqual(node, n1b);
@@ -354,22 +366,10 @@ test "cdp Node: Registry register" {
try testing.expectEqual(1, node.id);
try testing.expectEqual(n, node._node);
}
{
const n = (try doc.querySelector("p")).?;
const node = try registry.register(n);
const n1b = registry.lookup_by_id.get(2).?;
const n1c = registry.lookup_by_node.get(node._node).?;
try testing.expectEqual(node, n1b);
try testing.expectEqual(node, n1c);
try testing.expectEqual(2, node.id);
try testing.expectEqual(n, node._node);
}
}
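
The Registry now starts its id counter at 0 instead of 1, which is why the assertions above (and the CDP DOM tests further down) expect nodeId/backendNodeId values shifted down by one. A tiny self-contained sketch of just the counter behaviour; the real Registry also maintains the lookup_by_id and lookup_by_node maps:

const std = @import("std");

// Only models the id assignment; node registration itself is omitted.
const IdCounter = struct {
    next: u32 = 0,

    fn assign(self: *IdCounter) u32 {
        const id = self.next;
        self.next += 1;
        return id;
    }
};

test "ids start at zero" {
    var counter = IdCounter{};
    try std.testing.expectEqual(@as(u32, 0), counter.assign());
    try std.testing.expectEqual(@as(u32, 1), counter.assign());
}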
test "cdp Node: search list" {
parser.init();
try parser.init();
defer parser.deinit();
var registry = Registry.init(testing.allocator);
@@ -404,18 +404,18 @@ test "cdp Node: search list" {
const s1 = try search_list.create(try doc.querySelectorAll("a"));
try testing.expectEqual("1", s1.name);
try testing.expectEqualSlices(u32, &.{ 1, 2 }, s1.node_ids);
try testing.expectEqualSlices(u32, &.{ 0, 1 }, s1.node_ids);
try testing.expectEqual(2, registry.lookup_by_id.count());
try testing.expectEqual(2, registry.lookup_by_node.count());
const s2 = try search_list.create(try doc.querySelectorAll("#a1"));
try testing.expectEqual("2", s2.name);
try testing.expectEqualSlices(u32, &.{1}, s2.node_ids);
try testing.expectEqualSlices(u32, &.{0}, s2.node_ids);
const s3 = try search_list.create(try doc.querySelectorAll("#a2"));
try testing.expectEqual("3", s3.name);
try testing.expectEqualSlices(u32, &.{2}, s3.node_ids);
try testing.expectEqualSlices(u32, &.{1}, s3.node_ids);
try testing.expectEqual(2, registry.lookup_by_id.count());
try testing.expectEqual(2, registry.lookup_by_node.count());
@@ -423,7 +423,7 @@ test "cdp Node: search list" {
}
test "cdp Node: Writer" {
parser.init();
try parser.init();
defer parser.deinit();
var registry = Registry.init(testing.allocator);
@@ -443,8 +443,8 @@ test "cdp Node: Writer" {
defer testing.allocator.free(json);
try testing.expectJson(.{
.nodeId = 1,
.backendNodeId = 1,
.nodeId = 0,
.backendNodeId = 0,
.nodeType = 9,
.nodeName = "#document",
.localName = "",
@@ -456,8 +456,8 @@ test "cdp Node: Writer" {
.compatibilityMode = "NoQuirksMode",
.childNodeCount = 1,
.children = &.{.{
.nodeId = 2,
.backendNodeId = 2,
.nodeId = 1,
.backendNodeId = 1,
.nodeType = 1,
.nodeName = "HTML",
.localName = "html",
@@ -473,7 +473,7 @@ test "cdp Node: Writer" {
}
{
const node = registry.lookup_by_id.get(2).?;
const node = registry.lookup_by_id.get(1).?;
const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
.root = node,
.depth = 1,
@@ -483,8 +483,8 @@ test "cdp Node: Writer" {
defer testing.allocator.free(json);
try testing.expectJson(.{
.nodeId = 2,
.backendNodeId = 2,
.nodeId = 1,
.backendNodeId = 1,
.nodeType = 1,
.nodeName = "HTML",
.localName = "html",
@@ -496,8 +496,8 @@ test "cdp Node: Writer" {
.compatibilityMode = "NoQuirksMode",
.isScrollable = false,
.children = &.{ .{
.nodeId = 3,
.backendNodeId = 3,
.nodeId = 2,
.backendNodeId = 2,
.nodeType = 1,
.nodeName = "HEAD",
.localName = "head",
@@ -508,10 +508,10 @@ test "cdp Node: Writer" {
.xmlVersion = "",
.compatibilityMode = "NoQuirksMode",
.isScrollable = false,
.parentId = 2,
.parentId = 1,
}, .{
.nodeId = 4,
.backendNodeId = 4,
.nodeId = 3,
.backendNodeId = 3,
.nodeType = 1,
.nodeName = "BODY",
.localName = "body",
@@ -522,13 +522,13 @@ test "cdp Node: Writer" {
.xmlVersion = "",
.compatibilityMode = "NoQuirksMode",
.isScrollable = false,
.parentId = 2,
.parentId = 1,
} },
}, json);
}
{
const node = registry.lookup_by_id.get(2).?;
const node = registry.lookup_by_id.get(1).?;
const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
.root = node,
.depth = -1,
@@ -538,8 +538,8 @@ test "cdp Node: Writer" {
defer testing.allocator.free(json);
try testing.expectJson(&.{ .{
.nodeId = 3,
.backendNodeId = 3,
.nodeId = 2,
.backendNodeId = 2,
.nodeType = 1,
.nodeName = "HEAD",
.localName = "head",
@@ -550,10 +550,10 @@ test "cdp Node: Writer" {
.xmlVersion = "",
.compatibilityMode = "NoQuirksMode",
.isScrollable = false,
.parentId = 2,
.parentId = 1,
}, .{
.nodeId = 4,
.backendNodeId = 4,
.nodeId = 3,
.backendNodeId = 3,
.nodeType = 1,
.nodeName = "BODY",
.localName = "body",
@@ -565,20 +565,20 @@ test "cdp Node: Writer" {
.compatibilityMode = "NoQuirksMode",
.isScrollable = false,
.children = &.{ .{
.nodeId = 5,
.nodeId = 4,
.localName = "a",
.childNodeCount = 0,
.parentId = 4,
.parentId = 3,
}, .{
.nodeId = 6,
.nodeId = 5,
.localName = "div",
.childNodeCount = 1,
.parentId = 4,
.parentId = 3,
.children = &.{.{
.nodeId = 7,
.nodeId = 6,
.localName = "a",
.childNodeCount = 0,
.parentId = 6,
.parentId = 5,
}},
} },
} }, json);

View File

@@ -29,6 +29,7 @@ const Page = @import("../browser/page.zig").Page;
const Inspector = @import("../browser/env.zig").Env.Inspector;
const Incrementing = @import("../id.zig").Incrementing;
const Notification = @import("../notification.zig").Notification;
const NetworkState = @import("domains/network.zig").NetworkState;
const InterceptState = @import("domains/fetch.zig").InterceptState;
const polyfill = @import("../browser/polyfill/polyfill.zig");
@@ -150,18 +151,18 @@ pub fn CDPT(comptime TypeProvider: type) type {
if (std.mem.eql(u8, input_session_id, "STARTUP")) {
is_startup = true;
} else if (self.isValidSessionId(input_session_id) == false) {
return command.sendError(-32001, "Unknown sessionId", .{});
return command.sendError(-32001, "Unknown sessionId");
}
}
if (is_startup) {
dispatchStartupCommand(&command) catch |err| {
command.sendError(-31999, @errorName(err), .{}) catch {};
command.sendError(-31999, @errorName(err)) catch {};
return err;
};
} else {
dispatchCommand(&command, input.method) catch |err| {
command.sendError(-31998, @errorName(err), .{}) catch {};
command.sendError(-31998, @errorName(err)) catch {};
return err;
};
}
@@ -330,7 +331,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
node_search_list: Node.Search.List,
inspector: Inspector,
isolated_worlds: std.ArrayListUnmanaged(IsolatedWorld),
isolated_world: ?IsolatedWorld,
http_proxy_changed: bool = false,
@@ -374,7 +375,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
.page_life_cycle_events = false, // TODO; Target based value
.node_registry = registry,
.node_search_list = undefined,
.isolated_worlds = .empty,
.isolated_world = null,
.inspector = inspector,
.notification_arena = cdp.notification_arena.allocator(),
.intercept_state = try InterceptState.init(allocator),
@@ -403,10 +404,9 @@ pub fn BrowserContext(comptime CDP_T: type) type {
// so we need to shutdown the page one first.
self.cdp.browser.closeSession();
for (self.isolated_worlds.items) |*world| {
if (self.isolated_world) |*world| {
world.deinit();
}
self.isolated_worlds.clearRetainingCapacity();
self.node_registry.deinit();
self.node_search_list.deinit();
self.cdp.browser.notification.unregisterAll(self);
@@ -427,19 +427,19 @@ pub fn BrowserContext(comptime CDP_T: type) type {
}
pub fn createIsolatedWorld(self: *Self, world_name: []const u8, grant_universal_access: bool) !*IsolatedWorld {
if (self.isolated_world != null) {
return error.CurrentlyOnly1IsolatedWorldSupported;
}
var executor = try self.cdp.browser.env.newExecutionWorld();
errdefer executor.deinit();
const owned_name = try self.arena.dupe(u8, world_name);
const world = try self.isolated_worlds.addOne(self.arena);
world.* = .{
.name = owned_name,
self.isolated_world = .{
.name = try self.arena.dupe(u8, world_name),
.executor = executor,
.grant_universal_access = grant_universal_access,
};
return world;
return &self.isolated_world.?;
}
pub fn nodeWriter(self: *Self, root: *const Node, opts: Node.Writer.Opts) Node.Writer {
@@ -682,14 +682,7 @@ const IsolatedWorld = struct {
// This also means this pointer becomes invalid after removePage until a new page is created.
// Currently we have only 1 page/frame and thus also only 1 state in the isolated world.
pub fn createContext(self: *IsolatedWorld, page: *Page) !void {
// if (self.executor.js_context != null) return error.Only1IsolatedContextSupported;
if (self.executor.js_context != null) {
log.warn(.cdp, "not implemented", .{
.feature = "createContext: Not implemented second isolated context creation",
.info = "reuse existing context",
});
return;
}
if (self.executor.js_context != null) return error.Only1IsolatedContextSupported;
_ = try self.executor.createJsContext(
&page.window,
page,
@@ -698,14 +691,6 @@ const IsolatedWorld = struct {
Env.GlobalMissingCallback.init(&self.polyfill_loader),
);
}
pub fn createContextAndLoadPolyfills(self: *IsolatedWorld, arena: Allocator, page: *Page) !void {
// We need to recreate the isolated world context
try self.createContext(page);
const loader = @import("../browser/polyfill/polyfill.zig");
try loader.preload(arena, &self.executor.js_context.?);
}
};
// This is a generic because when we send a result we have two different
@@ -772,14 +757,10 @@ pub fn Command(comptime CDP_T: type, comptime Sender: type) type {
return self.cdp.sendEvent(method, p, opts);
}
const SendErrorOpts = struct {
include_session_id: bool = true,
};
pub fn sendError(self: *Self, code: i32, message: []const u8, opts: SendErrorOpts) !void {
pub fn sendError(self: *Self, code: i32, message: []const u8) !void {
return self.sender.sendJSON(.{
.id = self.input.id,
.@"error" = .{ .code = code, .message = message },
.sessionId = if (opts.include_session_id) self.input.session_id else null,
});
}

View File

@@ -17,7 +17,6 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const log = @import("../../log.zig");
const Allocator = std.mem.Allocator;
const Node = @import("../Node.zig");
const css = @import("../../browser/dom/css.zig");
@@ -40,7 +39,6 @@ pub fn processMessage(cmd: anytype) !void {
getContentQuads,
getBoxModel,
requestChildNodes,
getFrameOwner,
}, cmd.input.action) orelse return error.UnknownMethod;
switch (action) {
@@ -57,7 +55,6 @@ pub fn processMessage(cmd: anytype) !void {
.getContentQuads => return getContentQuads(cmd),
.getBoxModel => return getBoxModel(cmd),
.requestChildNodes => return requestChildNodes(cmd),
.getFrameOwner => return getFrameOwner(cmd),
}
}
@@ -70,10 +67,6 @@ fn getDocument(cmd: anytype) !void {
};
const params = try cmd.params(Params) orelse Params{};
if (params.pierce) {
log.warn(.cdp, "not implemented", .{ .feature = "DOM.getDocument: Not implemented pierce parameter" });
}
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
const doc = parser.documentHTMLToDocument(page.window.document);
@@ -122,7 +115,7 @@ fn dispatchSetChildNodes(cmd: anytype, nodes: []*parser.Node) !void {
for (nodes) |_n| {
var n = _n;
while (true) {
const p = parser.nodeParentNode(n) orelse break;
const p = try parser.nodeParentNode(n) orelse break;
// Register the node.
const node = try bc.node_registry.register(p);
@@ -151,7 +144,7 @@ fn dispatchSetChildNodes(cmd: anytype, nodes: []*parser.Node) !void {
// If the node has no parent, it's the root node.
// We don't dispatch an event for it because we assume the root node is
// dispatched via the DOM.getDocument command.
const p = parser.nodeParentNode(node._node) orelse {
const p = try parser.nodeParentNode(node._node) orelse {
continue;
};
@@ -213,9 +206,7 @@ fn querySelector(cmd: anytype) !void {
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const node = bc.node_registry.lookup_by_id.get(params.nodeId) orelse {
return cmd.sendError(-32000, "Could not find node with given id", .{});
};
const node = bc.node_registry.lookup_by_id.get(params.nodeId) orelse return error.UnknownNode;
const selected_node = try css.querySelector(
cmd.arena,
@@ -242,9 +233,7 @@ fn querySelectorAll(cmd: anytype) !void {
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const node = bc.node_registry.lookup_by_id.get(params.nodeId) orelse {
return cmd.sendError(-32000, "Could not find node with given id", .{});
};
const node = bc.node_registry.lookup_by_id.get(params.nodeId) orelse return error.UnknownNode;
const arena = cmd.arena;
const selected_nodes = try css.querySelectorAll(arena, node._node, params.selector);
@@ -277,12 +266,10 @@ fn resolveNode(cmd: anytype) !void {
var js_context = page.main_context;
if (params.executionContextId) |context_id| {
if (js_context.v8_context.debugContextId() != context_id) {
for (bc.isolated_worlds.items) |*isolated_world| {
js_context = &(isolated_world.executor.js_context orelse return error.ContextNotFound);
if (js_context.v8_context.debugContextId() == context_id) {
break;
}
} else return error.ContextNotFound;
var isolated_world = bc.isolated_world orelse return error.ContextNotFound;
js_context = &(isolated_world.executor.js_context orelse return error.ContextNotFound);
if (js_context.v8_context.debugContextId() != context_id) return error.ContextNotFound;
}
}
@@ -313,18 +300,16 @@ fn describeNode(cmd: anytype) !void {
nodeId: ?Node.Id = null,
backendNodeId: ?Node.Id = null,
objectId: ?[]const u8 = null,
depth: i32 = 1,
depth: u32 = 1,
pierce: bool = false,
})) orelse return error.InvalidParams;
if (params.pierce) {
log.warn(.cdp, "not implemented", .{ .feature = "DOM.describeNode: Not implemented pierce parameter" });
}
if (params.depth != 1 or params.pierce) return error.NotImplemented;
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const node = try getNode(cmd.arena, bc, params.nodeId, params.backendNodeId, params.objectId);
return cmd.sendResult(.{ .node = bc.nodeWriter(node, .{ .depth = params.depth }) }, .{});
return cmd.sendResult(.{ .node = bc.nodeWriter(node, .{}) }, .{});
}
// An array of quad vertices, x immediately followed by y for each point, points clock-wise.
@@ -368,7 +353,7 @@ fn scrollIntoViewIfNeeded(cmd: anytype) !void {
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const node = try getNode(cmd.arena, bc, params.nodeId, params.backendNodeId, params.objectId);
const node_type = parser.nodeType(node._node);
const node_type = parser.nodeType(node._node) catch return error.InvalidNode;
switch (node_type) {
.element => {},
.document => {},
@@ -410,7 +395,7 @@ fn getContentQuads(cmd: anytype) !void {
// visibility: hidden
// display: none
if (parser.nodeType(node._node) != .element) return error.NodeIsNotAnElement;
if (try parser.nodeType(node._node) != .element) return error.NodeIsNotAnElement;
// TODO implement for document or text
// Most likely, document would require some hierarchy in the renderer. It is left unimplemented until we have a good example.
// Text may be tricky, multiple quads in case of multiple lines? empty quads of text = ""?
@@ -436,7 +421,7 @@ fn getBoxModel(cmd: anytype) !void {
const node = try getNode(cmd.arena, bc, params.nodeId, params.backendNodeId, params.objectId);
// TODO implement for document or text
if (parser.nodeType(node._node) != .element) return error.NodeIsNotAnElement;
if (try parser.nodeType(node._node) != .element) return error.NodeIsNotAnElement;
const element = parser.nodeToElement(node._node);
const rect = try Element._getBoundingClientRect(element, page);
@@ -476,24 +461,6 @@ fn requestChildNodes(cmd: anytype) !void {
return cmd.sendResult(null, .{});
}
fn getFrameOwner(cmd: anytype) !void {
const params = (try cmd.params(struct {
frameId: []const u8,
})) orelse return error.InvalidParams;
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const target_id = bc.target_id orelse return error.TargetNotLoaded;
if (std.mem.eql(u8, target_id, params.frameId) == false) {
return cmd.sendError(-32000, "Frame with the given id does not belong to the target.", .{});
}
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
const doc = parser.documentHTMLToDocument(page.window.document);
const node = try bc.node_registry.register(parser.documentToNode(doc));
return cmd.sendResult(.{ .nodeId = node.id, .backendNodeId = node.id }, .{});
}
const testing = @import("../testing.zig");
test "cdp.dom: getSearchResults unknown search id" {
@@ -527,7 +494,7 @@ test "cdp.dom: search flow" {
.method = "DOM.getSearchResults",
.params = .{ .searchId = "0", .fromIndex = 0, .toIndex = 2 },
});
try ctx.expectSentResult(.{ .nodeIds = &.{ 1, 2 } }, .{ .id = 13 });
try ctx.expectSentResult(.{ .nodeIds = &.{ 0, 1 } }, .{ .id = 13 });
// different fromIndex
try ctx.processMessage(.{
@@ -535,7 +502,7 @@ test "cdp.dom: search flow" {
.method = "DOM.getSearchResults",
.params = .{ .searchId = "0", .fromIndex = 1, .toIndex = 2 },
});
try ctx.expectSentResult(.{ .nodeIds = &.{2} }, .{ .id = 14 });
try ctx.expectSentResult(.{ .nodeIds = &.{1} }, .{ .id = 14 });
// different toIndex
try ctx.processMessage(.{
@@ -543,7 +510,7 @@ test "cdp.dom: search flow" {
.method = "DOM.getSearchResults",
.params = .{ .searchId = "0", .fromIndex = 0, .toIndex = 1 },
});
try ctx.expectSentResult(.{ .nodeIds = &.{1} }, .{ .id = 15 });
try ctx.expectSentResult(.{ .nodeIds = &.{0} }, .{ .id = 15 });
}
try ctx.processMessage(.{
@@ -567,19 +534,16 @@ test "cdp.dom: querySelector unknown search id" {
_ = try ctx.loadBrowserContext(.{ .id = "BID-A", .html = "<p>1</p> <p>2</p>" });
try ctx.processMessage(.{
try testing.expectError(error.UnknownNode, ctx.processMessage(.{
.id = 9,
.method = "DOM.querySelector",
.params = .{ .nodeId = 99, .selector = "" },
});
try ctx.expectSentError(-32000, "Could not find node with given id", .{});
try ctx.processMessage(.{
}));
try testing.expectError(error.UnknownNode, ctx.processMessage(.{
.id = 9,
.method = "DOM.querySelectorAll",
.params = .{ .nodeId = 99, .selector = "" },
});
try ctx.expectSentError(-32000, "Could not find node with given id", .{});
}));
}
test "cdp.dom: querySelector Node not found" {
@@ -588,7 +552,7 @@ test "cdp.dom: querySelector Node not found" {
_ = try ctx.loadBrowserContext(.{ .id = "BID-A", .html = "<p>1</p> <p>2</p>" });
try ctx.processMessage(.{ // Hacky way to make sure nodeId 1 exists in the registry
try ctx.processMessage(.{ // Hacky way to make sure nodeId 0 exists in the registry
.id = 3,
.method = "DOM.performSearch",
.params = .{ .query = "p" },
@@ -598,13 +562,13 @@ test "cdp.dom: querySelector Node not found" {
try testing.expectError(error.NodeNotFoundForGivenId, ctx.processMessage(.{
.id = 4,
.method = "DOM.querySelector",
.params = .{ .nodeId = 1, .selector = "a" },
.params = .{ .nodeId = 0, .selector = "a" },
}));
try ctx.processMessage(.{
.id = 5,
.method = "DOM.querySelectorAll",
.params = .{ .nodeId = 1, .selector = "a" },
.params = .{ .nodeId = 0, .selector = "a" },
});
try ctx.expectSentResult(.{ .nodeIds = &[_]u32{} }, .{ .id = 5 });
}
@@ -615,7 +579,7 @@ test "cdp.dom: querySelector Nodes found" {
_ = try ctx.loadBrowserContext(.{ .id = "BID-A", .html = "<div><p>2</p></div>" });
try ctx.processMessage(.{ // Hacky way to make sure nodeId 1 exists in the registry
try ctx.processMessage(.{ // Hacky way to make sure nodeId 0 exists in the registry
.id = 3,
.method = "DOM.performSearch",
.params = .{ .query = "div" },
@@ -625,18 +589,18 @@ test "cdp.dom: querySelector Nodes found" {
try ctx.processMessage(.{
.id = 4,
.method = "DOM.querySelector",
.params = .{ .nodeId = 1, .selector = "p" },
.params = .{ .nodeId = 0, .selector = "p" },
});
try ctx.expectSentEvent("DOM.setChildNodes", null, .{});
try ctx.expectSentResult(.{ .nodeId = 6 }, .{ .id = 4 });
try ctx.expectSentResult(.{ .nodeId = 5 }, .{ .id = 4 });
try ctx.processMessage(.{
.id = 5,
.method = "DOM.querySelectorAll",
.params = .{ .nodeId = 1, .selector = "p" },
.params = .{ .nodeId = 0, .selector = "p" },
});
try ctx.expectSentEvent("DOM.setChildNodes", null, .{});
try ctx.expectSentResult(.{ .nodeIds = &.{6} }, .{ .id = 5 });
try ctx.expectSentResult(.{ .nodeIds = &.{5} }, .{ .id = 5 });
}
test "cdp.dom: getBoxModel" {
@@ -645,7 +609,7 @@ test "cdp.dom: getBoxModel" {
_ = try ctx.loadBrowserContext(.{ .id = "BID-A", .html = "<div><p>2</p></div>" });
try ctx.processMessage(.{ // Hacky way to make sure nodeId 1 exists in the registry
try ctx.processMessage(.{ // Hacky way to make sure nodeId 0 exists in the registry
.id = 3,
.method = "DOM.getDocument",
});
@@ -653,14 +617,14 @@ test "cdp.dom: getBoxModel" {
try ctx.processMessage(.{
.id = 4,
.method = "DOM.querySelector",
.params = .{ .nodeId = 1, .selector = "p" },
.params = .{ .nodeId = 0, .selector = "p" },
});
try ctx.expectSentResult(.{ .nodeId = 3 }, .{ .id = 4 });
try ctx.expectSentResult(.{ .nodeId = 2 }, .{ .id = 4 });
try ctx.processMessage(.{
.id = 5,
.method = "DOM.getBoxModel",
.params = .{ .nodeId = 6 },
.params = .{ .nodeId = 5 },
});
try ctx.expectSentResult(.{ .model = BoxModel{
.content = Quad{ 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0 },

View File

@@ -19,6 +19,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const log = @import("../../log.zig");
const CdpStorage = @import("storage.zig");
const Transfer = @import("../../http/Client.zig").Transfer;
const Notification = @import("../../notification.zig").Notification;
@@ -243,14 +244,7 @@ pub fn httpRequestStart(arena: Allocator, bc: anytype, msg: *const Notification.
const transfer = msg.transfer;
// We're missing a bunch of fields, but, for now, this seems like enough
try bc.cdp.sendEvent("Network.requestWillBeSent", .{
.requestId = try std.fmt.allocPrint(arena, "REQ-{d}", .{transfer.id}),
.frameId = target_id,
.loaderId = bc.loader_id,
.documentUrl = DocumentUrlWriter.init(&page.url.uri),
.request = TransferAsRequestWriter.init(transfer),
.initiator = .{ .type = "other" },
}, .{ .session_id = session_id });
try bc.cdp.sendEvent("Network.requestWillBeSent", .{ .requestId = try std.fmt.allocPrint(arena, "REQ-{d}", .{transfer.id}), .frameId = target_id, .loaderId = bc.loader_id, .documentUrl = DocumentUrlWriter.init(&page.url.uri), .request = TransferAsRequestWriter.init(transfer) }, .{ .session_id = session_id });
}
pub fn httpResponseHeaderDone(arena: Allocator, bc: anytype, msg: *const Notification.ResponseHeaderDone) !void {

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const URL = @import("../../url.zig").URL;
const Page = @import("../../browser/page.zig").Page;
const Notification = @import("../../notification.zig").Notification;
@@ -121,7 +122,7 @@ fn createIsolatedWorld(cmd: anytype) !void {
const world = try bc.createIsolatedWorld(params.worldName, params.grantUniveralAccess);
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
try world.createContextAndLoadPolyfills(bc.arena, page);
try pageCreated(bc, page);
const js_context = &world.executor.js_context.?;
// Create the auxdata json for the contextCreated event
@@ -258,7 +259,7 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
true,
);
}
for (bc.isolated_worlds.items) |*isolated_world| {
if (bc.isolated_world) |*isolated_world| {
const aux_json = try std.fmt.allocPrint(arena, "{{\"isDefault\":false,\"type\":\"isolated\",\"frameId\":\"{s}\"}}", .{target_id});
// Calling contextCreated will assign a new Id to the context and send the contextCreated event
bc.inspector.contextCreated(
@@ -273,14 +274,18 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
pub fn pageRemove(bc: anytype) !void {
// The main page is going to be removed, so we need to remove contexts from other worlds first.
for (bc.isolated_worlds.items) |*isolated_world| {
if (bc.isolated_world) |*isolated_world| {
try isolated_world.removeContext();
}
}
pub fn pageCreated(bc: anytype, page: *Page) !void {
for (bc.isolated_worlds.items) |*isolated_world| {
try isolated_world.createContextAndLoadPolyfills(bc.arena, page);
if (bc.isolated_world) |*isolated_world| {
// We need to recreate the isolated world context
try isolated_world.createContext(page);
const polyfill = @import("../../browser/polyfill/polyfill.zig");
try polyfill.preload(bc.arena, &isolated_world.executor.js_context.?);
}
}

View File

@@ -27,7 +27,6 @@ pub fn processMessage(cmd: anytype) !void {
addBinding,
callFunctionOn,
releaseObject,
getProperties,
}, cmd.input.action) orelse return error.UnknownMethod;
switch (action) {

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
const log = @import("../../log.zig");
const Cookie = @import("../../browser/storage/storage.zig").Cookie;

View File

@@ -79,7 +79,7 @@ fn createBrowserContext(cmd: anytype) !void {
}
const bc = cmd.createBrowserContext() catch |err| switch (err) {
error.AlreadyExists => return cmd.sendError(-32000, "Cannot have more than one browser context at a time", .{}),
error.AlreadyExists => return cmd.sendError(-32000, "Cannot have more than one browser context at a time"),
else => return err,
};
@@ -102,7 +102,7 @@ fn disposeBrowserContext(cmd: anytype) !void {
})) orelse return error.InvalidParams;
if (cmd.cdp.disposeBrowserContext(params.browserContextId) == false) {
return cmd.sendError(-32602, "No browser context with the given id found", .{});
return cmd.sendError(-32602, "No browser context with the given id found");
}
try cmd.sendResult(null, .{});
}
@@ -241,10 +241,10 @@ fn closeTarget(cmd: anytype) !void {
}
bc.session.removePage();
for (bc.isolated_worlds.items) |*world| {
if (bc.isolated_world) |*world| {
world.deinit();
bc.isolated_world = null;
}
bc.isolated_worlds.clearRetainingCapacity();
bc.target_id = null;
}

View File

@@ -24,6 +24,7 @@ const ArenaAllocator = std.heap.ArenaAllocator;
const Testing = @This();
const main = @import("cdp.zig");
const App = @import("../app.zig").App;
const parser = @import("../browser/netsurf.zig");
const base = @import("../testing.zig");

View File

@@ -96,9 +96,6 @@ notification: ?*Notification = null,
// restoring, this originally-configured value is what it goes to.
http_proxy: ?[:0]const u8 = null,
// The complete user-agent header line
user_agent: [:0]const u8,
// libcurl can monitor arbitrary sockets. Currently, we only ever [maybe] want to
// monitor the CDP client socket, so we've done the simplest thing possible
// by having this single optional field
@@ -133,7 +130,6 @@ pub fn init(allocator: Allocator, ca_blob: ?c.curl_blob, opts: Http.Opts) !*Clie
.blocking = blocking,
.allocator = allocator,
.http_proxy = opts.http_proxy,
.user_agent = opts.user_agent,
.transfer_pool = transfer_pool,
};
@@ -151,10 +147,6 @@ pub fn deinit(self: *Client) void {
self.allocator.destroy(self);
}
pub fn newHeaders(self: *const Client) !Http.Headers {
return Http.Headers.init(self.user_agent);
}
pub fn abort(self: *Client) void {
while (self.handles.in_use.first) |node| {
const handle: *Handle = @fieldParentPtr("node", node);
@@ -805,7 +797,7 @@ pub const Transfer = struct {
self.req.headers.deinit();
var buf: std.ArrayListUnmanaged(u8) = .empty;
var new_headers = try self.client.newHeaders();
var new_headers = try Http.Headers.init();
for (headers) |hdr| {
// safe to re-use this buffer in Headers.add, because curl copies
// the value we pass into curl_slist_append.

View File

@@ -102,17 +102,12 @@ pub fn newConnection(self: *Http) !Connection {
return Connection.init(self.ca_blob, &self.opts);
}
pub fn newHeaders(self: *const Http) Headers {
return Headers.init(self.opts.user_agent);
}
pub const Connection = struct {
easy: *c.CURL,
opts: Connection.Opts,
const Opts = struct {
proxy_bearer_token: ?[:0]const u8,
user_agent: [:0]const u8,
};
// pointer to opts is not stable, don't hold a reference to it!
@@ -173,7 +168,6 @@ pub const Connection = struct {
return .{
.easy = easy,
.opts = .{
.user_agent = opts.user_agent,
.proxy_bearer_token = opts.proxy_bearer_token,
},
};
@@ -236,7 +230,7 @@ pub const Connection = struct {
pub fn request(self: *const Connection) !u16 {
const easy = self.easy;
var header_list = try Headers.init(self.opts.user_agent);
var header_list = try Headers.init();
defer header_list.deinit();
try self.secretHeaders(&header_list);
try errorCheck(c.curl_easy_setopt(easy, c.CURLOPT_HTTPHEADER, header_list.headers));
@@ -265,8 +259,8 @@ pub const Headers = struct {
headers: *c.curl_slist,
cookies: ?[*c]const u8,
pub fn init(user_agent: [:0]const u8) !Headers {
const header_list = c.curl_slist_append(null, user_agent);
pub fn init() !Headers {
const header_list = c.curl_slist_append(null, "User-Agent: Lightpanda/1.0");
if (header_list == null) return error.OutOfMemory;
return .{ .headers = header_list, .cookies = null };
}
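
With the user-agent parameter gone, Headers.init() itself seeds the list with the built-in `User-Agent: Lightpanda/1.0` line, and call sites throughout this diff switch from `client.newHeaders()` to `Http.Headers.init()`. A rough, self-contained sketch of that behaviour using a plain Zig list instead of curl's slist (SketchHeaders and default_user_agent are illustrative names, not the real Http.Headers API):

const std = @import("std");

const default_user_agent = "User-Agent: Lightpanda/1.0";

// Stand-in for Http.Headers: init() always seeds the default User-Agent,
// add() appends extra header lines.
const SketchHeaders = struct {
    list: std.ArrayListUnmanaged([]const u8) = .empty,

    fn init(allocator: std.mem.Allocator) !SketchHeaders {
        var self = SketchHeaders{};
        try self.list.append(allocator, default_user_agent);
        return self;
    }

    fn add(self: *SketchHeaders, allocator: std.mem.Allocator, line: []const u8) !void {
        try self.list.append(allocator, line);
    }

    fn deinit(self: *SketchHeaders, allocator: std.mem.Allocator) void {
        self.list.deinit(allocator);
    }
};

test "init seeds the default user agent" {
    const allocator = std.testing.allocator;
    var headers = try SketchHeaders.init(allocator);
    defer headers.deinit(allocator);
    try headers.add(allocator, "Accept: */*");
    try std.testing.expectEqual(@as(usize, 2), headers.list.items.len);
    try std.testing.expectEqualStrings(default_user_agent, headers.list.items[0]);
}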
@@ -343,7 +337,6 @@ pub const Opts = struct {
tls_verify_host: bool = true,
http_proxy: ?[:0]const u8 = null,
proxy_bearer_token: ?[:0]const u8 = null,
user_agent: [:0]const u8,
};
pub const Method = enum(u8) {

View File

@@ -20,6 +20,7 @@ const std = @import("std");
const builtin = @import("builtin");
const Thread = std.Thread;
const Allocator = std.mem.Allocator;
const is_debug = builtin.mode == .Debug;

View File

@@ -22,11 +22,12 @@ const Allocator = std.mem.Allocator;
const log = @import("log.zig");
const App = @import("app.zig").App;
const Http = @import("http/Http.zig");
const Server = @import("server.zig").Server;
const Browser = @import("browser/browser.zig").Browser;
const DumpStripMode = @import("browser/dump.zig").Opts.StripMode;
const build_config = @import("build_config");
const parser = @import("browser/netsurf.zig");
var _app: ?*App = null;
var _server: ?Server = null;
@@ -107,14 +108,6 @@ fn run(alloc: Allocator) !void {
log.opts.filter_scopes = lfs;
}
const user_agent = blk: {
const USER_AGENT = "User-Agent: Lightpanda/1.0";
if (args.userAgentSuffix()) |suffix| {
break :blk try std.fmt.allocPrintSentinel(args_arena.allocator(), "{s} {s}", .{ USER_AGENT, suffix }, 0);
}
break :blk USER_AGENT;
};
// _app is global to handle graceful shutdown.
_app = try App.init(alloc, .{
.run_mode = args.mode,
@@ -125,7 +118,6 @@ fn run(alloc: Allocator) !void {
.http_connect_timeout_ms = args.httpConnectTiemout(),
.http_max_host_open = args.httpMaxHostOpen(),
.http_max_concurrent = args.httpMaxConcurrent(),
.user_agent = user_agent,
});
const app = _app.?;
@@ -185,7 +177,7 @@ fn run(alloc: Allocator) !void {
try page.dump(.{
.page = page,
.with_base = opts.withbase,
.strip_mode = opts.strip_mode,
.exclude_scripts = opts.noscript,
}, &writer.interface);
try writer.interface.flush();
}
@@ -268,13 +260,6 @@ const Command = struct {
};
}
fn userAgentSuffix(self: *const Command) ?[]const u8 {
return switch (self.mode) {
inline .serve, .fetch => |opts| opts.common.user_agent_suffix,
else => unreachable,
};
}
const Mode = union(App.RunMode) {
help: bool, // false when being printed because of an error
fetch: Fetch,
@@ -293,8 +278,8 @@ const Command = struct {
url: []const u8,
dump: bool = false,
common: Common,
noscript: bool = false,
withbase: bool = false,
strip_mode: DumpStripMode = .{},
};
const Common = struct {
@@ -308,7 +293,6 @@ const Command = struct {
log_level: ?log.Level = null,
log_format: ?log.Format = null,
log_filter_scopes: ?[]log.Scope = null,
user_agent_suffix: ?[]const u8 = null,
};
fn printUsageAndExit(self: *const Command, success: bool) void {
@@ -355,9 +339,6 @@ const Command = struct {
\\ Defaults to
++ (if (builtin.mode == .Debug) " pretty." else " logfmt.") ++
\\
\\ --user_agent_suffix
\\ Suffix to append to the Lightpanda/X.Y User-Agent
\\
;
// MAX_HELP_LEN|
@@ -373,14 +354,7 @@ const Command = struct {
\\Options:
\\--dump Dumps document to stdout.
\\ Defaults to false.
\\
\\--strip_mode Comma-separated list of tag groups to remove from
\\ the dump. e.g. --strip_mode js,css
\\ - "js" script and link[as=script, rel=preload]
\\ - "ui" includes img, picture, video, css and svg
\\ - "css" includes style and link[rel=stylesheet]
\\ - "full" includes js, ui and css
\\
\\--noscript Exclude <script> tags in dump. Defaults to false.
\\--with_base Add a <base> tag in dump. Defaults to false.
\\
++ common_options ++
@@ -468,10 +442,6 @@ fn inferMode(opt: []const u8) ?App.RunMode {
return .fetch;
}
if (std.mem.eql(u8, opt, "--strip_mode")) {
return .fetch;
}
if (std.mem.eql(u8, opt, "--with_base")) {
return .fetch;
}
@@ -557,10 +527,10 @@ fn parseFetchArgs(
args: *std.process.ArgIterator,
) !Command.Fetch {
var dump: bool = false;
var noscript: bool = false;
var withbase: bool = false;
var url: ?[]const u8 = null;
var common: Command.Common = .{};
var strip_mode: DumpStripMode = .{};
while (args.next()) |opt| {
if (std.mem.eql(u8, "--dump", opt)) {
@@ -569,11 +539,7 @@ fn parseFetchArgs(
}
if (std.mem.eql(u8, "--noscript", opt)) {
log.warn(.app, "deprecation warning", .{
.feature = "--noscript argument",
.hint = "use '--strip_mode js' instead",
});
strip_mode.js = true;
noscript = true;
continue;
}
@@ -582,32 +548,6 @@ fn parseFetchArgs(
continue;
}
if (std.mem.eql(u8, "--strip_mode", opt)) {
const str = args.next() orelse {
log.fatal(.app, "missing argument value", .{ .arg = "--strip_mode" });
return error.InvalidArgument;
};
var it = std.mem.splitScalar(u8, str, ',');
while (it.next()) |part| {
const trimmed = std.mem.trim(u8, part, &std.ascii.whitespace);
if (std.mem.eql(u8, trimmed, "js")) {
strip_mode.js = true;
} else if (std.mem.eql(u8, trimmed, "ui")) {
strip_mode.ui = true;
} else if (std.mem.eql(u8, trimmed, "css")) {
strip_mode.css = true;
} else if (std.mem.eql(u8, trimmed, "full")) {
strip_mode.js = true;
strip_mode.ui = true;
strip_mode.css = true;
} else {
log.fatal(.app, "invalid option choice", .{ .arg = "--strip_mode", .value = trimmed });
}
}
continue;
}
if (try parseCommonArg(allocator, opt, args, &common)) {
continue;
}
@@ -633,8 +573,8 @@ fn parseFetchArgs(
.url = url.?,
.dump = dump,
.common = common,
.noscript = noscript,
.withbase = withbase,
.strip_mode = strip_mode,
};
}
@@ -773,21 +713,6 @@ fn parseCommonArg(
return true;
}
if (std.mem.eql(u8, "--user_agent_suffix", opt)) {
const str = args.next() orelse {
log.fatal(.app, "missing argument value", .{ .arg = "--user_agent_suffix" });
return error.InvalidArgument;
};
for (str) |c| {
if (!std.ascii.isPrint(c)) {
log.fatal(.app, "not printable character", .{ .arg = "--user_agent_suffix" });
return error.InvalidArgument;
}
}
common.user_agent_suffix = try allocator.dupe(u8, str);
return true;
}
return false;
}

View File

@@ -27,6 +27,9 @@ const Env = @import("browser/env.zig").Env;
const Browser = @import("browser/browser.zig").Browser;
const TestHTTPServer = @import("TestHTTPServer.zig");
const parser = @import("browser/netsurf.zig");
const polyfill = @import("browser/polyfill/polyfill.zig");
const WPT_DIR = "tests/wpt";
pub fn main() !void {
@@ -66,7 +69,6 @@ pub fn main() !void {
var app = try App.init(allocator, .{
.run_mode = .fetch,
.user_agent = "User-Agent: Lightpanda/1.0 Lightpanda/WPT",
});
defer app.deinit();

View File

@@ -1,7 +1,9 @@
const std = @import("std");
const log = @import("log.zig");
const URL = @import("url.zig").URL;
const page = @import("browser/page.zig");
const Http = @import("http/Http.zig");
const Transfer = @import("http/Client.zig").Transfer;
const Allocator = std.mem.Allocator;

View File

@@ -677,11 +677,6 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
// we now simply persist every time persist() is called.
js_object_list: std.ArrayListUnmanaged(PersistentObject) = .empty,
// Various web APIs depend on having a persistent promise resolver. They
// require this PromiseResolver to be valid for a lifetime longer than
// the function that resolves/rejects them.
persisted_promise_resolvers: std.ArrayListUnmanaged(v8.Persistent(v8.PromiseResolver)) = .empty,
// When we need to load a resource (e.g. an external script), we call
// this function to get the source. This is always a reference to the
// Page's fetchModuleSource, but we use a function pointer
@@ -738,10 +733,6 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
p.deinit();
}
for (self.persisted_promise_resolvers.items) |*p| {
p.deinit();
}
{
var it = self.module_cache.valueIterator();
while (it.next()) |p| {
@@ -1215,10 +1206,6 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
return try self.createFunction(js_value);
}
if (T == String) {
return .{ .string = try valueToString(self.context_arena, js_value, self.isolate, self.v8_context) };
}
const js_obj = js_value.castTo(v8.Object);
if (comptime isJsObject(T)) {
@@ -1274,20 +1261,11 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
};
}
// creates a PersistentPromiseResolver, taking in a lifetime parameter.
// If the lifetime is page, the page will clean up the PersistentPromiseResolver.
// If the lifetime is self, you will be expected to deinitialize the PersistentPromiseResolver.
pub fn createPersistentPromiseResolver(
self: *JsContext,
lifetime: enum { self, page },
) !PersistentPromiseResolver {
const resolver = v8.Persistent(v8.PromiseResolver).init(self.isolate, v8.PromiseResolver.init(self.v8_context));
if (lifetime == .page) {
try self.persisted_promise_resolvers.append(self.context_arena, resolver);
}
return .{ .js_context = self, .resolver = resolver };
pub fn createPersistentPromiseResolver(self: *JsContext) PersistentPromiseResolver {
return .{
.js_context = self,
.resolver = v8.Persistent(v8.PromiseResolver).init(self.isolate, v8.PromiseResolver.init(self.v8_context)),
};
}
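// A minimal usage sketch of the lifetime-free API, not taken from this diff:
// `DemoContext`, `start` and the exact type paths (Env.JsContext, Env.Promise)
// are illustrative assumptions. The point is that the caller now owns the
// persistent resolver and can mark it weak instead of relying on a
// page-lifetime cleanup list.
const DemoContext = struct {
    resolver: Env.PersistentPromiseResolver,
    fn start(js_context: *Env.JsContext) DemoContext {
        var resolver = js_context.createPersistentPromiseResolver();
        // Let V8 reclaim the persistent handle once JS no longer references
        // the promise, rather than deinit-ing it from a page-owned list.
        resolver.setWeak();
        return .{ .resolver = resolver };
    }
    fn jsPromise(self: *const DemoContext) Env.Promise {
        return self.resolver.promise();
    }
};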
// Probing is part of trying to map a JS value to a Zig union. There's
@@ -2268,6 +2246,10 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
self.resolver.deinit();
}
pub fn setWeak(self: *PersistentPromiseResolver) void {
self.resolver.setWeak();
}
pub fn promise(self: PersistentPromiseResolver) Promise {
return .{
.promise = self.resolver.castToPromiseResolver().getPromise(),
@@ -2301,15 +2283,6 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
promise: v8.Promise,
};
// When doing jsValueToZig, strings ([]const u8) are managed by the
// call_arena. That means that if the API wants to persist the string
// (which is relatively common), it needs to dupe it again.
// If the parameter is an Env.String rather than a []const u8, then
// the page's arena will be used (rather than the call arena).
pub const String = struct {
string: []const u8,
};
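// For context, a rough sketch of the distinction the removed comment describes.
// `Widget` and its methods are hypothetical; only the arena-handling pattern
// matters: a []const u8 argument is backed by the short-lived call arena and
// must be duped to outlive the call, while the (now removed) Env.String
// carried a page-arena copy that could be stored directly.
const Widget = struct {
    name: []const u8 = "",
    // []const u8 parameter: call-arena backed, so dupe into a longer-lived arena.
    pub fn setNameFromCallArena(self: *Widget, name: []const u8, page_arena: std.mem.Allocator) !void {
        self.name = try page_arena.dupe(u8, name);
    }
    // Env.String parameter: already allocated in the page arena, safe to keep.
    pub fn setNameFromString(self: *Widget, name: Env.String) void {
        self.name = name.string;
    }
};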
pub const Inspector = struct {
isolate: v8.Isolate,
inner: *v8.Inspector,
@@ -2843,7 +2816,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
const T = @TypeOf(value);
switch (@typeInfo(T)) {
.void, .bool, .int, .comptime_int, .float, .comptime_float, .@"enum", .null => {
.void, .bool, .int, .comptime_int, .float, .comptime_float, .@"enum" => {
// Need to do this to keep the compiler happy
// simpleZigValueToJs handles all of these cases.
unreachable;
@@ -2967,7 +2940,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
}
return v8.initNull(isolate).toValue();
},
.error_union => return zigValueToJs(templates, isolate, v8_context, try value),
.error_union => return zigValueToJs(templates, isolate, v8_context, value catch |err| return err),
else => {},
}
@@ -3634,7 +3607,6 @@ fn jsUnsignedIntToZig(comptime T: type, max: comptime_int, maybe: u32) !T {
fn simpleZigValueToJs(isolate: v8.Isolate, value: anytype, comptime fail: bool) if (fail) v8.Value else ?v8.Value {
switch (@typeInfo(@TypeOf(value))) {
.void => return v8.initUndefined(isolate).toValue(),
.null => return v8.initNull(isolate).toValue(),
.bool => return v8.getValue(if (value) v8.initTrue(isolate) else v8.initFalse(isolate)),
.int => |n| switch (n.signedness) {
.signed => {

View File

@@ -29,6 +29,8 @@ const log = @import("log.zig");
const App = @import("app.zig").App;
const CDP = @import("cdp/cdp.zig").CDP;
const TimeoutCheck = std.time.ns_per_ms * 100;
const MAX_HTTP_REQUEST_SIZE = 4096;
// max message size

View File

@@ -35,9 +35,7 @@ pub const std_options = std.Options{
};
pub var js_runner_duration: usize = 0;
pub var v8_peak_memory: usize = 0;
pub var libdom_memory: i64 = 0;
pub var tracking_allocator: Allocator = undefined;
pub var tracking_allocator = TrackingAllocator.init(std.testing.allocator);
pub fn main() !void {
var mem: [8192]u8 = undefined;
@@ -54,12 +52,6 @@ pub fn main() !void {
var args = try std.process.argsWithAllocator(allocator);
defer args.deinit();
var tracking_arena = std.heap.ArenaAllocator.init(std.heap.c_allocator);
defer tracking_arena.deinit();
var ta = TrackingAllocator.init(tracking_arena.allocator());
tracking_allocator = ta.allocator();
// ignore the exec name.
_ = args.next();
var json_stats = false;
@@ -90,7 +82,6 @@ pub fn main() !void {
if (isSetup(t) or isTeardown(t)) {
continue;
}
defer _ = tracking_arena.reset(.retain_capacity);
var status = Status.pass;
slowest.startTiming();
@@ -184,7 +175,7 @@ pub fn main() !void {
if (json_stats) {
var stdout = std.fs.File.stdout();
var writer = stdout.writer(&.{});
const stats = ta.stats();
const stats = tracking_allocator.stats();
try std.json.Stringify.value(&.{
.{ .name = "browser", .bench = .{
.duration = js_runner_duration,
@@ -196,13 +187,13 @@ pub fn main() !void {
.duration = js_runner_duration,
.alloc_nb = 0,
.realloc_nb = 0,
.alloc_size = libdom_memory,
.alloc_size = 0,
} },
.{ .name = "v8", .bench = .{
.duration = js_runner_duration,
.alloc_nb = 0,
.realloc_nb = 0,
.alloc_size = v8_peak_memory,
.alloc_size = 0,
} },
.{ .name = "main", .bench = .{
.duration = js_runner_duration,

View File

@@ -19,6 +19,8 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const Platform = @import("runtime/js.zig").Platform;
pub const allocator = std.testing.allocator;
pub const expectError = std.testing.expectError;
pub const expect = std.testing.expect;
@@ -360,6 +362,128 @@ fn isJsonValue(a: std.json.Value, b: std.json.Value) bool {
}
}
pub const tracking_allocator = @import("root").tracking_allocator.allocator();
pub const JsRunner = struct {
const URL = @import("url.zig").URL;
const Page = @import("browser/page.zig").Page;
page: *Page,
browser: *Browser,
allocator: Allocator,
fn init(alloc: Allocator, opts: RunnerOpts) !JsRunner {
const browser = try alloc.create(Browser);
errdefer alloc.destroy(browser);
browser.* = try Browser.init(test_app);
errdefer browser.deinit();
var session = try browser.newSession();
var page = try session.createPage();
// a bit hacky, but since we aren't going through page.navigate, there's
// some minimum setup we need to do
page.url = try URL.parse(opts.url, null);
try page.window.replaceLocation(.{
.url = try page.url.toWebApi(page.arena),
});
const html_doc = try parser.documentHTMLParseFromStr(opts.html);
try page.setDocument(html_doc);
page.mode = .{ .parsed = {} };
return .{
.page = page,
.browser = browser,
.allocator = alloc,
};
}
pub fn deinit(self: *JsRunner) void {
self.browser.deinit();
self.allocator.destroy(self.browser);
}
const RunOpts = struct {};
pub const Case = std.meta.Tuple(&.{ []const u8, ?[]const u8 });
pub fn testCases(self: *JsRunner, cases: []const Case, _: RunOpts) !void {
const js_context = self.page.main_context;
const arena = self.page.arena;
const start = try std.time.Instant.now();
for (cases, 0..) |case, i| {
var try_catch: Env.TryCatch = undefined;
try_catch.init(js_context);
defer try_catch.deinit();
const value = js_context.exec(case.@"0", null) catch |err| {
if (try try_catch.err(arena)) |msg| {
std.debug.print("{s}\n\nCase: {d}\n{s}\n", .{ msg, i + 1, case.@"0" });
}
return err;
};
_ = self.page.session.wait(100);
@import("root").js_runner_duration += std.time.Instant.since(try std.time.Instant.now(), start);
if (case.@"1") |expected| {
const actual = try value.toString(arena);
if (std.mem.eql(u8, expected, actual) == false) {
std.debug.print("Expected:\n{s}\n\nGot:\n{s}\n\nCase: {d}\n{s}\n", .{ expected, actual, i + 1, case.@"0" });
return error.UnexpectedResult;
}
}
}
}
pub fn exec(self: *JsRunner, src: []const u8, name: ?[]const u8, err_msg: *?[]const u8) !void {
_ = try self.eval(src, name, err_msg);
}
pub fn eval(self: *JsRunner, src: []const u8, name: ?[]const u8, err_msg: *?[]const u8) !Env.Value {
const js_context = self.page.main_context;
const arena = self.page.arena;
var try_catch: Env.TryCatch = undefined;
try_catch.init(js_context);
defer try_catch.deinit();
return js_context.exec(src, name) catch |err| {
if (try try_catch.err(arena)) |msg| {
err_msg.* = msg;
std.debug.print("Error running script: {s}\n", .{msg});
}
return err;
};
}
pub fn dispatchDOMContentLoaded(self: *JsRunner) !void {
const HTMLDocument = @import("browser/html/document.zig").HTMLDocument;
const html_doc = self.page.window.document;
try HTMLDocument.documentIsLoaded(html_doc, self.page);
}
};
const RunnerOpts = struct {
url: []const u8 = "https://lightpanda.io/opensource-browser/",
html: []const u8 =
\\ <div id="content">
\\ <a id="link" href="foo" class="ok">OK</a>
\\ <p id="para-empty" class="ok empty">
\\ <span id="para-empty-child"></span>
\\ </p>
\\ <p id="para"> And</p>
\\ <!--comment-->
\\ </div>
\\
,
};
pub fn jsRunner(alloc: Allocator, opts: RunnerOpts) !JsRunner {
return JsRunner.init(alloc, opts);
}
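// A minimal usage sketch of the JsRunner helper added above, assuming the test
// binary's usual setup()/shutdown() hooks have already initialized test_app.
// The test name and the cases themselves are made up for illustration.
test "example: jsRunner smoke test" {
    var runner = try jsRunner(allocator, .{});
    defer runner.deinit();
    try runner.testCases(&.{
        // expression, expected string form of the result
        .{ "document.getElementById('link').className", "ok" },
        .{ "1 + 1", "2" },
        // null skips the comparison; the script is only executed
        .{ "var x = 5", null },
    }, .{});
}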
var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init;
pub var test_app: *App = undefined;
pub var test_browser: Browser = undefined;
@@ -369,7 +493,6 @@ pub fn setup() !void {
test_app = try App.init(gpa.allocator(), .{
.run_mode = .serve,
.tls_verify_host = false,
.user_agent = "User-Agent: Lightpanda/1.0 internal-tester",
});
errdefer test_app.deinit();
@@ -379,22 +502,15 @@ pub fn setup() !void {
test_session = try test_browser.newSession();
}
pub fn shutdown() void {
@import("root").v8_peak_memory = test_browser.env.isolate.getHeapStatistics().total_physical_size;
@import("root").libdom_memory = @import("browser/mimalloc.zig").getRSS();
test_browser.deinit();
test_app.deinit();
}
pub fn htmlRunner(file: []const u8) !void {
defer _ = arena_instance.reset(.retain_capacity);
const start = try std.time.Instant.now();
const page = try test_session.createPage();
defer test_session.removePage();
page.arena = @import("root").tracking_allocator;
const js_context = page.main_context;
var try_catch: Env.TryCatch = undefined;
try_catch.init(js_context);
@@ -404,8 +520,6 @@ pub fn htmlRunner(file: []const u8) !void {
try page.navigate(url, .{});
_ = page.wait(2000);
@import("root").js_runner_duration += std.time.Instant.since(try std.time.Instant.now(), start);
const value = js_context.exec("testing.getStatus()", "testing.getStatus()") catch |err| {
const msg = try_catch.err(arena_allocator) catch @errorName(err) orelse "unknown";
std.debug.print("{s}: test failure\nError: {s}\n", .{ file, msg });

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="testing.js"></script>
<script id=intl>
// this will crash if ICU isn't properly configured / initialized

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="testing.js"></script>
<script id=crypto>
const a = crypto.randomUUID();

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="testing.js"></script>
<script id=support>
testing.expectEqual(true, CSS.supports('display: flex'));

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=css_rule_list>
let list = new CSSRuleList();

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=css_style_declaration>
let style = document.createElement('div').style;

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=css_stylesheet>
let css = new CSSStyleSheet()

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=animation>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<a id="link" href="foo" class="ok">OK</a>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<a id="link" href="foo" class="ok">OK</a>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=comment>
let comment = new Comment('foo');

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<div id="content">

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<body></body>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=documentType>
let dt1 = document.implementation.createDocumentType('qname1', 'pid1', 'sys1');

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=domParser>
const dp = new DOMParser();;

View File

@@ -1,7 +1,6 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<div id="content" dir="ltr">
<div id="content">
<a id="link" href="foo" class="ok">OK</a>
<p id="para-empty" class="ok empty">
<span id="para-empty-child"></span>
@@ -17,7 +16,6 @@
testing.expectEqual('div', content.localName);
testing.expectEqual('DIV', content.tagName);
testing.expectEqual('content', content.id);
testing.expectEqual('ltr', content.dir);
content.id = 'foo';
testing.expectEqual('foo', content.id);
@@ -27,7 +25,6 @@
let p1 = document.getElementById('para-empty');
testing.expectEqual('ok empty', p1.className);
testing.expectEqual('', p1.dir);
p1.className = 'foo bar baz';
testing.expectEqual('foo bar baz', p1.className);
@@ -54,16 +51,16 @@
<script id=attributes>
testing.expectEqual(true, content.hasAttributes());
testing.expectEqual(2, content.attributes.length);
testing.expectEqual(['id', 'dir'], content.getAttributeNames());
testing.expectEqual(1, content.attributes.length);
testing.expectEqual(['id'], content.getAttributeNames());
testing.expectEqual('content', content.getAttribute('id'));
testing.expectEqual('content', content.attributes['id'].value);
let x = '';
for (const attr of content.attributes) {
x += attr.name + '=' + attr.value + ',';
x += attr.name + '=' + attr.value;
}
testing.expectEqual('id=content,dir=ltr,', x);
testing.expectEqual('id=content', x);
testing.expectEqual(false, content.hasAttribute('foo'));
testing.expectEqual(null, content.getAttribute('foo'));
@@ -71,7 +68,7 @@
content.setAttribute('foo', 'bar');
testing.expectEqual(true, content.hasAttribute('foo'));
testing.expectEqual('bar', content.getAttribute('foo'));
testing.expectEqual(['id', 'dir', 'foo'], content.getAttributeNames());
testing.expectEqual(['id', 'foo'], content.getAttributeNames());
testing.expectError('Error: InvalidCharacterError', () => {
content.setAttribute('.foo', 'invalid')
@@ -265,27 +262,3 @@
$('#to-remove').remove();
testing.expectEqual(null, $('#to-remove'));
</script>
<script id=elementDir>
const divElement = document.createElement("div");
// Always initialized with empty string if `dir` attribute not provided.
testing.expectEqual("", divElement.dir);
divElement.dir = "ltr";
testing.expectEqual("ltr", divElement.dir);
divElement.dir = "rtl";
testing.expectEqual("rtl", divElement.dir);
divElement.dir = "auto";
testing.expectEqual("auto", divElement.dir);
</script>
<script id=linkRel>
const linkElement = document.createElement("link");
// A newly created link element must have its rel set to empty string.
testing.expectEqual("", linkElement.rel);
linkElement.rel = "stylesheet";
testing.expectEqual("stylesheet", linkElement.rel);
</script>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<div id="content"><p id=para></p></div>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<div id="content">

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<body>
<div id="content">
<a id="link" href="foo" class="ok">OK</a>
@@ -11,57 +10,50 @@
</body>
<script src="../testing.js"></script>
<script id=caseInsensitve>
const Ptags = document.getElementsByTagName('P');
testing.expectEqual(2, Ptags.length);
testing.expectEqual('p', Ptags.item(0).localName);
testing.expectEqual('p', Ptags.item(1).localName);
</script>
<script id=exceptions>
let content = $('#content');
let pe = $('#para-empty');
<script id=all>
let allTags = document.getElementsByTagName('*');
testing.expectEqual(13, allTags.length);
testing.expectEqual('html', allTags.item(0).localName);
testing.expectEqual('html', allTags.item(0).localName);
testing.expectEqual('head', allTags.item(1).localName);
testing.expectEqual('html', allTags.item(0).localName);
testing.expectEqual('body', allTags.item(2).localName);
testing.expectEqual('div', allTags.item(3).localName);
testing.expectEqual('p', allTags.item(7).localName);
testing.expectEqual('span', allTags.namedItem('para-empty-child').localName);
let getElementsByTagName = document.getElementsByTagName('p');
testing.expectEqual(2, getElementsByTagName.length);
let getElementsByTagNameCI = document.getElementsByTagName('P');
testing.expectEqual(2, getElementsByTagNameCI.length);
testing.expectEqual('p', getElementsByTagName.item(0).localName);
testing.expectEqual('p', getElementsByTagName.item(1).localName);
let getElementsByTagNameAll = document.getElementsByTagName('*');
testing.expectEqual(10, getElementsByTagNameAll.length);
testing.expectEqual('html', getElementsByTagNameAll.item(0).localName);
testing.expectEqual('html', getElementsByTagNameAll.item(0).localName);
testing.expectEqual('head', getElementsByTagNameAll.item(1).localName);
testing.expectEqual('html', getElementsByTagNameAll.item(0).localName);
testing.expectEqual('body', getElementsByTagNameAll.item(2).localName);
testing.expectEqual('div', getElementsByTagNameAll.item(3).localName);
testing.expectEqual('p', getElementsByTagNameAll.item(7).localName);
testing.expectEqual('span', getElementsByTagNameAll.namedItem('para-empty-child').localName);
// array like
testing.expectEqual('html', allTags[0].localName);
testing.expectEqual('p', allTags[7].localName);
testing.expectEqual(undefined, allTags[14]);
testing.expectEqual('span', allTags['para-empty-child'].localName);
testing.expectEqual(undefined, allTags['foo']);
</script>
testing.expectEqual('html', getElementsByTagNameAll[0].localName);
testing.expectEqual('p', getElementsByTagNameAll[7].localName);
testing.expectEqual(undefined, getElementsByTagNameAll[11]);
testing.expectEqual('span', getElementsByTagNameAll['para-empty-child'].localName);
testing.expectEqual(undefined, getElementsByTagNameAll['foo']);
<script id=element>
let content = $('#content');
testing.expectEqual(4, content.getElementsByTagName('*').length);
testing.expectEqual(2, content.getElementsByTagName('p').length);
testing.expectEqual(0, content.getElementsByTagName('div').length);
testing.expectEqual(1, document.children.length);
testing.expectEqual(3, content.children.length);
</script>
<script id=liveness>
const ptags = document.getElementsByTagName('p');
testing.expectEqual(2, ptags.length);
testing.expectEqual(' And', ptags.item(1).textContent);
// check liveness
let p = document.createElement('p');
p.textContent = 'OK live';
// hasn't been added, still 2
testing.expectEqual(2, ptags.length);
testing.expectEqual('OK live', p.textContent = 'OK live');
testing.expectEqual(' And', getElementsByTagName.item(1).textContent);
testing.expectEqual(true, content.appendChild(p) != undefined);
testing.expectEqual(3, ptags.length);
testing.expectEqual('OK live', ptags.item(2).textContent);
testing.expectEqual(true, content.insertBefore(p, $('#para-empty')) != undefined);
testing.expectEqual('OK live', ptags.item(0).textContent);
testing.expectEqual(3, getElementsByTagName.length);
testing.expectEqual('OK live', getElementsByTagName.item(2).textContent);
testing.expectEqual(true, content.insertBefore(p, pe) != undefined);
testing.expectEqual('OK live', getElementsByTagName.item(0).textContent);
</script>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=implementation>
let impl = document.implementation;

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<body></body>
<script src="../testing.js"></script>
<script id=intersectionObserver>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=messageChannel>
const mc1 = new MessageChannel();

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<div></div>
<div id=d1><p id=p1> And</p></div>
<div id=d2><p id=p2> And</p></div>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<div id="content"></div>
<script src="../testing.js"></script>

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<body><div id="content">
<a id="link" href="foo" class="ok">OK</a>
<p id="para-empty" class="ok empty">

View File

@@ -1,4 +1,3 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=nodeFilter>
testing.expectEqual(1, NodeFilter.FILTER_ACCEPT);

Some files were not shown because too many files have changed in this diff.