3 Commits

Author           SHA1        Message                                 Date
Pierre Tachoire  02cd5e98f9  event_target: handle window target      2025-01-21 13:50:34 +01:00
Pierre Tachoire  a51c20068f  netsurf: handle event target get type   2025-01-21 13:50:34 +01:00
Pierre Tachoire  99a3e4be3f  upgrade vendor/netsurf/libdom           2025-01-21 13:50:33 +01:00
51 changed files with 1715 additions and 3897 deletions


@@ -16,7 +16,6 @@ on:
- "src/*.zig"
- "vendor/zig-js-runtime"
- ".github/**"
- "vendor/**"
pull_request:
# By default GH trigger on types opened, synchronize and reopened.
@@ -33,7 +32,6 @@ on:
- "src/*.zig"
- "vendor/**"
- ".github/**"
- "vendor/**"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

.gitmodules

@@ -1,30 +1,34 @@
[submodule "vendor/zig-js-runtime"]
path = vendor/zig-js-runtime
url = https://github.com/lightpanda-io/zig-js-runtime.git/
url = git@github.com:lightpanda-io/zig-js-runtime.git
[submodule "vendor/netsurf/libwapcaplet"]
path = vendor/netsurf/libwapcaplet
url = https://github.com/lightpanda-io/libwapcaplet.git/
url = git@github.com:lightpanda-io/libwapcaplet.git
[submodule "vendor/netsurf/libparserutils"]
path = vendor/netsurf/libparserutils
url = https://github.com/lightpanda-io/libparserutils.git/
url = git@github.com:lightpanda-io/libparserutils.git
[submodule "vendor/netsurf/libdom"]
path = vendor/netsurf/libdom
url = https://github.com/lightpanda-io/libdom.git/
url = git@github.com:lightpanda-io/libdom.git
[submodule "vendor/netsurf/share/netsurf-buildsystem"]
path = vendor/netsurf/share/netsurf-buildsystem
url = https://source.netsurf-browser.org/buildsystem.git
[submodule "vendor/netsurf/libhubbub"]
path = vendor/netsurf/libhubbub
url = https://github.com/lightpanda-io/libhubbub.git/
url = git@github.com:lightpanda-io/libhubbub.git
[submodule "tests/wpt"]
path = tests/wpt
url = https://github.com/lightpanda-io/wpt
[submodule "vendor/mimalloc"]
path = vendor/mimalloc
url = https://github.com/microsoft/mimalloc.git/
url = git@github.com:microsoft/mimalloc.git
[submodule "vendor/tls.zig"]
path = vendor/tls.zig
url = https://github.com/ianic/tls.zig.git/
url = git@github.com:ianic/tls.zig.git
[submodule "vendor/zig-async-io"]
path = vendor/zig-async-io
url = https://github.com/lightpanda-io/zig-async-io.git/
url = git@github.com:lightpanda-io/zig-async-io.git
[submodule "vendor/websocket.zig"]
path = vendor/websocket.zig
url = git@github.com:lightpanda-io/websocket.zig.git
branch = lightpanda


@@ -5,7 +5,7 @@ ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
ARG OS=linux
ARG ARCH=x86_64
ARG V8=11.1.134
ARG ZIG_V8=v0.1.11
ARG ZIG_V8=v0.1.9
RUN apt-get update -yq && \
apt-get install -yq xz-utils \


@@ -3,8 +3,6 @@
ZIG := zig
BC := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
# option test filter make unittest F="server"
F=
# OS and ARCH
kernel = $(shell uname -ms)
@@ -44,7 +42,7 @@ help:
# $(ZIG) commands
# ------------
.PHONY: build build-dev run run-release shell test bench download-zig wpt unittest
.PHONY: build build-dev run run-release shell test bench download-zig wpt
zig_version = $(shell grep 'recommended_zig_version = "' "vendor/zig-js-runtime/build.zig" | cut -d'"' -f2)
@@ -93,9 +91,6 @@ test:
@$(ZIG) build test -Dengine=v8 || (printf "\e[33mTest ERROR\e[0m\n"; exit 1;)
@printf "\e[33mTest OK\e[0m\n"
unittest:
@TEST_FILTER='${F}' $(ZIG) build unittest -freference-trace --summary all
# Install and build required dependencies commands
# ------------
.PHONY: install-submodule

README.md

@@ -7,18 +7,7 @@
<p align="center"><a href="https://lightpanda.io/">lightpanda.io</a></p>
<div align="center">
[![Commit Activity](https://img.shields.io/github/commit-activity/m/lightpanda-io/browser)](https://github.com/lightpanda-io/browser/commits/main)
[![License](https://img.shields.io/github/license/lightpanda-io/browser)](https://github.com/lightpanda-io/browser/blob/main/LICENSE)
[![Twitter Follow](https://img.shields.io/twitter/follow/lightpanda_io)](https://twitter.com/lightpanda_io)
[![GitHub stars](https://img.shields.io/github/stars/lightpanda-io/browser)](https://github.com/lightpanda-io/browser)
</div>
<div align="center">
<a href="https://trendshift.io/repositories/12815" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12815" alt="lightpanda-io%2Fbrowser | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
<br />
</div>
Lightpanda is the open-source browser made for headless usage:
@@ -27,7 +16,7 @@ Lightpanda is the open-source browser made for headless usage:
- Support of Web APIs (partial, WIP)
- Compatible with Playwright, Puppeteer through CDP (WIP)
Fast web automation for AI agents, LLM training, scraping and testing with minimal memory footprint:
Fast scraping and web automation with minimal memory footprint:
- Ultra-low memory footprint (9x less than Chrome)
- Exceptionally fast execution (11x faster than Chrome) & instant startup
@@ -36,6 +25,53 @@ Fast web automation for AI agents, LLM training, scraping and testing with minim
See [benchmark details](https://github.com/lightpanda-io/demo).
## Why?
### Javascript execution is mandatory for the modern web
In the good old days, scraping a webpage was as easy as making an HTTP request, cURL-like. Its not possible anymore, because Javascript is everywhere, like it or not:
- Ajax, Single Page App, infinite loading, “click to display”, instant search, etc.
- JS web frameworks: React, Vue, Angular & others
### Chrome is not the right tool
If we need Javascript, why not use a real web browser? Take a huge desktop application, hack it, and run it on the server. Hundreds or thousands of instances of Chrome if you use it at scale. Are you sure its such a good idea?
- Heavy on RAM and CPU, expensive to run
- Hard to package, deploy and maintain at scale
- Bloated, lots of features are not useful in headless usage
### Lightpanda is built for performance
If we want both Javascript and performance in a true headless browser, we need to start from scratch. Not another iteration of Chromium, really from a blank page. Crazy right? But thats we did:
- Not based on Chromium, Blink or WebKit
- Low-level system programming language (Zig) with optimisations in mind
- Opinionated: without graphical rendering
## Status
Lightpanda is still a work in progress and is currently at a Beta stage.
:warning: You should expect most websites to fail or crash.
Here are the key features we have implemented:
- [x] HTTP loader
- [x] HTML parser and DOM tree (based on Netsurf libs)
- [x] Javascript support (v8)
- [x] Basic DOM APIs
- [x] Ajax
- [x] XHR API
- [x] Fetch API
- [x] DOM dump
- [x] Basic CDP/websockets server
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.
## Quick start
### Install from the nightly builds
@@ -85,7 +121,7 @@ Once the CDP server started, you can run a Puppeteer script by configuring the
`browserWSEndpoint`.
```js
'use strict'
'use scrict'
import puppeteer from 'puppeteer-core';
@@ -140,9 +176,9 @@ brew install cmake
You can run `make install` to install deps all in one (or `make install-dev` if you need the development versions).
Be aware that the build task is very long and cpu consuming, as you will build from sources all dependencies, including the v8 Javascript engine.
Be aware that the build task is very long and cpu consuming, as you will build from sources all dependancies, including the v8 Javascript engine.
#### Step by step build dependency
#### Step by step build dependancy
The project uses git submodules for dependencies.
@@ -186,7 +222,7 @@ This build task is very long and cpu consuming, as you will build v8 from source
make install-zig-js-runtime
```
For dev env, use `make install-zig-js-runtime-dev`.
For dev env, use `make iinstall-zig-js-runtime-dev`.
## Test
@@ -235,50 +271,3 @@ Lightpanda accepts pull requests through GitHub.
You have to sign our [CLA](CLA.md) during the pull request process otherwise
we're not able to accept your contributions.
## Why?
### Javascript execution is mandatory for the modern web
In the good old days, scraping a webpage was as easy as making an HTTP request, cURL-like. Its not possible anymore, because Javascript is everywhere, like it or not:
- Ajax, Single Page App, infinite loading, “click to display”, instant search, etc.
- JS web frameworks: React, Vue, Angular & others
### Chrome is not the right tool
If we need Javascript, why not use a real web browser? Take a huge desktop application, hack it, and run it on the server. Hundreds or thousands of instances of Chrome if you use it at scale. Are you sure its such a good idea?
- Heavy on RAM and CPU, expensive to run
- Hard to package, deploy and maintain at scale
- Bloated, lots of features are not useful in headless usage
### Lightpanda is built for performance
If we want both Javascript and performance in a true headless browser, we need to start from scratch. Not another iteration of Chromium, really from a blank page. Crazy right? But thats what we did:
- Not based on Chromium, Blink or WebKit
- Low-level system programming language (Zig) with optimisations in mind
- Opinionated: without graphical rendering
## Status
Lightpanda is still a work in progress and is currently at a Beta stage.
:warning: You should expect most websites to fail or crash.
Here are the key features we have implemented:
- [x] HTTP loader
- [x] HTML parser and DOM tree (based on Netsurf libs)
- [x] Javascript support (v8)
- [x] Basic DOM APIs
- [x] Ajax
- [x] XHR API
- [x] Fetch API
- [x] DOM dump
- [x] Basic CDP/websockets server
NOTE: There are hundreds of Web APIs. Developing a browser (even just for headless mode) is a huge task. Coverage will increase over time.
You can also follow the progress of our Javascript support in our dedicated [zig-js-runtime](https://github.com/lightpanda-io/zig-js-runtime#development) project.


@@ -98,8 +98,8 @@ pub fn build(b: *std.Build) !void {
// compile
const tests = b.addTest(.{
.root_source_file = b.path("src/main_tests.zig"),
.test_runner = b.path("src/main_tests.zig"),
.root_source_file = b.path("src/run_tests.zig"),
.test_runner = b.path("src/test_runner.zig"),
.target = target,
.optimize = mode,
});
@@ -119,27 +119,6 @@ pub fn build(b: *std.Build) !void {
const test_step = b.step("test", "Run unit tests");
test_step.dependOn(&run_tests.step);
// unittest
// ----
// compile
const unit_tests = b.addTest(.{
.root_source_file = b.path("src/unit_tests.zig"),
.test_runner = b.path("src/unit_tests.zig"),
.target = target,
.optimize = mode,
});
try common(b, unit_tests, options);
const run_unit_tests = b.addRunArtifact(unit_tests);
if (b.args) |args| {
run_unit_tests.addArgs(args);
}
// step
const unit_test_step = b.step("unittest", "Run unit tests");
unit_test_step.dependOn(&run_unit_tests.step);
// wpt
// -----
@@ -189,6 +168,11 @@ fn common(
.root_source_file = b.path("vendor/tls.zig/src/main.zig"),
});
step.root_module.addImport("tls", tlsmod);
const wsmod = b.addModule("websocket", .{
.root_source_file = b.path("vendor/websocket.zig/src/websocket.zig"),
});
step.root_module.addImport("websocket", wsmod);
}
fn moduleNetSurf(b: *std.Build, target: std.Build.ResolvedTarget) !*std.Build.Module {


@@ -27,7 +27,6 @@ const XHR = @import("xhr/xhr.zig");
const Storage = @import("storage/storage.zig");
const URL = @import("url/url.zig");
const Iterators = @import("iterator/iterator.zig");
const XMLSerializer = @import("xmlserializer/xmlserializer.zig");
pub const HTMLDocument = @import("html/document.zig").HTMLDocument;
@@ -41,7 +40,6 @@ pub const Interfaces = generate.Tuple(.{
Storage.Interfaces,
URL.Interfaces,
Iterators.Interfaces,
XMLSerializer.Interfaces,
}){};
});
pub const UserContext = @import("user_context.zig").UserContext;


@@ -24,7 +24,7 @@ const Types = @import("root").Types;
const parser = @import("netsurf");
const Loader = @import("loader.zig").Loader;
const Dump = @import("dump.zig");
const Mime = @import("mime.zig").Mime;
const Mime = @import("mime.zig");
const jsruntime = @import("jsruntime");
const Loop = jsruntime.Loop;
@@ -36,9 +36,6 @@ const apiweb = @import("../apiweb.zig");
const Window = @import("../html/window.zig").Window;
const Walker = @import("../dom/walker.zig").WalkerDepthFirst;
const URL = @import("../url/url.zig").URL;
const Location = @import("../html/location.zig").Location;
const storage = @import("../storage/storage.zig");
const FetchResult = @import("../http/Client.zig").Client.FetchResult;
@@ -82,12 +79,6 @@ pub const Browser = struct {
self.session.deinit();
try Session.init(&self.session, alloc, loop, uri);
}
pub fn currentPage(self: *Browser) ?*Page {
if (self.session.page == null) return null;
return &self.session.page.?;
}
};
// Session is like a browser's tab.
@@ -111,9 +102,7 @@ pub const Session = struct {
loader: Loader,
env: Env = undefined,
inspector: ?jsruntime.Inspector = null,
window: Window,
// TODO move the shed to the browser?
storageShed: storage.Shed,
page: ?Page = null,
@@ -153,7 +142,7 @@ pub const Session = struct {
}
fn deinit(self: *Session) void {
if (self.page) |*p| p.deinit();
if (self.page) |*p| p.end();
if (self.inspector) |inspector| {
inspector.deinit(self.alloc);
@@ -212,10 +201,6 @@ pub const Page = struct {
uri: std.Uri = undefined,
origin: ?[]const u8 = null,
// html url and location
url: ?URL = null,
location: Location = .{},
raw_data: ?[]const u8 = null,
fn init(
@@ -259,14 +244,6 @@ pub const Page = struct {
self.session.env.stop();
// TODO unload document: https://html.spec.whatwg.org/#unloading-documents
if (self.url) |*u| u.deinit(self.arena.allocator());
self.url = null;
self.location.url = null;
self.session.window.replaceLocation(&self.location) catch |e| {
log.err("reset window location: {any}", .{e});
};
self.doc = null;
// clear netsurf memory arena.
parser.deinit();
@@ -274,7 +251,6 @@ pub const Page = struct {
}
pub fn deinit(self: *Page) void {
self.end();
self.arena.deinit();
self.session.page = null;
}
@@ -332,11 +308,6 @@ pub const Page = struct {
self.rawuri = try alloc.dupe(u8, uri);
self.uri = std.Uri.parse(self.rawuri.?) catch try std.Uri.parseAfterScheme("", self.rawuri.?);
if (self.url) |*prev| prev.deinit(alloc);
self.url = try URL.constructor(alloc, self.rawuri.?, null);
self.location.url = &self.url.?;
try self.session.window.replaceLocation(&self.location);
// prepare origin value.
var buf = std.ArrayList(u8).init(alloc);
defer buf.deinit();
@@ -383,10 +354,8 @@ pub const Page = struct {
defer alloc.free(ct.?);
log.debug("header content-type: {s}", .{ct.?});
var mime = try Mime.parse(alloc, ct.?);
defer mime.deinit();
if (mime.isHTML()) {
const mime = try Mime.parse(ct.?);
if (mime.eql(Mime.HTML)) {
try self.loadHTMLDoc(req.reader(), mime.charset orelse "utf-8", auxData);
} else {
log.info("non-HTML document: {s}", .{ct.?});
@@ -422,7 +391,7 @@ pub const Page = struct {
// TODO set the referrer to the document.
try self.session.window.replaceDocument(html_doc);
self.session.window.replaceDocument(html_doc);
self.session.window.setStorageShelf(
try self.session.storageShed.getOrPut(self.origin orelse "null"),
);


@@ -25,87 +25,82 @@ const Walker = @import("../dom/walker.zig").WalkerChildren;
// writer must be a std.io.Writer
pub fn writeHTML(doc: *parser.Document, writer: anytype) !void {
try writer.writeAll("<!DOCTYPE html>\n");
try writeChildren(parser.documentToNode(doc), writer);
try writeNode(parser.documentToNode(doc), writer);
try writer.writeAll("\n");
}
pub fn writeNode(node: *parser.Node, writer: anytype) anyerror!void {
switch (try parser.nodeType(node)) {
.element => {
// open the tag
const tag = try parser.nodeLocalName(node);
try writer.writeAll("<");
try writer.writeAll(tag);
// write the attributes
const map = try parser.nodeGetAttributes(node);
const ln = try parser.namedNodeMapGetLength(map);
var i: u32 = 0;
while (i < ln) {
const attr = try parser.namedNodeMapItem(map, i) orelse break;
try writer.writeAll(" ");
try writer.writeAll(try parser.attributeGetName(attr));
try writer.writeAll("=\"");
const attribute_value = try parser.attributeGetValue(attr) orelse "";
try writeEscapedAttributeValue(writer, attribute_value);
try writer.writeAll("\"");
i += 1;
}
try writer.writeAll(">");
// void elements can't have any content.
if (try isVoid(parser.nodeToElement(node))) return;
// write the children
// TODO avoid recursion
try writeChildren(node, writer);
// close the tag
try writer.writeAll("</");
try writer.writeAll(tag);
try writer.writeAll(">");
},
.text => {
const v = try parser.nodeValue(node) orelse return;
try writeEscapedTextNode(writer, v);
},
.cdata_section => {
const v = try parser.nodeValue(node) orelse return;
try writer.writeAll("<![CDATA[");
try writer.writeAll(v);
try writer.writeAll("]]>");
},
.comment => {
const v = try parser.nodeValue(node) orelse return;
try writer.writeAll("<!--");
try writer.writeAll(v);
try writer.writeAll("-->");
},
// TODO handle processing instruction dump
.processing_instruction => return,
// document fragment is outside of the main document DOM, so we
// don't output it.
.document_fragment => return,
// document will never be called, but required for completeness.
.document => return,
// done globally instead, but required for completeness.
.document_type => return,
// deprecated
.attribute => return,
.entity_reference => return,
.entity => return,
.notation => return,
}
}
// writer must be a std.io.Writer
pub fn writeChildren(root: *parser.Node, writer: anytype) !void {
pub fn writeNode(root: *parser.Node, writer: anytype) !void {
const walker = Walker{};
var next: ?*parser.Node = null;
while (true) {
next = try walker.get_next(root, next) orelse break;
try writeNode(next.?, writer);
switch (try parser.nodeType(next.?)) {
.element => {
// open the tag
const tag = try parser.nodeLocalName(next.?);
try writer.writeAll("<");
try writer.writeAll(tag);
// write the attributes
const map = try parser.nodeGetAttributes(next.?);
const ln = try parser.namedNodeMapGetLength(map);
var i: u32 = 0;
while (i < ln) {
const attr = try parser.namedNodeMapItem(map, i) orelse break;
try writer.writeAll(" ");
try writer.writeAll(try parser.attributeGetName(attr));
try writer.writeAll("=\"");
try writer.writeAll(try parser.attributeGetValue(attr) orelse "");
try writer.writeAll("\"");
i += 1;
}
try writer.writeAll(">");
// void elements can't have any content.
if (try isVoid(parser.nodeToElement(next.?))) continue;
// write the children
// TODO avoid recursion
try writeNode(next.?, writer);
// close the tag
try writer.writeAll("</");
try writer.writeAll(tag);
try writer.writeAll(">");
},
.text => {
const v = try parser.nodeValue(next.?) orelse continue;
try writer.writeAll(v);
},
.cdata_section => {
const v = try parser.nodeValue(next.?) orelse continue;
try writer.writeAll("<![CDATA[");
try writer.writeAll(v);
try writer.writeAll("]]>");
},
.comment => {
const v = try parser.nodeValue(next.?) orelse continue;
try writer.writeAll("<!--");
try writer.writeAll(v);
try writer.writeAll("-->");
},
// TODO handle processing instruction dump
.processing_instruction => continue,
// document fragment is outside of the main document DOM, so we
// don't output it.
.document_fragment => continue,
// document will never be called, but required for completeness.
.document => continue,
// done globally instead, but required for completeness.
.document_type => continue,
// deprecated
.attribute => continue,
.entity_reference => continue,
.entity => continue,
.notation => continue,
}
}
}
@@ -120,87 +115,18 @@ fn isVoid(elem: *parser.Element) !bool {
};
}
fn writeEscapedTextNode(writer: anytype, value: []const u8) !void {
var v = value;
while (v.len > 0) {
const index = std.mem.indexOfAnyPos(u8, v, 0, &.{ '&', '<', '>' }) orelse {
return writer.writeAll(v);
};
try writer.writeAll(v[0..index]);
switch (v[index]) {
'&' => try writer.writeAll("&amp;"),
'<' => try writer.writeAll("&lt;"),
'>' => try writer.writeAll("&gt;"),
else => unreachable,
}
v = v[index + 1 ..];
}
}
fn writeEscapedAttributeValue(writer: anytype, value: []const u8) !void {
var v = value;
while (v.len > 0) {
const index = std.mem.indexOfAnyPos(u8, v, 0, &.{ '&', '<', '>', '"' }) orelse {
return writer.writeAll(v);
};
try writer.writeAll(v[0..index]);
switch (v[index]) {
'&' => try writer.writeAll("&amp;"),
'<' => try writer.writeAll("&lt;"),
'>' => try writer.writeAll("&gt;"),
'"' => try writer.writeAll("&quot;"),
else => unreachable,
}
v = v[index + 1 ..];
}
}
const testing = std.testing;
test "dump.writeHTML" {
try testWriteHTML(
"<div id=\"content\">Over 9000!</div>",
"<div id=\"content\">Over 9000!</div>",
);
const out = try std.fs.openFileAbsolute("/dev/null", .{ .mode = .write_only });
defer out.close();
try testWriteHTML(
"<root><!-- a comment --></root>",
"<root><!-- a comment --></root>",
);
const file = try std.fs.cwd().openFile("test.html", .{});
defer file.close();
try testWriteHTML(
"<p>&lt; &gt; &amp;</p>",
"<p>&lt; &gt; &amp;</p>",
);
try testWriteHTML(
"<p id=\"&quot;&gt;&lt;&amp;&quot;''\">wat?</p>",
"<p id='\">&lt;&amp;&quot;&#39;&apos;'>wat?</p>",
);
try testWriteFullHTML(
\\<!DOCTYPE html>
\\<html><head><title>It's over what?</title><meta name="a" value="b">
\\</head><body>9000</body></html>
\\
, "<html><title>It's over what?</title><meta name=a value=\"b\">\n<body>9000");
}
fn testWriteHTML(comptime expected_body: []const u8, src: []const u8) !void {
const expected =
"<!DOCTYPE html>\n<html><head></head><body>" ++
expected_body ++
"</body></html>\n";
return testWriteFullHTML(expected, src);
}
fn testWriteFullHTML(comptime expected: []const u8, src: []const u8) !void {
var buf = std.ArrayListUnmanaged(u8){};
defer buf.deinit(testing.allocator);
const doc_html = try parser.documentHTMLParseFromStr(src);
const doc_html = try parser.documentHTMLParse(file.reader(), "UTF-8");
// ignore close error
defer parser.documentHTMLClose(doc_html) catch {};
const doc = parser.documentHTMLToDocument(doc_html);
try writeHTML(doc, buf.writer(testing.allocator));
try testing.expectEqualStrings(expected, buf.items);
try writeHTML(doc, out);
}
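
For reference, a minimal usage sketch of the serializer exercised by the tests above. It assumes the buffer-based helper variant shown in this hunk and reuses the parser/dump function names exactly as they appear here; the import paths are illustrative, not confirmed:

```zig
// Illustrative sketch only: mirrors the buffer-based test helper above.
// Import paths are assumptions; function names come verbatim from the hunk.
const std = @import("std");
const parser = @import("netsurf");
const dump = @import("dump.zig");

fn htmlToString(alloc: std.mem.Allocator, src: []const u8) ![]u8 {
    var buf = std.ArrayListUnmanaged(u8){};
    errdefer buf.deinit(alloc);

    const doc_html = try parser.documentHTMLParseFromStr(src);
    // ignore close error, as the test in this hunk does
    defer parser.documentHTMLClose(doc_html) catch {};

    const doc = parser.documentHTMLToDocument(doc_html);
    try dump.writeHTML(doc, buf.writer(alloc));
    return buf.toOwnedSlice(alloc);
}
```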


@@ -79,19 +79,13 @@ pub const Loader = struct {
}
};
test "loader: get" {
test "basic url get" {
const alloc = std.testing.allocator;
var loader = Loader.init(alloc);
defer loader.deinit();
const uri = try std.Uri.parse("http://localhost:9582/loader");
var result = try loader.get(alloc, uri);
var result = try loader.get(alloc, "https://en.wikipedia.org/wiki/Main_Page");
defer result.deinit();
try std.testing.expectEqual(.ok, result.req.response.status);
var res: [128]u8 = undefined;
const size = try result.req.readAll(&res);
try std.testing.expectEqual(6, size);
try std.testing.expectEqualStrings("Hello!", res[0..6]);
try std.testing.expect(result.req.response.status == std.http.Status.ok);
}


@@ -17,375 +17,141 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const Allocator = std.mem.Allocator;
pub const Mime = struct {
content_type: ContentType,
params: []const u8 = "",
charset: ?[]const u8 = null,
arena: std.heap.ArenaAllocator,
pub const ContentTypeEnum = enum {
text_xml,
text_html,
text_plain,
other,
};
pub const ContentType = union(ContentTypeEnum) {
text_xml: void,
text_html: void,
text_plain: void,
other: struct { type: []const u8, sub_type: []const u8 },
};
pub fn parse(allocator: Allocator, input: []const u8) !Mime {
if (input.len > 255) {
return error.TooBig;
}
var arena = std.heap.ArenaAllocator.init(allocator);
errdefer arena.deinit();
var trimmed = trim(input);
const content_type, const type_len = try parseContentType(trimmed);
if (type_len >= trimmed.len) {
return .{ .arena = arena, .content_type = content_type };
}
const params = trimLeft(trimmed[type_len..]);
var charset: ?[]const u8 = null;
var it = std.mem.splitScalar(u8, params, ';');
while (it.next()) |attr| {
const i = std.mem.indexOfScalarPos(u8, attr, 0, '=') orelse return error.Invalid;
const name = trimLeft(attr[0..i]);
const value = trimRight(attr[i + 1 ..]);
if (value.len == 0) {
return error.Invalid;
}
switch (name.len) {
7 => if (isCaseEqual("charset", name)) {
charset = try parseValue(arena.allocator(), value);
},
else => {},
}
}
return .{
.arena = arena,
.params = params,
.charset = charset,
.content_type = content_type,
};
}
pub fn deinit(self: *Mime) void {
self.arena.deinit();
}
pub fn isHTML(self: *const Mime) bool {
return self.content_type == .text_html;
}
fn parseContentType(value: []const u8) !struct { ContentType, usize } {
const separator = std.mem.indexOfScalarPos(u8, value, 0, '/') orelse {
return error.Invalid;
};
const end = std.mem.indexOfScalarPos(u8, value, separator, ';') orelse blk: {
break :blk value.len;
};
const main_type = value[0..separator];
const sub_type = trimRight(value[separator + 1 .. end]);
if (parseCommonContentType(main_type, sub_type)) |content_type| {
return .{ content_type, end + 1 };
}
if (main_type.len == 0) {
return error.Invalid;
}
if (validType(main_type) == false) {
return error.Invalid;
}
if (sub_type.len == 0) {
return error.Invalid;
}
if (validType(sub_type) == false) {
return error.Invalid;
}
const content_type = ContentType{ .other = .{
.type = main_type,
.sub_type = sub_type,
} };
return .{ content_type, end + 1 };
}
fn parseCommonContentType(main_type: []const u8, sub_type: []const u8) ?ContentType {
switch (main_type.len) {
4 => if (isCaseEqual("text", main_type)) {
switch (sub_type.len) {
3 => if (isCaseEqual("xml", sub_type)) {
return .{ .text_xml = {} };
},
4 => if (isCaseEqual("html", sub_type)) {
return .{ .text_html = {} };
},
5 => if (isCaseEqual("plain", sub_type)) {
return .{ .text_plain = {} };
},
else => {},
}
},
else => {},
}
return null;
}
const T_SPECIAL = blk: {
var v = [_]bool{false} ** 256;
for ("()<>@,;:\\\"/[]?=") |b| {
v[b] = true;
}
break :blk v;
};
fn parseValue(allocator: Allocator, value: []const u8) ![]const u8 {
if (value[0] != '"') {
return value;
}
// 1 to skip the opening quote
var value_pos: usize = 1;
var unescaped_len: usize = 0;
const last = value.len - 1;
while (value_pos < value.len) {
switch (value[value_pos]) {
'"' => break,
'\\' => {
if (value_pos == last) {
return error.Invalid;
}
const next = value[value_pos + 1];
if (T_SPECIAL[next] == false) {
return error.Invalid;
}
value_pos += 2;
},
else => value_pos += 1,
}
unescaped_len += 1;
}
if (unescaped_len == 0) {
return error.Invalid;
}
value_pos = 1;
const owned = try allocator.alloc(u8, unescaped_len);
for (0..unescaped_len) |i| {
switch (value[value_pos]) {
'"' => break,
'\\' => {
owned[i] = value[value_pos + 1];
value_pos += 2;
},
else => |c| {
owned[i] = c;
value_pos += 1;
},
}
}
return owned;
}
const VALID_CODEPOINTS = blk: {
var v: [256]bool = undefined;
for (0..256) |i| {
v[i] = std.ascii.isAlphanumeric(i);
}
for ("!#$%&\\*+-.^'_`|~") |b| {
v[b] = true;
}
break :blk v;
};
fn validType(value: []const u8) bool {
for (value) |b| {
if (VALID_CODEPOINTS[b] == false) {
return false;
}
}
return true;
}
fn trim(s: []const u8) []const u8 {
return std.mem.trim(u8, s, &std.ascii.whitespace);
}
fn trimLeft(s: []const u8) []const u8 {
return std.mem.trimLeft(u8, s, &std.ascii.whitespace);
}
fn trimRight(s: []const u8) []const u8 {
return std.mem.trimRight(u8, s, &std.ascii.whitespace);
}
fn isCaseEqual(comptime target: anytype, value: []const u8) bool {
// - 8 beause we don't care about the sentinel
const bit_len = @bitSizeOf(@TypeOf(target.*)) - 8;
const byte_len = bit_len / 8;
const T = @Type(.{ .Int = .{
.bits = bit_len,
.signedness = .unsigned,
} });
const bit_target: T = @bitCast(@as(*const [byte_len]u8, target).*);
if (@as(T, @bitCast(value[0..byte_len].*)) == bit_target) {
return true;
}
return std.ascii.eqlIgnoreCase(value, target);
}
};
const testing = std.testing;
test "Mime: invalid " {
const invalids = [_][]const u8{
"",
"text",
"text /html",
"text/ html",
"text / html",
"text/html other",
"text/html; x",
"text/html; x=",
"text/html; x= ",
"text/html; = ",
"text/html;=",
"text/html; charset=\"\"",
"text/html; charset=\"",
"text/html; charset=\"\\",
"text/html; charset=\"\\a\"", // invalid to escape non special characters
};
for (invalids) |invalid| {
try testing.expectError(error.Invalid, Mime.parse(undefined, invalid));
}
}
const strparser = @import("../str/parser.zig");
const Reader = strparser.Reader;
const trim = strparser.trim;
test "Mime: parse common" {
try expect(.{ .content_type = .{ .text_xml = {} } }, "text/xml");
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html");
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain");
const Self = @This();
try expect(.{ .content_type = .{ .text_xml = {} } }, "text/xml;");
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html;");
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain;");
try expect(.{ .content_type = .{ .text_xml = {} } }, " \ttext/xml");
try expect(.{ .content_type = .{ .text_html = {} } }, "text/html ");
try expect(.{ .content_type = .{ .text_plain = {} } }, "text/plain \t\t");
try expect(.{ .content_type = .{ .text_xml = {} } }, "TEXT/xml");
try expect(.{ .content_type = .{ .text_html = {} } }, "text/Html");
try expect(.{ .content_type = .{ .text_plain = {} } }, "TEXT/PLAIN");
try expect(.{ .content_type = .{ .text_xml = {} } }, " TeXT/xml");
try expect(.{ .content_type = .{ .text_html = {} } }, "teXt/HtML ;");
try expect(.{ .content_type = .{ .text_plain = {} } }, "tExT/PlAiN;");
}
test "Mime: parse uncommon" {
const text_javascript = Expectation{
.content_type = .{ .other = .{ .type = "text", .sub_type = "javascript" } },
};
try expect(text_javascript, "text/javascript");
try expect(text_javascript, "text/javascript;");
try expect(text_javascript, " text/javascript\t ");
try expect(text_javascript, " text/javascript\t ;");
try expect(
.{ .content_type = .{ .other = .{ .type = "Text", .sub_type = "Javascript" } } },
"Text/Javascript",
);
}
test "Mime: parse charset" {
try expect(.{
.content_type = .{ .text_xml = {} },
.charset = "utf-8",
.params = "charset=utf-8",
}, "text/xml; charset=utf-8");
try expect(.{
.content_type = .{ .text_xml = {} },
.charset = "utf-8",
.params = "charset=\"utf-8\"",
}, "text/xml;charset=\"utf-8\"");
try expect(.{
.content_type = .{ .text_xml = {} },
.charset = "\\ \" ",
.params = "charset=\"\\\\ \\\" \"",
}, "text/xml;charset=\"\\\\ \\\" \" ");
}
test "Mime: isHTML" {
const isHTML = struct {
fn isHTML(expected: bool, input: []const u8) !void {
var mime = try Mime.parse(testing.allocator, input);
defer mime.deinit();
try testing.expectEqual(expected, mime.isHTML());
}
}.isHTML;
try isHTML(true, "text/html");
try isHTML(true, "text/html;");
try isHTML(true, "text/html; charset=utf-8");
try isHTML(false, "text/htm"); // htm not html
try isHTML(false, "text/plain");
try isHTML(false, "over/9000");
}
const Expectation = struct {
content_type: Mime.ContentType,
params: []const u8 = "",
charset: ?[]const u8 = null,
const MimeError = error{
Empty,
TooBig,
Invalid,
InvalidChar,
};
fn expect(expected: Expectation, input: []const u8) !void {
var actual = try Mime.parse(testing.allocator, input);
defer actual.deinit();
mtype: []const u8,
msubtype: []const u8,
params: []const u8 = "",
try testing.expectEqual(
std.meta.activeTag(expected.content_type),
std.meta.activeTag(actual.content_type),
);
charset: ?[]const u8 = null,
boundary: ?[]const u8 = null,
switch (expected.content_type) {
.other => |e| {
const a = actual.content_type.other;
try testing.expectEqualStrings(e.type, a.type);
try testing.expectEqualStrings(e.sub_type, a.sub_type);
},
else => {}, // already asserted above
pub const Empty = Self{ .mtype = "", .msubtype = "" };
pub const HTML = Self{ .mtype = "text", .msubtype = "html" };
pub const Javascript = Self{ .mtype = "application", .msubtype = "javascript" };
// https://mimesniff.spec.whatwg.org/#http-token-code-point
fn isHTTPCodePoint(c: u8) bool {
return switch (c) {
'!', '#', '$', '%', '&', '\'', '*', '+', '-', '.', '^' => return true,
'_', '`', '|', '~' => return true,
else => std.ascii.isAlphanumeric(c),
};
}
fn valid(s: []const u8) bool {
const ln = s.len;
var i: usize = 0;
while (i < ln) {
if (!isHTTPCodePoint(s[i])) return false;
i += 1;
}
return true;
}
// https://mimesniff.spec.whatwg.org/#parsing-a-mime-type
pub fn parse(s: []const u8) Self.MimeError!Self {
const ln = s.len;
if (ln == 0) return MimeError.Empty;
// limit input size
if (ln > 255) return MimeError.TooBig;
var res = Self{ .mtype = "", .msubtype = "" };
var r = Reader{ .s = s };
res.mtype = trim(r.until('/'));
if (res.mtype.len == 0) return MimeError.Invalid;
if (!valid(res.mtype)) return MimeError.InvalidChar;
if (!r.skip()) return MimeError.Invalid;
res.msubtype = trim(r.until(';'));
if (res.msubtype.len == 0) return MimeError.Invalid;
if (!valid(res.msubtype)) return MimeError.InvalidChar;
if (!r.skip()) return res;
res.params = trim(r.tail());
if (res.params.len == 0) return MimeError.Invalid;
// parse well known parameters.
// don't check invalid parameter format.
var rp = Reader{ .s = res.params };
while (true) {
const name = trim(rp.until('='));
if (!rp.skip()) return res;
const value = trim(rp.until(';'));
if (std.ascii.eqlIgnoreCase(name, "charset")) {
res.charset = value;
}
if (std.ascii.eqlIgnoreCase(name, "boundary")) {
res.boundary = value;
}
if (!rp.skip()) return res;
}
try testing.expectEqualStrings(expected.params, actual.params);
return res;
}
if (expected.charset) |ec| {
try testing.expectEqualStrings(ec, actual.charset.?);
} else {
try testing.expectEqual(null, actual.charset);
test "parse valid" {
for ([_][]const u8{
"text/html",
" \ttext/html",
"text \t/html",
"text/ \thtml",
"text/html \t",
}) |tc| {
const m = try Self.parse(tc);
try testing.expectEqualStrings("text", m.mtype);
try testing.expectEqualStrings("html", m.msubtype);
}
const m2 = try Self.parse("text/javascript1.5");
try testing.expectEqualStrings("text", m2.mtype);
try testing.expectEqualStrings("javascript1.5", m2.msubtype);
const m3 = try Self.parse("text/html; charset=utf-8");
try testing.expectEqualStrings("text", m3.mtype);
try testing.expectEqualStrings("html", m3.msubtype);
try testing.expectEqualStrings("charset=utf-8", m3.params);
try testing.expectEqualStrings("utf-8", m3.charset.?);
const m4 = try Self.parse("text/html; boundary=----");
try testing.expectEqualStrings("text", m4.mtype);
try testing.expectEqualStrings("html", m4.msubtype);
try testing.expectEqualStrings("boundary=----", m4.params);
try testing.expectEqualStrings("----", m4.boundary.?);
}
test "parse invalid" {
for ([_][]const u8{
"",
"te xt/html;",
"te@xt/html;",
"text/ht@ml;",
"text/html;",
"/text/html",
"/html",
}) |tc| {
_ = Self.parse(tc) catch continue;
try testing.expect(false);
}
}
// Compare type and subtype.
pub fn eql(self: Self, b: Self) bool {
if (!std.mem.eql(u8, self.mtype, b.mtype)) return false;
return std.mem.eql(u8, self.msubtype, b.msubtype);
}
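
For reference, a minimal usage sketch of the content-type parser above. It assumes the allocator-based `Mime.parse` variant shown in this hunk (the hunk also carries an allocator-free variant that is compared with `eql`); the import path is illustrative, not confirmed:

```zig
// Illustrative sketch only: follows the allocator-based Mime.parse variant
// shown above. The import path is an assumption.
const std = @import("std");
const Mime = @import("mime.zig").Mime;

test "Mime: usage sketch" {
    var mime = try Mime.parse(std.testing.allocator, "text/html; charset=utf-8");
    defer mime.deinit();

    try std.testing.expect(mime.isHTML());
    try std.testing.expectEqualStrings("charset=utf-8", mime.params);
    try std.testing.expectEqualStrings("utf-8", mime.charset.?);
}
```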


@@ -34,7 +34,6 @@ const IncomingMessage = @import("msg.zig").IncomingMessage;
const Input = @import("msg.zig").Input;
const inspector = @import("inspector.zig").inspector;
const dom = @import("dom.zig").dom;
const cdpdom = @import("dom.zig");
const css = @import("css.zig").css;
const security = @import("security.zig").security;
@@ -122,7 +121,7 @@ pub fn dispatch(
pub const State = struct {
executionContextId: u32 = 0,
contextID: ?[]const u8 = null,
sessionID: SessionID = .CONTEXTSESSIONID0497A05C95417CF4,
sessionID: ?[]const u8 = null,
frameID: []const u8 = FrameID,
url: []const u8 = URLBase,
securityOrigin: []const u8 = URLBase,
@@ -130,33 +129,6 @@ pub const State = struct {
loaderID: []const u8 = LoaderID,
page_life_cycle_events: bool = false, // TODO; Target based value
// DOM
nodelist: cdpdom.NodeList,
nodesearchlist: cdpdom.NodeSearchList,
pub fn init(alloc: std.mem.Allocator) State {
return .{
.nodelist = cdpdom.NodeList.init(alloc),
.nodesearchlist = cdpdom.NodeSearchList.init(alloc),
};
}
pub fn deinit(self: *State) void {
self.nodelist.deinit();
// deinit all node searches.
for (self.nodesearchlist.items) |*s| s.deinit();
self.nodesearchlist.deinit();
}
pub fn reset(self: *State) void {
self.nodelist.reset();
// deinit all node searches.
for (self.nodesearchlist.items) |*s| s.deinit();
self.nodesearchlist.clearAndFree();
}
};
// Utils
@@ -234,9 +206,6 @@ pub fn sendEvent(
params: T,
sessionID: ?[]const u8,
) !void {
// some clients like chromedp expects empty parameters structs.
if (T == void) @compileError("sendEvent: use struct{} instead of void for empty parameters");
log_cdp.debug("Event > method {s}, sessionID {?s}", .{ name, sessionID });
const Resp = struct {
method: []const u8,
@@ -253,21 +222,8 @@ pub fn sendEvent(
// ------
// TODO: hard coded IDs
pub const SessionID = enum {
BROWSERSESSIONID597D9875C664CAC0,
CONTEXTSESSIONID0497A05C95417CF4,
pub fn parse(str: []const u8) !SessionID {
inline for (@typeInfo(SessionID).Enum.fields) |enumField| {
if (std.mem.eql(u8, str, enumField.name)) {
return @field(SessionID, enumField.name);
}
}
return error.InvalidSessionID;
}
};
pub const BrowserSessionID = @tagName(SessionID.BROWSERSESSIONID597D9875C664CAC0);
pub const ContextSessionID = @tagName(SessionID.CONTEXTSESSIONID0497A05C95417CF4);
pub const BrowserSessionID = "BROWSERSESSIONID597D9875C664CAC0";
pub const ContextSessionID = "CONTEXTSESSIONID0497A05C95417CF4";
pub const URLBase = "chrome://newtab/";
pub const LoaderID = "LOADERID24DD2FD56CF1EF33C965C79C";
pub const FrameID = "FRAMEIDD8AED408A0467AC93100BCDBE";


@@ -24,18 +24,11 @@ const cdp = @import("cdp.zig");
const result = cdp.result;
const IncomingMessage = @import("msg.zig").IncomingMessage;
const Input = @import("msg.zig").Input;
const css = @import("../dom/css.zig");
const parser = @import("netsurf");
const log = std.log.scoped(.cdp);
const Methods = enum {
enable,
getDocument,
performSearch,
getSearchResults,
discardSearchResults,
};
pub fn dom(
@@ -49,10 +42,6 @@ pub fn dom(
return switch (method) {
.enable => enable(alloc, msg, ctx),
.getDocument => getDocument(alloc, msg, ctx),
.performSearch => performSearch(alloc, msg, ctx),
.getSearchResults => getSearchResults(alloc, msg, ctx),
.discardSearchResults => discardSearchResults(alloc, msg, ctx),
};
}
@@ -68,275 +57,3 @@ fn enable(
return result(alloc, input.id, null, null, input.sessionId);
}
// NodeList references tree nodes with an array id.
pub const NodeList = struct {
coll: List,
const List = std.ArrayList(*parser.Node);
pub fn init(alloc: std.mem.Allocator) NodeList {
return .{
.coll = List.init(alloc),
};
}
pub fn deinit(self: *NodeList) void {
self.coll.deinit();
}
pub fn reset(self: *NodeList) void {
self.coll.clearAndFree();
}
pub fn set(self: *NodeList, node: *parser.Node) !NodeId {
for (self.coll.items, 0..) |n, i| {
if (n == node) return @intCast(i);
}
try self.coll.append(node);
return @intCast(self.coll.items.len);
}
};
const NodeId = u32;
const Node = struct {
nodeId: NodeId,
parentId: ?NodeId = null,
backendNodeId: NodeId,
nodeType: u32,
nodeName: []const u8 = "",
localName: []const u8 = "",
nodeValue: []const u8 = "",
childNodeCount: ?u32 = null,
children: ?[]const Node = null,
documentURL: ?[]const u8 = null,
baseURL: ?[]const u8 = null,
xmlVersion: []const u8 = "",
compatibilityMode: []const u8 = "NoQuirksMode",
isScrollable: bool = false,
fn init(n: *parser.Node, nlist: *NodeList) !Node {
const id = try nlist.set(n);
return .{
.nodeId = id,
.backendNodeId = id,
.nodeType = @intFromEnum(try parser.nodeType(n)),
.nodeName = try parser.nodeName(n),
.localName = try parser.nodeLocalName(n),
.nodeValue = try parser.nodeValue(n) orelse "",
};
}
fn initChildren(
self: *Node,
alloc: std.mem.Allocator,
n: *parser.Node,
nlist: *NodeList,
) !std.ArrayList(Node) {
const children = try parser.nodeGetChildNodes(n);
const ln = try parser.nodeListLength(children);
self.childNodeCount = ln;
var list = try std.ArrayList(Node).initCapacity(alloc, ln);
var i: u32 = 0;
while (i < ln) {
defer i += 1;
const child = try parser.nodeListItem(children, i) orelse continue;
try list.append(try Node.init(child, nlist));
}
self.children = list.items;
return list;
}
};
// https://chromedevtools.github.io/devtools-protocol/tot/DOM/#method-getDocument
fn getDocument(
alloc: std.mem.Allocator,
msg: *IncomingMessage,
ctx: *Ctx,
) ![]const u8 {
// input
const Params = struct {
depth: ?u32 = null,
pierce: ?bool = null,
};
const input = try Input(Params).get(alloc, msg);
defer input.deinit();
std.debug.assert(input.sessionId != null);
log.debug("Req > id {d}, method {s}", .{ input.id, "DOM.getDocument" });
// retrieve the root node
const page = ctx.browser.currentPage() orelse return error.NoPage;
if (page.doc == null) return error.NoDocument;
const node = parser.documentToNode(page.doc.?);
var n = try Node.init(node, &ctx.state.nodelist);
var list = try n.initChildren(alloc, node, &ctx.state.nodelist);
defer list.deinit();
// output
const Resp = struct {
root: Node,
};
const resp: Resp = .{
.root = n,
};
const res = try result(alloc, input.id, Resp, resp, input.sessionId);
try ctx.send(res);
return "";
}
pub const NodeSearch = struct {
coll: List,
name: []u8,
alloc: std.mem.Allocator,
var count: u8 = 0;
const List = std.ArrayListUnmanaged(NodeId);
pub fn initCapacity(alloc: std.mem.Allocator, ln: usize) !NodeSearch {
count += 1;
return .{
.alloc = alloc,
.coll = try List.initCapacity(alloc, ln),
.name = try std.fmt.allocPrint(alloc, "{d}", .{count}),
};
}
pub fn deinit(self: *NodeSearch) void {
self.coll.deinit(self.alloc);
self.alloc.free(self.name);
}
pub fn append(self: *NodeSearch, id: NodeId) !void {
try self.coll.append(self.alloc, id);
}
};
pub const NodeSearchList = std.ArrayList(NodeSearch);
// https://chromedevtools.github.io/devtools-protocol/tot/DOM/#method-performSearch
fn performSearch(
alloc: std.mem.Allocator,
msg: *IncomingMessage,
ctx: *Ctx,
) ![]const u8 {
// input
const Params = struct {
query: []const u8,
includeUserAgentShadowDOM: ?bool = null,
};
const input = try Input(Params).get(alloc, msg);
defer input.deinit();
std.debug.assert(input.sessionId != null);
log.debug("Req > id {d}, method {s}", .{ input.id, "DOM.performSearch" });
// retrieve the root node
const page = ctx.browser.currentPage() orelse return error.NoPage;
if (page.doc == null) return error.NoDocument;
const list = try css.querySelectorAll(alloc, parser.documentToNode(page.doc.?), input.params.query);
const ln = list.nodes.items.len;
var ns = try NodeSearch.initCapacity(alloc, ln);
for (list.nodes.items) |n| {
const id = try ctx.state.nodelist.set(n);
try ns.append(id);
}
try ctx.state.nodesearchlist.append(ns);
// output
const Resp = struct {
searchId: []const u8,
resultCount: u32,
};
const resp: Resp = .{
.searchId = ns.name,
.resultCount = @intCast(ln),
};
return result(alloc, input.id, Resp, resp, input.sessionId);
}
// https://chromedevtools.github.io/devtools-protocol/tot/DOM/#method-discardSearchResults
fn discardSearchResults(
alloc: std.mem.Allocator,
msg: *IncomingMessage,
ctx: *Ctx,
) ![]const u8 {
// input
const Params = struct {
searchId: []const u8,
};
const input = try Input(Params).get(alloc, msg);
defer input.deinit();
std.debug.assert(input.sessionId != null);
log.debug("Req > id {d}, method {s}", .{ input.id, "DOM.discardSearchResults" });
// retrieve the search from context
for (ctx.state.nodesearchlist.items, 0..) |*s, i| {
if (!std.mem.eql(u8, s.name, input.params.searchId)) continue;
s.deinit();
_ = ctx.state.nodesearchlist.swapRemove(i);
break;
}
return result(alloc, input.id, null, null, input.sessionId);
}
// https://chromedevtools.github.io/devtools-protocol/tot/DOM/#method-getSearchResults
fn getSearchResults(
alloc: std.mem.Allocator,
msg: *IncomingMessage,
ctx: *Ctx,
) ![]const u8 {
// input
const Params = struct {
searchId: []const u8,
fromIndex: u32,
toIndex: u32,
};
const input = try Input(Params).get(alloc, msg);
defer input.deinit();
std.debug.assert(input.sessionId != null);
log.debug("Req > id {d}, method {s}", .{ input.id, "DOM.getSearchResults" });
if (input.params.fromIndex >= input.params.toIndex) return error.BadIndices;
// retrieve the search from context
var ns: ?*const NodeSearch = undefined;
for (ctx.state.nodesearchlist.items) |s| {
if (!std.mem.eql(u8, s.name, input.params.searchId)) continue;
ns = &s;
break;
}
if (ns == null) return error.searchResultNotFound;
const items = ns.?.coll.items;
if (input.params.fromIndex >= items.len) return error.BadFromIndex;
if (input.params.toIndex > items.len) return error.BadToIndex;
// output
const Resp = struct {
nodeIds: []NodeId,
};
const resp: Resp = .{
.nodeIds = ns.?.coll.items[input.params.fromIndex..input.params.toIndex],
};
return result(alloc, input.id, Resp, resp, input.sessionId);
}


@@ -259,7 +259,6 @@ fn navigate(
log.debug("Req > id {d}, method {s}", .{ input.id, "page.navigate" });
// change state
ctx.state.reset();
ctx.state.url = input.params.url;
// TODO: hard coded ID
ctx.state.loaderID = "AF8667A203C5392DBE9AC290044AA4C2";
@@ -330,11 +329,11 @@ fn navigate(
// Send Runtime.executionContextsCleared event
// TODO: noop event, we have no env context at this point, is it necesarry?
try sendEvent(alloc, ctx, "Runtime.executionContextsCleared", struct {}, .{}, input.sessionId);
try sendEvent(alloc, ctx, "Runtime.executionContextsCleared", void, {}, input.sessionId);
// Launch navigate, the page must have been created by a
// target.createTarget.
var p = ctx.browser.currentPage() orelse return error.NoPage;
var p = ctx.browser.session.page orelse return error.NoPage;
ctx.state.executionContextId += 1;
const auxData = try std.fmt.allocPrint(
alloc,
@@ -362,16 +361,6 @@ fn navigate(
);
}
// DOM.documentUpdated
try sendEvent(
alloc,
ctx,
"DOM.documentUpdated",
struct {},
.{},
input.sessionId,
);
// frameNavigated event
const FrameNavigated = struct {
frame: Frame,


@@ -117,12 +117,7 @@ fn sendInspector(
}
}
if (msg.sessionId) |s| {
ctx.state.sessionID = cdp.SessionID.parse(s) catch |err| {
log.err("parse sessionID: {s} {any}", .{ s, err });
return err;
};
}
ctx.state.sessionID = msg.sessionId;
// remove awaitPromise true params
// TODO: delete when Promise are correctly handled by zig-js-runtime
@@ -131,12 +126,12 @@ fn sendInspector(
const buf = try alloc.alloc(u8, msg.json.len + 1);
defer alloc.free(buf);
_ = std.mem.replace(u8, msg.json, "\"awaitPromise\":true", "\"awaitPromise\":false", buf);
try ctx.sendInspector(buf);
ctx.sendInspector(buf);
return "";
}
}
try ctx.sendInspector(msg.json);
ctx.sendInspector(msg.json);
if (msg.id == null) return "";


@@ -344,16 +344,10 @@ fn createTarget(
ctx.state.securityOrigin = "://";
ctx.state.secureContextType = "InsecureScheme";
ctx.state.loaderID = LoaderID;
if (msg.sessionId) |s| {
ctx.state.sessionID = cdp.SessionID.parse(s) catch |err| {
log.err("parse sessionID: {s} {any}", .{ s, err });
return err;
};
}
ctx.state.sessionID = msg.sessionId;
// TODO stop the previous page instead?
if (ctx.browser.currentPage() != null) return error.pageAlreadyExists;
if (ctx.browser.session.page != null) return error.pageAlreadyExists;
// create the page
const p = try ctx.browser.session.createPage();
@@ -464,7 +458,7 @@ fn closeTarget(
null,
);
if (ctx.browser.currentPage()) |page| page.end();
if (ctx.browser.session.page != null) ctx.browser.session.page.?.end();
return "";
}


@@ -21,6 +21,7 @@ const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const parser = @import("netsurf");
@@ -31,12 +32,12 @@ const ProcessingInstruction = @import("processing_instruction.zig").ProcessingIn
const HTMLElem = @import("../html/elements.zig");
// CharacterData interfaces
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
Comment,
Text.Text,
Text.Interfaces,
ProcessingInstruction,
};
});
// CharacterData implementation
pub const CharacterData = struct {


@@ -16,6 +16,8 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const generate = @import("../generate.zig");
const DOMException = @import("exceptions.zig").DOMException;
const EventTarget = @import("event_target.zig").EventTarget;
const DOMImplementation = @import("implementation.zig").DOMImplementation;
@@ -25,7 +27,7 @@ const NodeList = @import("nodelist.zig");
const Nod = @import("node.zig");
const MutationObserver = @import("mutation_observer.zig");
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
DOMException,
EventTarget,
DOMImplementation,
@@ -35,4 +37,4 @@ pub const Interfaces = .{
Nod.Node,
Nod.Interfaces,
MutationObserver.Interfaces,
};
});


@@ -26,7 +26,7 @@ const checkCases = jsruntime.test_utils.checkCases;
const Variadic = jsruntime.Variadic;
const collection = @import("html_collection.zig");
const dump = @import("../browser/dump.zig");
const writeNode = @import("../browser/dump.zig").writeNode;
const css = @import("css.zig");
const Node = @import("node.zig").Node;
@@ -102,17 +102,7 @@ pub const Element = struct {
var buf = std.ArrayList(u8).init(alloc);
defer buf.deinit();
try dump.writeChildren(parser.elementToNode(self), buf.writer());
// TODO express the caller owned the slice.
// https://github.com/lightpanda-io/jsruntime-lib/issues/195
return buf.toOwnedSlice();
}
pub fn get_outerHTML(self: *parser.Element, alloc: std.mem.Allocator) ![]const u8 {
var buf = std.ArrayList(u8).init(alloc);
defer buf.deinit();
try dump.writeNode(parser.elementToNode(self), buf.writer());
try writeNode(parser.elementToNode(self), buf.writer());
// TODO express the caller owned the slice.
// https://github.com/lightpanda-io/jsruntime-lib/issues/195
return buf.toOwnedSlice();
@@ -480,9 +470,4 @@ pub fn testExecFn(
.{ .src = "document.getElementById('para-empty').innerHTML.trim()", .ex = "<span id=\"para-empty-child\"></span>" },
};
try checkCases(js_env, &innerHTML);
var outerHTML = [_]Case{
.{ .src = "document.getElementById('para').outerHTML", .ex = "<p id=\"para\"> And</p>" },
};
try checkCases(js_env, &outerHTML);
}


@@ -29,6 +29,7 @@ const EventHandler = @import("../events/event.zig").EventHandler;
const DOMException = @import("exceptions.zig").DOMException;
const Nod = @import("node.zig");
const Window = @import("../html/window.zig").Window;
// EventTarget interfaces
pub const Union = Nod.Union;
@@ -40,9 +41,10 @@ pub const EventTarget = struct {
pub const mem_guarantied = true;
pub fn toInterface(et: *parser.EventTarget) !Union {
// NOTE: for now we state that all EventTarget are Nodes
// TODO: handle other types (eg. Window)
return Nod.Node.toInterface(@as(*parser.Node, @ptrCast(et)));
return switch (try parser.eventTargetGetType(et)) {
.window => .{ .Window = @as(*Window, @ptrCast(et)) },
.node => Nod.Node.toInterface(@as(*parser.Node, @ptrCast(et))),
};
}
// JS funcs


@@ -26,13 +26,15 @@ const CallbackResult = jsruntime.CallbackResult;
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const NodeList = @import("nodelist.zig").NodeList;
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
MutationObserver,
MutationRecord,
MutationRecords,
};
});
const Walker = @import("../dom/walker.zig").WalkerChildren;


@@ -47,7 +47,7 @@ const HTML = @import("../html/html.zig");
const HTMLElem = @import("../html/elements.zig");
// Node interfaces
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
Attr,
CData.CharacterData,
CData.Interfaces,
@@ -57,10 +57,12 @@ pub const Interfaces = .{
DocumentFragment,
HTMLCollection,
HTMLCollectionIterator,
HTML.Interfaces,
};
pub const Union = generate.Union(Interfaces);
HTML.Interfaces,
});
const Generated = generate.Union.compile(Interfaces);
pub const Union = Generated._union;
pub const Tags = Generated._enum;
// Node implementation
pub const Node = struct {


@@ -25,6 +25,7 @@ const Callback = jsruntime.Callback;
const CallbackResult = jsruntime.CallbackResult;
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const NodeUnion = @import("node.zig").Union;
const Node = @import("node.zig").Node;
@@ -35,10 +36,10 @@ const log = std.log.scoped(.nodelist);
const DOMException = @import("exceptions.zig").DOMException;
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
NodeListIterator,
NodeList,
};
});
pub const NodeListIterator = struct {
pub const mem_guarantied = true;


@@ -21,6 +21,7 @@ const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const parser = @import("netsurf");
@@ -30,9 +31,9 @@ const CDATASection = @import("cdata_section.zig").CDATASection;
const UserContext = @import("../user_context.zig").UserContext;
// Text interfaces
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
CDATASection,
};
});
pub const Text = struct {
pub const Self = parser.Text;


@@ -37,12 +37,12 @@ const ProgressEvent = @import("../xhr/progress_event.zig").ProgressEvent;
const log = std.log.scoped(.events);
// Event interfaces
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
Event,
ProgressEvent,
};
pub const Union = generate.Union(Interfaces);
});
const Generated = generate.Union.compile(Interfaces);
pub const Union = Generated._union;
// https://dom.spec.whatwg.org/#event
pub const Event = struct {


@@ -17,213 +17,430 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const builtin = @import("builtin");
// ----
const Type = std.builtin.Type;
// Utils
// -----
fn itoa(comptime i: u8) ![]const u8 {
var len: usize = undefined;
if (i < 10) {
len = 1;
} else if (i < 100) {
len = 2;
} else if (i < 1000) {
len = 3;
} else {
return error.GenerateTooMuchMembers;
}
var buf: [len]u8 = undefined;
return try std.fmt.bufPrint(buf[0..], "{d}", .{i});
}
fn fmtName(comptime T: type) [:0]const u8 {
var it = std.mem.splitBackwards(u8, @typeName(T), ".");
return it.first() ++ "";
}
// Union
// -----
// Generate a flatten tagged Union from a Tuple
pub fn Union(interfaces: anytype) type {
// @setEvalBranchQuota(10000);
const tuple = Tuple(interfaces){};
const fields = std.meta.fields(@TypeOf(tuple));
// Generate a flatten tagged Union from various structs and union of structs
// TODO: make this function more generic
// TODO: dedup
pub const Union = struct {
_enum: type,
_union: type,
const tag_type = switch (fields.len) {
0 => unreachable,
1 => u0,
2 => u1,
3...4 => u2,
5...8 => u3,
9...16 => u4,
17...32 => u5,
33...64 => u6,
65...128 => u7,
129...256 => u8,
else => @compileError("Too many interfaces to generate union"),
};
// second iteration to generate tags
var enum_fields: [fields.len]Type.EnumField = undefined;
for (fields, 0..) |field, index| {
const member = @field(tuple, field.name);
const full_name = @typeName(member);
const separator = std.mem.lastIndexOfScalar(u8, full_name, '.') orelse unreachable;
const name = full_name[separator + 1 ..];
enum_fields[index] = .{
.name = name ++ "",
.value = index,
};
pub fn compile(comptime tuple: anytype) Union {
return private_compile(tuple) catch |err| @compileError(@errorName(err));
}
const enum_info = Type.Enum{
.tag_type = tag_type,
.fields = &enum_fields,
.decls = &.{},
.is_exhaustive = true,
};
const enum_T = @Type(.{ .Enum = enum_info });
fn private_compile(comptime tuple: anytype) !Union {
@setEvalBranchQuota(10000);
// third iteration to generate union type
var union_fields: [fields.len]Type.UnionField = undefined;
for (fields, enum_fields, 0..) |field, e, index| {
var FT = @field(tuple, field.name);
if (@hasDecl(FT, "Self")) {
FT = *(@field(FT, "Self"));
// check types provided
const tuple_T = @TypeOf(tuple);
const tuple_info = @typeInfo(tuple_T);
if (tuple_info != .Struct or !tuple_info.Struct.is_tuple) {
return error.GenerateArgNotTuple;
}
union_fields[index] = .{
.type = FT,
.name = e.name,
.alignment = @alignOf(FT),
const tuple_members = tuple_info.Struct.fields;
// first iteration to get the total number of members
var members_nb = 0;
for (tuple_members) |member| {
const member_T = @field(tuple, member.name);
const member_info = @typeInfo(member_T);
if (member_info == .Union) {
const member_union = member_info.Union;
members_nb += member_union.fields.len;
} else if (member_info == .Struct) {
members_nb += 1;
} else {
return error.GenerateMemberNotUnionOrStruct;
}
}
// define the tag type regarding the members nb
var tag_type: type = undefined;
if (members_nb < 3) {
tag_type = u1;
} else if (members_nb < 4) {
tag_type = u2;
} else if (members_nb < 8) {
tag_type = u3;
} else if (members_nb < 16) {
tag_type = u4;
} else if (members_nb < 32) {
tag_type = u5;
} else if (members_nb < 64) {
tag_type = u6;
} else if (members_nb < 128) {
tag_type = u7;
} else if (members_nb < 256) {
tag_type = u8;
} else if (members_nb < 65536) {
tag_type = u16;
} else {
return error.GenerateTooMuchMembers;
}
// second iteration to generate tags
var enum_fields: [members_nb]std.builtin.Type.EnumField = undefined;
var done = 0;
for (tuple_members) |member| {
const member_T = @field(tuple, member.name);
const member_info = @typeInfo(member_T);
if (member_info == .Union) {
const member_union = member_info.Union;
for (member_union.fields) |field| {
enum_fields[done] = .{
.name = fmtName(field.type),
.value = done,
};
done += 1;
}
} else if (member_info == .Struct) {
enum_fields[done] = .{
.name = fmtName(member_T),
.value = done,
};
done += 1;
}
}
const decls: [0]std.builtin.Type.Declaration = undefined;
const enum_info = std.builtin.Type.Enum{
.tag_type = tag_type,
.fields = &enum_fields,
.decls = &decls,
.is_exhaustive = true,
};
const enum_T = @Type(std.builtin.Type{ .Enum = enum_info });
// third iteration to generate union type
var union_fields: [members_nb]std.builtin.Type.UnionField = undefined;
done = 0;
for (tuple_members, 0..) |member, i| {
const member_T = @field(tuple, member.name);
const member_info = @typeInfo(member_T);
if (member_info == .Union) {
const member_union = member_info.Union;
for (member_union.fields) |field| {
var T: type = undefined;
if (@hasDecl(field.type, "Self")) {
T = @field(field.type, "Self");
T = *T;
} else {
T = field.type;
}
union_fields[done] = .{
.name = fmtName(field.type),
.type = T,
.alignment = @alignOf(T),
};
done += 1;
}
} else if (member_info == .Struct) {
const member_name = try itoa(i);
var T = @field(tuple, member_name);
if (@hasDecl(T, "Self")) {
T = @field(T, "Self");
T = *T;
}
union_fields[done] = .{
// UnionField.name expects a null-terminated string.
// Concatenate the `[]const u8` string with an empty string
// literal (`name ++ ""`) to explicitly coerce it to `[:0]const
// u8`.
.name = fmtName(member_T) ++ "",
.type = T,
.alignment = @alignOf(T),
};
done += 1;
}
}
const union_info = std.builtin.Type.Union{
.layout = .auto,
.tag_type = enum_T,
.fields = &union_fields,
.decls = &decls,
};
const union_T = @Type(std.builtin.Type{ .Union = union_info });
return .{
._enum = enum_T,
._union = union_T,
};
}
return @Type(.{ .Union = .{
.layout = .auto,
.tag_type = enum_T,
.fields = &union_fields,
.decls = &.{},
} });
}
};
// Tuple
// -----
// Flattens and deduplicates a list of nested tuples. For example:
// input:  {A, B, {C, B, D}, {A, E}}
// output: {A, B, C, D, E}
pub fn Tuple(args: anytype) type {
@setEvalBranchQuota(100000);
const count = countInterfaces(args, 0);
var interfaces: [count]type = undefined;
_ = flattenInterfaces(args, &interfaces, 0);
const unfiltered_count, const filter_set = filterMap(count, interfaces);
var field_index: usize = 0;
var fields: [unfiltered_count]Type.StructField = undefined;
for (filter_set, 0..) |filter, i| {
if (filter) {
continue;
}
fields[field_index] = .{
.name = std.fmt.comptimePrint("{d}", .{field_index}),
.type = type,
// has to be true in order to properly capture the default value
.is_comptime = true,
.alignment = @alignOf(type),
.default_value = @ptrCast(&interfaces[i]),
};
field_index += 1;
}
return @Type(.{ .Struct = .{
.layout = .auto,
.fields = &fields,
.decls = &.{},
.is_tuple = true,
} });
}
fn countInterfaces(args: anytype, count: usize) usize {
var new_count = count;
for (@typeInfo(@TypeOf(args)).Struct.fields) |f| {
const member = @field(args, f.name);
if (@TypeOf(member) == type) {
new_count += 1;
fn tupleNb(comptime tuple: anytype) usize {
var nb = 0;
for (@typeInfo(@TypeOf(tuple)).Struct.fields) |member| {
const member_T = @field(tuple, member.name);
if (@TypeOf(member_T) == type) {
nb += 1;
} else {
new_count = countInterfaces(member, new_count);
const member_info = @typeInfo(@TypeOf(member_T));
if (member_info != .Struct and !member_info.Struct.is_tuple) {
@compileError("GenerateMemberNotTypeOrTuple");
}
for (member_info.Struct.fields) |field| {
if (@TypeOf(@field(member_T, field.name)) != type) {
@compileError("GenerateMemberTupleChildNotType");
}
}
nb += member_info.Struct.fields.len;
}
}
return new_count;
return nb;
}
fn flattenInterfaces(args: anytype, interfaces: []type, index: usize) usize {
var new_index = index;
for (@typeInfo(@TypeOf(args)).Struct.fields) |f| {
const member = @field(args, f.name);
if (@TypeOf(member) == type) {
interfaces[new_index] = member;
new_index += 1;
fn tupleTypes(comptime nb: usize, comptime tuple: anytype) [nb]type {
var types: [nb]type = undefined;
var done = 0;
for (@typeInfo(@TypeOf(tuple)).Struct.fields) |member| {
const T = @field(tuple, member.name);
if (@TypeOf(T) == type) {
types[done] = T;
done += 1;
} else {
new_index = flattenInterfaces(member, interfaces, new_index);
}
}
return new_index;
}
fn filterMap(comptime count: usize, interfaces: [count]type) struct { usize, [count]bool } {
var map: [count]bool = undefined;
var unfiltered_count: usize = 0;
outer: for (interfaces, 0..) |iface, i| {
for (interfaces[i + 1 ..]) |check| {
if (iface == check) {
map[i] = true;
continue :outer;
const info = @typeInfo(@TypeOf(T));
for (info.Struct.fields) |field| {
types[done] = @field(T, field.name);
done += 1;
}
}
map[i] = false;
unfiltered_count += 1;
}
return .{ unfiltered_count, map };
return types;
}
test "generate.Union" {
const Astruct = struct {
pub const Self = Other;
const Other = struct {};
};
const Bstruct = struct {
value: u8 = 0,
};
const Cstruct = struct {
value: u8 = 0,
};
const value = Union(.{ Astruct, Bstruct, .{Cstruct} });
const ti = @typeInfo(value).Union;
try std.testing.expectEqual(3, ti.fields.len);
try std.testing.expectEqualStrings("*generate.test.generate.Union.Astruct.Other", @typeName(ti.fields[0].type));
try std.testing.expectEqualStrings(ti.fields[0].name, "Astruct");
try std.testing.expectEqual(Bstruct, ti.fields[1].type);
try std.testing.expectEqualStrings(ti.fields[1].name, "Bstruct");
try std.testing.expectEqual(Cstruct, ti.fields[2].type);
try std.testing.expectEqualStrings(ti.fields[2].name, "Cstruct");
fn isDup(comptime nb: usize, comptime list: [nb]type, comptime T: type, comptime i: usize) bool {
for (list, 0..) |item, index| {
if (i >= index) {
// only compare against items after the current index
continue;
}
if (T == item) {
return true;
}
}
return false;
}
test "generate.Tuple" {
const Astruct = struct {};
const Bstruct = struct {
value: u8 = 0,
};
const Cstruct = struct {
value: u8 = 0,
};
{
const tuple = Tuple(.{ Astruct, Bstruct }){};
const ti = @typeInfo(@TypeOf(tuple)).Struct;
try std.testing.expectEqual(true, ti.is_tuple);
try std.testing.expectEqual(2, ti.fields.len);
try std.testing.expectEqual(Astruct, tuple.@"0");
try std.testing.expectEqual(Bstruct, tuple.@"1");
}
{
// dedupe
const tuple = Tuple(.{ Cstruct, Astruct, .{Astruct}, Bstruct, .{ Astruct, .{ Astruct, Bstruct } } }){};
const ti = @typeInfo(@TypeOf(tuple)).Struct;
try std.testing.expectEqual(true, ti.is_tuple);
try std.testing.expectEqual(3, ti.fields.len);
try std.testing.expectEqual(Cstruct, tuple.@"0");
try std.testing.expectEqual(Astruct, tuple.@"1");
try std.testing.expectEqual(Bstruct, tuple.@"2");
fn dedupIndexes(comptime nb: usize, comptime types: [nb]type) [nb]i32 {
var dedup_indexes: [nb]i32 = undefined;
for (types, 0..) |T, i| {
if (isDup(nb, types, T, i)) {
dedup_indexes[i] = -1;
} else {
dedup_indexes[i] = i;
}
}
return dedup_indexes;
}
fn dedupNb(comptime nb: usize, comptime dedup_indexes: [nb]i32) usize {
var dedup_nb = 0;
for (dedup_indexes) |index| {
if (index != -1) {
dedup_nb += 1;
}
}
return dedup_nb;
}
fn TupleT(comptime tuple: anytype) type {
@setEvalBranchQuota(100000);
// logic
const nb = tupleNb(tuple);
const types = tupleTypes(nb, tuple);
const dedup_indexes = dedupIndexes(nb, types);
const dedup_nb = dedupNb(nb, dedup_indexes);
// generate the tuple type
var fields: [dedup_nb]std.builtin.Type.StructField = undefined;
var done = 0;
for (dedup_indexes) |index| {
if (index == -1) {
continue;
}
fields[done] = .{
// StructField.name expects a null-terminated string.
// Concatenate the `[]const u8` string with an empty string
// literal (`name ++ ""`) to explicitly coerce it to `[:0]const
// u8`.
.name = try itoa(done) ++ "",
.type = type,
.default_value = null,
.is_comptime = false,
.alignment = @alignOf(type),
};
done += 1;
}
const decls: [0]std.builtin.Type.Declaration = undefined;
const info = std.builtin.Type.Struct{
.layout = .auto,
.fields = &fields,
.decls = &decls,
.is_tuple = true,
};
return @Type(std.builtin.Type{ .Struct = info });
}
// Create a flattened tuple from various structs and tuples of structs
// Duplicates will be removed.
// TODO: make this function more generic
pub fn Tuple(comptime tuple: anytype) TupleT(tuple) {
// check types provided
const tuple_T = @TypeOf(tuple);
const tuple_info = @typeInfo(tuple_T);
if (tuple_info != .Struct or !tuple_info.Struct.is_tuple) {
@compileError("GenerateArgNotTuple");
}
// generate the type
const T = TupleT(tuple);
// logic
const nb = tupleNb(tuple);
const types = tupleTypes(nb, tuple);
const dedup_indexes = dedupIndexes(nb, types);
// instantiate the tuple
var t: T = undefined;
var done = 0;
for (dedup_indexes) |index| {
if (index == -1) {
continue;
}
const name = try itoa(done);
@field(t, name) = types[index];
done += 1;
}
return t;
}
// Tests
// -----
const Error = error{
GenerateArgNotTuple,
GenerateMemberNotUnionOrStruct,
GenerateMemberNotTupleOrStruct,
GenerateMemberTupleNotStruct,
GenerateTooMuchMembers,
};
const Astruct = struct {
value: u8 = 0,
};
const Bstruct = struct {
value: u8 = 0,
};
const Cstruct = struct {
value: u8 = 0,
};
const Dstruct = struct {
value: u8 = 0,
};
pub fn tests() !void {
// Union from structs
const FromStructs = try Union.private_compile(.{ Astruct, Bstruct, Cstruct });
const from_structs_enum = @typeInfo(FromStructs._enum);
try std.testing.expect(from_structs_enum == .Enum);
try std.testing.expect(from_structs_enum.Enum.fields.len == 3);
try std.testing.expect(from_structs_enum.Enum.tag_type == u2);
try std.testing.expect(from_structs_enum.Enum.fields[0].value == 0);
try std.testing.expectEqualStrings(from_structs_enum.Enum.fields[0].name, "Astruct");
const from_structs_union = @typeInfo(FromStructs._union);
try std.testing.expect(from_structs_union == .Union);
try std.testing.expect(from_structs_union.Union.tag_type == FromStructs._enum);
try std.testing.expect(from_structs_union.Union.fields.len == 3);
try std.testing.expect(from_structs_union.Union.fields[0].type == Astruct);
try std.testing.expectEqualStrings(from_structs_union.Union.fields[0].name, "Astruct");
// Union from union and structs
const FromMix = try Union.private_compile(.{ FromStructs._union, Dstruct });
const from_mix_enum = @typeInfo(FromMix._enum);
try std.testing.expect(from_mix_enum == .Enum);
try std.testing.expect(from_mix_enum.Enum.fields.len == 4);
try std.testing.expect(from_mix_enum.Enum.tag_type == u3);
try std.testing.expect(from_mix_enum.Enum.fields[0].value == 0);
try std.testing.expectEqualStrings(from_mix_enum.Enum.fields[3].name, "Dstruct");
const from_mix_union = @typeInfo(FromMix._union);
try std.testing.expect(from_mix_union == .Union);
try std.testing.expect(from_mix_union.Union.tag_type == FromMix._enum);
try std.testing.expect(from_mix_union.Union.fields.len == 4);
try std.testing.expect(from_mix_union.Union.fields[3].type == Dstruct);
try std.testing.expectEqualStrings(from_mix_union.Union.fields[3].name, "Dstruct");
std.debug.print("Generate Union: OK\n", .{});
// Tuple from structs
const tuple_structs = .{ Astruct, Bstruct };
const tFromStructs = Tuple(tuple_structs);
const t_from_structs = @typeInfo(@TypeOf(tFromStructs));
try std.testing.expect(t_from_structs == .Struct);
try std.testing.expect(t_from_structs.Struct.is_tuple);
try std.testing.expect(t_from_structs.Struct.fields.len == 2);
try std.testing.expect(@field(tFromStructs, "0") == Astruct);
try std.testing.expect(@field(tFromStructs, "1") == Bstruct);
// Tuple from tuple and structs
const tuple_mix = .{ tFromStructs, Cstruct };
const tFromMix = Tuple(tuple_mix);
const t_from_mix = @typeInfo(@TypeOf(tFromMix));
try std.testing.expect(t_from_mix == .Struct);
try std.testing.expect(t_from_mix.Struct.is_tuple);
try std.testing.expect(t_from_mix.Struct.fields.len == 3);
try std.testing.expect(@field(tFromMix, "0") == Astruct);
try std.testing.expect(@field(tFromMix, "1") == Bstruct);
try std.testing.expect(@field(tFromMix, "2") == Cstruct);
// Tuple with dedup
const tuple_dedup = .{ Cstruct, Astruct, tFromStructs, Bstruct };
const tFromDedup = Tuple(tuple_dedup);
const t_from_dedup = @typeInfo(@TypeOf(tFromDedup));
try std.testing.expect(t_from_dedup == .Struct);
try std.testing.expect(t_from_dedup.Struct.is_tuple);
try std.testing.expect(t_from_dedup.Struct.fields.len == 3);
try std.testing.expect(@field(tFromDedup, "0") == Cstruct);
try std.testing.expect(@field(tFromDedup, "1") == Astruct);
try std.testing.expect(@field(tFromDedup, "2") == Bstruct);
std.debug.print("Generate Tuple: OK\n", .{});
}
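For orientation, here is a minimal usage sketch of the two helpers above, mirroring how the Interfaces tuples are declared elsewhere in this diff; the types A and B are hypothetical stand-ins and not part of the change:
const std = @import("std");
const generate = @import("generate.zig");
const A = struct { value: u8 = 0 };
const B = struct { value: u16 = 0 };
// Flatten nested tuples of interface types and drop duplicates.
pub const Interfaces = generate.Tuple(.{ A, .{ A, B } });
// Build a tagged union (plus its tag enum) over the flattened interfaces.
const Generated = generate.Union.compile(Interfaces);
pub const Union = Generated._union;
pub const Tags = Generated._enum;
test "generated union carries one interface at a time" {
    const u: Union = .{ .B = .{ .value = 42 } };
    try std.testing.expect(@as(Tags, u) == .B);
}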

95
src/handler.zig Normal file
View File

@@ -0,0 +1,95 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const ws = @import("websocket");
const Msg = @import("msg.zig").Msg;
const log = std.log.scoped(.handler);
pub const Stream = struct {
addr: std.net.Address,
socket: std.posix.socket_t = undefined,
ws_host: []const u8,
ws_port: u16,
ws_conn: *ws.Conn = undefined,
fn connectCDP(self: *Stream) !void {
const flags: u32 = std.posix.SOCK.STREAM;
const proto = blk: {
if (self.addr.any.family == std.posix.AF.UNIX) break :blk @as(u32, 0);
break :blk std.posix.IPPROTO.TCP;
};
const socket = try std.posix.socket(self.addr.any.family, flags, proto);
try std.posix.connect(
socket,
&self.addr.any,
self.addr.getOsSockLen(),
);
log.debug("connected to Stream server", .{});
self.socket = socket;
}
fn closeCDP(self: *const Stream) void {
const close_msg: []const u8 = .{ 5, 0, 0, 0 } ++ "close";
self.recv(close_msg) catch |err| {
log.err("stream close error: {any}", .{err});
};
std.posix.close(self.socket);
}
fn start(self: *Stream, ws_conn: *ws.Conn) !void {
try self.connectCDP();
self.ws_conn = ws_conn;
}
pub fn recv(self: *const Stream, data: []const u8) !void {
var pos: usize = 0;
while (pos < data.len) {
const len = try std.posix.write(self.socket, data[pos..]);
pos += len;
}
}
pub fn send(self: *const Stream, data: []const u8) !void {
return self.ws_conn.write(data);
}
};
pub const Handler = struct {
stream: *Stream,
pub fn init(_: ws.Handshake, ws_conn: *ws.Conn, stream: *Stream) !Handler {
try stream.start(ws_conn);
return .{ .stream = stream };
}
pub fn close(self: *Handler) void {
self.stream.closeCDP();
}
pub fn clientMessage(self: *Handler, data: []const u8) !void {
var header: [4]u8 = undefined;
Msg.setSize(data.len, &header);
try self.stream.recv(&header);
try self.stream.recv(data);
}
};
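As a hedged illustration of the framing clientMessage applies before forwarding data to the CDP socket (a 4-byte little-endian length header followed by the payload, the same layout Msg.setSize/getSize use), here is a self-contained sketch; the frame helper is hypothetical:
const std = @import("std");
// Prefix a payload with its length encoded on 4 bytes, little-endian.
fn frame(alloc: std.mem.Allocator, payload: []const u8) ![]u8 {
    const out = try alloc.alloc(u8, 4 + payload.len);
    std.mem.writeInt(u32, out[0..4], @intCast(payload.len), .little);
    @memcpy(out[4..], payload);
    return out;
}
test "frame" {
    const framed = try frame(std.testing.allocator, "ok");
    defer std.testing.allocator.free(framed);
    try std.testing.expectEqualStrings("\x02\x00\x00\x00ok", framed);
}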

View File

@@ -28,7 +28,6 @@ const Node = @import("../dom/node.zig").Node;
const Document = @import("../dom/document.zig").Document;
const NodeList = @import("../dom/nodelist.zig").NodeList;
const HTMLElem = @import("elements.zig");
const Location = @import("location.zig").Location;
const collection = @import("../dom/html_collection.zig");
const Walker = @import("../dom/walker.zig").WalkerDepthFirst;
@@ -158,10 +157,6 @@ pub const HTMLDocument = struct {
return try parser.documentHTMLGetCurrentScript(self);
}
pub fn get_location(self: *parser.DocumentHTML) !?*Location {
return try parser.documentHTMLGetLocation(Location, self);
}
pub fn get_designMode(_: *parser.DocumentHTML) []const u8 {
return "off";
}

View File

@@ -99,8 +99,9 @@ pub const Interfaces = .{
HTMLVideoElement,
CSSProperties,
};
pub const Union = generate.Union(Interfaces);
const Generated = generate.Union.compile(Interfaces);
pub const Union = Generated._union;
pub const Tags = Generated._enum;
// Abstract class
// --------------

View File

@@ -23,9 +23,8 @@ const HTMLElem = @import("elements.zig");
const Window = @import("window.zig").Window;
const Navigator = @import("navigator.zig").Navigator;
const History = @import("history.zig").History;
const Location = @import("location.zig").Location;
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
HTMLDocument,
HTMLElem.HTMLElement,
HTMLElem.HTMLMediaElement,
@@ -33,5 +32,4 @@ pub const Interfaces = .{
Window,
Navigator,
History,
Location,
};
});

View File

@@ -1,129 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const builtin = @import("builtin");
const jsruntime = @import("jsruntime");
const URL = @import("../url/url.zig").URL;
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
// https://html.spec.whatwg.org/multipage/nav-history-apis.html#the-location-interface
pub const Location = struct {
pub const mem_guarantied = true;
url: ?*URL = null,
pub fn deinit(_: *Location, _: std.mem.Allocator) void {}
pub fn get_href(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_href(alloc);
return "";
}
pub fn get_protocol(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_protocol(alloc);
return "";
}
pub fn get_host(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_host(alloc);
return "";
}
pub fn get_hostname(self: *Location) []const u8 {
if (self.url) |u| return u.get_hostname();
return "";
}
pub fn get_port(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_port(alloc);
return "";
}
pub fn get_pathname(self: *Location) []const u8 {
if (self.url) |u| return u.get_pathname();
return "";
}
pub fn get_search(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_search(alloc);
return "";
}
pub fn get_hash(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_hash(alloc);
return "";
}
pub fn get_origin(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
if (self.url) |u| return u.get_origin(alloc);
return "";
}
// TODO
pub fn _assign(_: *Location, url: []const u8) !void {
_ = url;
}
// TODO
pub fn _replace(_: *Location, url: []const u8) !void {
_ = url;
}
// TODO
pub fn _reload(_: *Location) !void {}
pub fn _toString(self: *Location, alloc: std.mem.Allocator) ![]const u8 {
return try self.get_href(alloc);
}
};
// Tests
// -----
pub fn testExecFn(
_: std.mem.Allocator,
js_env: *jsruntime.Env,
) anyerror!void {
var location = [_]Case{
.{ .src = "location.href", .ex = "https://lightpanda.io/opensource-browser/" },
.{ .src = "document.location.href", .ex = "https://lightpanda.io/opensource-browser/" },
.{ .src = "location.host", .ex = "lightpanda.io" },
.{ .src = "location.hostname", .ex = "lightpanda.io" },
.{ .src = "location.origin", .ex = "https://lightpanda.io" },
.{ .src = "location.pathname", .ex = "/opensource-browser/" },
.{ .src = "location.hash", .ex = "" },
.{ .src = "location.port", .ex = "" },
.{ .src = "location.search", .ex = "" },
};
try checkCases(js_env, &location);
}

View File

@@ -27,12 +27,9 @@ const Loop = jsruntime.Loop;
const EventTarget = @import("../dom/event_target.zig").EventTarget;
const Navigator = @import("navigator.zig").Navigator;
const History = @import("history.zig").History;
const Location = @import("location.zig").Location;
const storage = @import("../storage/storage.zig");
var emptyLocation = Location{};
// https://dom.spec.whatwg.org/#interface-window-extensions
// https://html.spec.whatwg.org/multipage/nav-history-apis.html#window
pub const Window = struct {
@@ -46,7 +43,6 @@ pub const Window = struct {
document: ?*parser.DocumentHTML = null,
target: []const u8,
history: History = .{},
location: *Location = &emptyLocation,
storageShelf: ?*storage.Shelf = null,
@@ -61,20 +57,12 @@ pub const Window = struct {
return Window{
.target = target orelse "",
.navigator = navigator orelse .{},
.base = .{ .et_type = @intFromEnum(parser.EventTargetType.window) },
};
}
pub fn replaceLocation(self: *Window, loc: *Location) !void {
self.location = loc;
if (self.document != null) {
try parser.documentHTMLSetLocation(Location, self.document.?, self.location);
}
}
pub fn replaceDocument(self: *Window, doc: *parser.DocumentHTML) !void {
pub fn replaceDocument(self: *Window, doc: *parser.DocumentHTML) void {
self.document = doc;
try parser.documentHTMLSetLocation(Location, doc, self.location);
}
pub fn setStorageShelf(self: *Window, shelf: *storage.Shelf) void {
@@ -89,10 +77,6 @@ pub const Window = struct {
return &self.navigator;
}
pub fn get_location(self: *Window) *Location {
return self.location;
}
pub fn get_self(self: *Window) *Window {
return self;
}

View File

@@ -1,8 +1,10 @@
const std = @import("std");
pub const Interfaces = .{
const generate = @import("../generate.zig");
pub const Interfaces = generate.Tuple(.{
U32Iterator,
};
});
pub const U32Iterator = struct {
pub const mem_guarantied = true;
@@ -15,39 +17,19 @@ pub const U32Iterator = struct {
done: bool,
};
pub fn _next(self: *U32Iterator) Return {
pub fn _next(self: *U32Iterator) !Return {
const i = self.index;
if (i >= self.length) {
return .{
return Return{
.value = 0,
.done = true,
};
}
self.index = i + 1;
return .{
self.index += 1;
return Return{
.value = i,
.done = false,
};
}
};
const testing = std.testing;
test "U32Iterator" {
const Return = U32Iterator.Return;
{
var it = U32Iterator{ .length = 0 };
try testing.expectEqual(Return{ .value = 0, .done = true }, it._next());
try testing.expectEqual(Return{ .value = 0, .done = true }, it._next());
}
{
var it = U32Iterator{ .length = 3 };
try testing.expectEqual(Return{ .value = 0, .done = false }, it._next());
try testing.expectEqual(Return{ .value = 1, .done = false }, it._next());
try testing.expectEqual(Return{ .value = 2, .done = false }, it._next());
try testing.expectEqual(Return{ .value = 0, .done = true }, it._next());
try testing.expectEqual(Return{ .value = 0, .done = true }, it._next());
}
}

View File

@@ -20,9 +20,12 @@ const std = @import("std");
const builtin = @import("builtin");
const jsruntime = @import("jsruntime");
const websocket = @import("websocket");
const Browser = @import("browser/browser.zig").Browser;
const server = @import("server.zig");
const handler = @import("handler.zig");
const MaxSize = @import("msg.zig").MaxSize;
const parser = @import("netsurf");
const apiweb = @import("apiweb.zig");
@@ -83,9 +86,11 @@ const CliMode = union(CliModeTag) {
const Server = struct {
execname: []const u8 = undefined,
args: *std.process.ArgIterator = undefined,
addr: std.net.Address = undefined,
host: []const u8 = Host,
port: u16 = Port,
timeout: u8 = Timeout,
tcp: bool = false, // undocumented TCP mode
// default options
const Host = "127.0.0.1";
@@ -155,6 +160,10 @@ const CliMode = union(CliModeTag) {
return printUsageExit(execname, 1);
}
}
if (std.mem.eql(u8, "--tcp", opt)) {
_server.tcp = true;
continue;
}
// unknown option
if (std.mem.startsWith(u8, opt, "--")) {
@@ -177,6 +186,10 @@ const CliMode = union(CliModeTag) {
if (default_mode == .server) {
// server mode
_server.addr = std.net.Address.parseIp4(_server.host, _server.port) catch |err| {
log.err("address (host:port) {any}\n", .{err});
return printUsageExit(execname, 1);
};
_server.execname = execname;
_server.args = args;
return CliMode{ .server = _server };
@@ -234,19 +247,65 @@ pub fn main() !void {
switch (cli_mode) {
.server => |opts| {
const address = std.net.Address.parseIp4(opts.host, opts.port) catch |err| {
log.err("address (host:port) {any}\n", .{err});
// Stream server
const addr = blk: {
if (opts.tcp) {
break :blk opts.addr;
} else {
const unix_path = "/tmp/lightpanda";
std.fs.deleteFileAbsolute(unix_path) catch {}; // the file may not exist
break :blk try std.net.Address.initUnix(unix_path);
}
};
const socket = server.listen(addr) catch |err| {
log.err("Server listen error: {any}\n", .{err});
return printUsageExit(opts.execname, 1);
};
defer std.posix.close(socket);
log.debug("Server opts: listening internally on {any}...", .{addr});
const timeout = std.time.ns_per_s * @as(u64, opts.timeout);
// loop
var loop = try jsruntime.Loop.init(alloc);
defer loop.deinit();
const timeout = std.time.ns_per_s * @as(u64, opts.timeout);
server.run(alloc, address, timeout, &loop) catch |err| {
log.err("Server error", .{});
return err;
// TCP server mode
if (opts.tcp) {
return server.handle(alloc, &loop, socket, null, timeout);
}
// start stream server in separate thread
var stream = handler.Stream{
.ws_host = opts.host,
.ws_port = opts.port,
.addr = addr,
};
const cdp_thread = try std.Thread.spawn(
.{ .allocator = alloc },
server.handle,
.{ alloc, &loop, socket, &stream, timeout },
);
// Websocket server
var ws = try websocket.Server(handler.Handler).init(alloc, .{
.port = opts.port,
.address = opts.host,
.max_message_size = MaxSize + 14, // websocket frame overhead
.max_conn = 1,
.handshake = .{
.timeout = 3,
.max_size = 1024,
// since we aren't using handshake.headers
// we can set this to 0 to save a few bytes.
.max_headers = 0,
},
});
defer ws.deinit();
try ws.listen(&stream);
cdp_thread.join();
},
.fetch => |opts| {

View File

@@ -55,7 +55,7 @@ fn execJS(
// alias global as self and window
var window = Window.create(null, null);
try window.replaceDocument(doc);
window.replaceDocument(doc);
window.setStorageShelf(&storageShelf);
try js_env.bindGlobal(window);

166
src/msg.zig Normal file
View File

@@ -0,0 +1,166 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
pub const HeaderSize = 4;
pub const MsgSize = 256 * 1024; // 256KB
// NOTE: Theoretically we could go up to 4GB with a 4-byte binary encoding
// but we prefer to put a lower hard limit for obvious memory size reasons.
pub const MaxSize = HeaderSize + MsgSize;
pub const Msg = struct {
pub fn getSize(data: []const u8) usize {
return std.mem.readInt(u32, data[0..HeaderSize], .little);
}
pub fn setSize(len: usize, header: *[4]u8) void {
std.mem.writeInt(u32, header, @intCast(len), .little);
}
};
/// Buffer returns messages from a raw text read stream,
/// with the message size encoded in the first 4 bytes (little endian)
/// It handles both:
/// - combined messages in one read
/// - single message in several reads (multipart)
/// It's safe (and a good practice) to reuse the same Buffer
/// on several reads of the same stream.
pub const Buffer = struct {
buf: []u8,
size: usize = 0,
pos: usize = 0,
fn isFinished(self: *const Buffer) bool {
return self.pos >= self.size;
}
fn isEmpty(self: *const Buffer) bool {
return self.size == 0 and self.pos == 0;
}
fn reset(self: *Buffer) void {
self.size = 0;
self.pos = 0;
}
// read input
pub fn read(self: *Buffer, input: []const u8) !struct {
msg: []const u8,
left: []const u8,
} {
var _input = input; // local var so we can re-slice the input
// msg size
var msg_size: usize = undefined;
if (self.isEmpty()) {
// decode msg size header
msg_size = Msg.getSize(_input);
_input = _input[HeaderSize..];
} else {
msg_size = self.size;
}
// multipart
const is_multipart = !self.isEmpty() or _input.len < msg_size;
if (is_multipart) {
// set msg size on empty Buffer
if (self.isEmpty()) {
self.size = msg_size;
}
// get the new position of the cursor
const new_pos = self.pos + _input.len;
// check max limit size
if (new_pos > MaxSize) {
return error.MsgTooBig;
}
// copy the current input into Buffer
// NOTE: we could use @memcpy but it requires non-aliasing buffers
// see https://www.openmymind.net/Zigs-memcpy-copyForwards-and-copyBackwards/
// Instead we just use std.mem.copyForwards
std.mem.copyForwards(u8, self.buf[self.pos..new_pos], _input[0..]);
// set the new cursor position
self.pos = new_pos;
// if multipart is not finished, go fetch the next input
if (!self.isFinished()) return error.MsgMultipart;
// otherwise multipart is finished, use its buffer as input
_input = self.buf[0..self.pos];
self.reset();
}
// handle several JSON messages in one read
return .{ .msg = _input[0..msg_size], .left = _input[msg_size..] };
}
};
test "Buffer" {
const Case = struct {
input: []const u8,
nb: u8,
};
const cases = [_]Case{
// simple
.{ .input = .{ 2, 0, 0, 0 } ++ "ok", .nb = 1 },
// combined
.{ .input = .{ 2, 0, 0, 0 } ++ "ok" ++ .{ 3, 0, 0, 0 } ++ "foo", .nb = 2 },
// multipart
.{ .input = .{ 9, 0, 0, 0 } ++ "multi", .nb = 0 },
.{ .input = "part", .nb = 1 },
// multipart & combined
.{ .input = .{ 9, 0, 0, 0 } ++ "multi", .nb = 0 },
.{ .input = "part" ++ .{ 2, 0, 0, 0 } ++ "ok", .nb = 2 },
// multipart & combined with other multipart
.{ .input = .{ 9, 0, 0, 0 } ++ "multi", .nb = 0 },
.{ .input = "part" ++ .{ 8, 0, 0, 0 } ++ "co", .nb = 1 },
.{ .input = "mbined", .nb = 1 },
// several multipart
.{ .input = .{ 23, 0, 0, 0 } ++ "multi", .nb = 0 },
.{ .input = "several", .nb = 0 },
.{ .input = "complex", .nb = 0 },
.{ .input = "part", .nb = 1 },
// combined & multipart
.{ .input = .{ 2, 0, 0, 0 } ++ "ok" ++ .{ 9, 0, 0, 0 } ++ "multi", .nb = 1 },
.{ .input = "part", .nb = 1 },
};
var b: [MaxSize]u8 = undefined;
var buf = Buffer{ .buf = &b };
for (cases) |case| {
var nb: u8 = 0;
var input = case.input;
while (input.len > 0) {
const parts = buf.read(input) catch |err| {
if (err == error.MsgMultipart) break; // go to the next case input
return err;
};
nb += 1;
input = parts.left;
}
try std.testing.expect(nb == case.nb);
}
}
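A hedged sketch of how a server-side read loop could drain a socket through Buffer; only the Buffer.read contract comes from this file, the socket plumbing and the readMessages name are assumptions:
// Hand every complete message to on_msg; error.MsgMultipart only means
// "wait for more bytes", so we go back to reading the socket.
fn readMessages(
    sock: std.posix.socket_t,
    buf: *Buffer,
    comptime on_msg: fn ([]const u8) void,
) !void {
    var read_buf: [1024]u8 = undefined;
    while (true) {
        const n = try std.posix.read(sock, &read_buf);
        if (n == 0) return; // peer closed the connection
        var input: []const u8 = read_buf[0..n];
        while (input.len > 0) {
            const parts = buf.read(input) catch |err| switch (err) {
                error.MsgMultipart => break, // message incomplete, read again
                else => return err,
            };
            on_msg(parts.msg);
            input = parts.left;
        }
    }
}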

View File

@@ -560,6 +560,11 @@ fn eventListenerGetData(lst: *EventListener) ?*anyopaque {
}
// EventTarget
pub const EventTargetType = enum(u4) {
node = c.DOM_EVENT_TARGET_NODE,
window = 2,
};
pub const EventTarget = c.dom_event_target;
pub fn eventTargetToNode(et: *EventTarget) *Node {
@@ -801,6 +806,14 @@ pub fn eventTargetDispatchEvent(et: *EventTarget, event: *Event) !bool {
return res;
}
pub fn eventTargetGetType(et: *EventTarget) !EventTargetType {
var res: c.dom_event_target_type = undefined;
const err = eventTargetVtable(et).dom_event_target_get_type.?(et, &res);
try DOMErr(err);
return @enumFromInt(res);
}
pub fn eventTargetTBaseFieldName(comptime T: type) ?[]const u8 {
std.debug.assert(@inComptime());
switch (@typeInfo(T)) {
@@ -824,8 +837,10 @@ pub const EventTargetTBase = extern struct {
.remove_event_listener = remove_event_listener,
.add_event_listener = add_event_listener,
.iter_event_listener = iter_event_listener,
.dom_event_target_get_type = dom_event_target_get_type,
},
eti: c.dom_event_target_internal = c.dom_event_target_internal{ .listeners = null },
et_type: c.dom_event_target_type = @intFromEnum(EventTargetType.node),
pub fn add_event_listener(et: [*c]c.dom_event_target, t: [*c]c.dom_string, l: ?*c.struct_dom_event_listener, capture: bool) callconv(.C) c.dom_exception {
const self = @as(*Self, @ptrCast(et));
@@ -858,6 +873,17 @@ pub const EventTargetTBase = extern struct {
const self = @as(*Self, @ptrCast(et));
return c._dom_event_target_iter_event_listener(self.eti, t, capture, cur, next, l);
}
pub fn dom_event_target_get_type(
et: [*c]c.dom_event_target,
res: [*c]c.dom_event_target_type,
) callconv(.C) c.dom_exception {
const self = @as(*Self, @ptrCast(et));
res.* = self.et_type;
return c.DOM_NO_ERR;
}
};
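For context, a minimal hedged sketch of how a caller might branch on the target type exposed above; describeTarget is hypothetical, only EventTarget, EventTargetType and eventTargetGetType come from this file:
// Decide whether an event target is backed by a DOM node or by the window
// wrapper before converting it (the conversion itself is out of scope here).
pub fn describeTarget(et: *EventTarget) ![]const u8 {
    return switch (try eventTargetGetType(et)) {
        .node => "node",
        .window => "window",
    };
}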
// NodeType
@@ -1008,7 +1034,6 @@ pub fn nodeLocalName(node: *Node) ![]const u8 {
var s: ?*String = undefined;
const err = nodeVtable(node).dom_node_get_local_name.?(node, &s);
try DOMErr(err);
if (s == null) return "";
var s_lower: ?*String = undefined;
const errStr = c.dom_string_tolower(s, true, &s_lower);
try DOMErr(errStr);
@@ -1099,7 +1124,6 @@ pub fn nodeName(node: *Node) ![]const u8 {
var s: ?*String = undefined;
const err = nodeVtable(node).dom_node_get_node_name.?(node, &s);
try DOMErr(err);
if (s == null) return "";
return strToData(s.?);
}
@@ -2266,20 +2290,3 @@ pub fn documentHTMLGetCurrentScript(doc: *DocumentHTML) !?*Script {
if (elem == null) return null;
return @ptrCast(elem.?);
}
pub fn documentHTMLSetLocation(T: type, doc: *DocumentHTML, location: *T) !void {
const l = @as(*anyopaque, @ptrCast(location));
const err = documentHTMLVtable(doc).set_location.?(doc, l);
try DOMErr(err);
}
pub fn documentHTMLGetLocation(T: type, doc: *DocumentHTML) !?*T {
var l: ?*anyopaque = undefined;
const err = documentHTMLVtable(doc).get_location.?(doc, &l);
try DOMErr(err);
if (l == null) return null;
const ptr: *align(@alignOf(*T)) anyopaque = @alignCast(l.?);
return @as(*T, @ptrCast(ptr));
}

View File

@@ -29,10 +29,8 @@ const Window = @import("html/window.zig").Window;
const xhr = @import("xhr/xhr.zig");
const storage = @import("storage/storage.zig");
const url = @import("url/url.zig");
const URL = url.URL;
const urlquery = @import("url/query.zig");
const Client = @import("asyncio").Client;
const Location = @import("html/location.zig").Location;
const documentTestExecFn = @import("dom/document.zig").testExecFn;
const HTMLDocumentTestExecFn = @import("html/document.zig").testExecFn;
@@ -100,12 +98,7 @@ fn testExecFn(
// alias global as self and window
var window = Window.create(null, null);
var u = try URL.constructor(alloc, "https://lightpanda.io/opensource-browser/", null);
defer u.deinit(alloc);
var location = Location{ .url = &u };
try window.replaceLocation(&location);
try window.replaceDocument(doc);
window.replaceDocument(doc);
window.setStorageShelf(&storageShelf);
try js_env.bindGlobal(window);
@@ -146,8 +139,6 @@ fn testsAllExecFn(
@import("polyfill/fetch.zig").testExecFn,
@import("html/navigator.zig").testExecFn,
@import("html/history.zig").testExecFn,
@import("html/location.zig").testExecFn,
@import("xmlserializer/xmlserializer.zig").testExecFn,
};
inline for (testFns) |testFn| {
@@ -223,14 +214,8 @@ pub fn main() !void {
try parser.init();
defer parser.deinit();
std.testing.allocator_instance = .{};
try test_fn.func();
if (std.testing.allocator_instance.deinit() == .leak) {
std.debug.print("======Memory Leak: {s}======\n", .{test_fn.name});
} else {
std.debug.print("{s}\tOK\n", .{test_fn.name});
}
std.debug.print("{s}\tOK\n", .{test_fn.name});
}
}
}
@@ -314,6 +299,9 @@ const kb = 1024;
const ms = std.time.ns_per_ms;
test {
const msgTest = @import("msg.zig");
std.testing.refAllDecls(msgTest);
const dumpTest = @import("browser/dump.zig");
std.testing.refAllDecls(dumpTest);
@@ -335,17 +323,13 @@ test {
const queryTest = @import("url/query.zig");
std.testing.refAllDecls(queryTest);
std.testing.refAllDecls(@import("generate.zig"));
std.testing.refAllDecls(@import("cdp/msg.zig"));
// Don't use refAllDecls, as this will pull in the entire project
// and break the test build.
// We should fix this. See this branch & the commit message for details:
// https://github.com/karlseguin/browser/commit/193ab5ceab3d3758ea06db04f7690460d79eb79e
_ = @import("server.zig");
}
fn testJSRuntime(alloc: std.mem.Allocator) !void {
// generate tests
try generate.tests();
// create JS vm
const vm = jsruntime.VM.init();
defer vm.deinit();

File diff suppressed because it is too large

View File

@@ -21,13 +21,15 @@ const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const DOMError = @import("netsurf").DOMError;
const log = std.log.scoped(.storage);
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
Bottle,
};
});
// See https://storage.spec.whatwg.org/#model for storage hierarchy.
// A Shed contains map of Shelves. The key is the document's origin.
@@ -149,22 +151,20 @@ pub const Bottle = struct {
}
pub fn _setItem(self: *Bottle, k: []const u8, v: []const u8) !void {
const gop = self.map.getOrPut(self.alloc, k) catch |e| {
const old = self.map.get(k);
if (old != null and std.mem.eql(u8, v, old.?)) return;
// owns k and v by copying them.
const kk = try self.alloc.dupe(u8, k);
errdefer self.alloc.free(kk);
const vv = try self.alloc.dupe(u8, v);
errdefer self.alloc.free(vv);
self.map.put(self.alloc, kk, vv) catch |e| {
log.debug("set item: {any}", .{e});
return DOMError.QuotaExceeded;
};
if (gop.found_existing == false) {
gop.key_ptr.* = try self.alloc.dupe(u8, k);
gop.value_ptr.* = try self.alloc.dupe(u8, v);
return;
}
if (std.mem.eql(u8, v, gop.value_ptr.*) == false) {
self.alloc.free(gop.value_ptr.*);
gop.value_ptr.* = try self.alloc.dupe(u8, v);
}
// > Broadcast this with key, oldValue, and value.
// https://html.spec.whatwg.org/multipage/webstorage.html#the-storageevent-interface
//
@@ -177,10 +177,8 @@ pub const Bottle = struct {
}
pub fn _removeItem(self: *Bottle, k: []const u8) !void {
if (self.map.fetchRemove(k)) |kv| {
self.alloc.free(kv.key);
self.alloc.free(kv.value);
}
const old = self.map.fetchRemove(k);
if (old == null) return;
// > Broadcast this with key, oldValue, and null.
// https://html.spec.whatwg.org/multipage/webstorage.html#the-storageevent-interface
@@ -239,17 +237,14 @@ test "storage bottle" {
var bottle = Bottle.init(std.testing.allocator);
defer bottle.deinit();
try std.testing.expectEqual(0, bottle.get_length());
try std.testing.expectEqual(null, bottle._getItem("foo"));
try std.testing.expect(0 == bottle.get_length());
try std.testing.expect(null == bottle._getItem("foo"));
try bottle._setItem("foo", "bar");
try std.testing.expectEqualStrings("bar", bottle._getItem("foo").?);
try bottle._setItem("foo", "other");
try std.testing.expectEqualStrings("other", bottle._getItem("foo").?);
try std.testing.expect(std.mem.eql(u8, "bar", bottle._getItem("foo").?));
try bottle._removeItem("foo");
try std.testing.expectEqual(0, bottle.get_length());
try std.testing.expectEqual(null, bottle._getItem("foo"));
try std.testing.expect(0 == bottle.get_length());
try std.testing.expect(null == bottle._getItem("foo"));
}

View File

@@ -18,108 +18,89 @@
// some utils to parse strings.
const std = @import("std");
const testing = std.testing;
pub const Reader = struct {
pos: usize = 0,
data: []const u8,
s: []const u8,
i: usize = 0,
pub fn until(self: *Reader, c: u8) []const u8 {
const pos = self.pos;
const data = self.data;
const ln = self.s.len;
const start = self.i;
while (self.i < ln) {
if (c == self.s[self.i]) return self.s[start..self.i];
self.i += 1;
}
const index = std.mem.indexOfScalarPos(u8, data, pos, c) orelse data.len;
self.pos = index;
return data[pos..index];
return self.s[start..self.i];
}
pub fn tail(self: *Reader) []const u8 {
const pos = self.pos;
const data = self.data;
if (pos > data.len) {
return "";
}
self.pos = data.len;
return data[pos..];
if (self.i > self.s.len) return "";
defer self.i = self.s.len;
return self.s[self.i..];
}
pub fn skip(self: *Reader) bool {
const pos = self.pos;
if (pos >= self.data.len) {
return false;
}
self.pos = pos + 1;
if (self.i >= self.s.len) return false;
self.i += 1;
return true;
}
};
// converts a comptime-known string (i.e. null-terminated) to an unsigned integer
pub fn asUint(comptime string: anytype) AsUintReturn(string) {
const byteLength = @bitSizeOf(@TypeOf(string.*)) / 8 - 1;
const expectedType = *const [byteLength:0]u8;
if (@TypeOf(string) != expectedType) {
@compileError("expected : " ++ @typeName(expectedType) ++
", got: " ++ @typeName(@TypeOf(string)));
}
return @bitCast(@as(*const [byteLength]u8, string).*);
test "Reader.skip" {
var r = Reader{ .s = "foo" };
try testing.expect(r.skip());
try testing.expect(r.skip());
try testing.expect(r.skip());
try testing.expect(!r.skip());
try testing.expect(!r.skip());
}
fn AsUintReturn(comptime string: anytype) type {
return @Type(.{
.Int = .{
.bits = @bitSizeOf(@TypeOf(string.*)) - 8, // (- 8) to exclude sentinel 0
.signedness = .unsigned,
},
});
}
const testing = std.testing;
test "parser.Reader: skip" {
var r = Reader{ .data = "foo" };
try testing.expectEqual(true, r.skip());
try testing.expectEqual(true, r.skip());
try testing.expectEqual(true, r.skip());
try testing.expectEqual(false, r.skip());
try testing.expectEqual(false, r.skip());
}
test "parser.Reader: tail" {
var r = Reader{ .data = "foo" };
test "Reader.tail" {
var r = Reader{ .s = "foo" };
try testing.expectEqualStrings("foo", r.tail());
try testing.expectEqualStrings("", r.tail());
try testing.expectEqualStrings("", r.tail());
}
test "parser.Reader: until" {
var r = Reader{ .data = "foo.bar.baz" };
test "Reader.until" {
var r = Reader{ .s = "foo.bar.baz" };
try testing.expectEqualStrings("foo", r.until('.'));
_ = r.skip();
try testing.expectEqualStrings("bar", r.until('.'));
_ = r.skip();
try testing.expectEqualStrings("baz", r.until('.'));
r = Reader{ .data = "foo" };
r = Reader{ .s = "foo" };
try testing.expectEqualStrings("foo", r.until('.'));
try testing.expectEqualStrings("", r.tail());
r = Reader{ .data = "" };
r = Reader{ .s = "" };
try testing.expectEqualStrings("", r.until('.'));
try testing.expectEqualStrings("", r.tail());
}
test "parser: asUint" {
const ASCII_x = @as(u8, @bitCast([1]u8{'x'}));
const ASCII_ab = @as(u16, @bitCast([2]u8{ 'a', 'b' }));
const ASCII_xyz = @as(u24, @bitCast([3]u8{ 'x', 'y', 'z' }));
const ASCII_abcd = @as(u32, @bitCast([4]u8{ 'a', 'b', 'c', 'd' }));
pub fn trim(s: []const u8) []const u8 {
const ln = s.len;
if (ln == 0) {
return "";
}
var start: usize = 0;
while (start < ln) {
if (!std.ascii.isWhitespace(s[start])) break;
start += 1;
}
try testing.expectEqual(ASCII_x, asUint("x"));
try testing.expectEqual(ASCII_ab, asUint("ab"));
try testing.expectEqual(ASCII_xyz, asUint("xyz"));
try testing.expectEqual(ASCII_abcd, asUint("abcd"));
var end: usize = ln;
while (end > 0) {
if (!std.ascii.isWhitespace(s[end - 1])) break;
end -= 1;
}
try testing.expectEqual(u8, @TypeOf(asUint("x")));
try testing.expectEqual(u16, @TypeOf(asUint("ab")));
try testing.expectEqual(u24, @TypeOf(asUint("xyz")));
try testing.expectEqual(u32, @TypeOf(asUint("abcd")));
return s[start..end];
}
test "trim" {
try testing.expectEqualStrings("", trim(""));
try testing.expectEqualStrings("foo", trim("foo"));
try testing.expectEqualStrings("foo", trim(" \n\tfoo"));
try testing.expectEqualStrings("foo", trim("foo \n\t"));
}
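As a hedged usage sketch combining the Reader{ .s = ... } variant and trim shown above (the header-splitting helper itself is hypothetical):
// Split a "Name: value" line into its trimmed parts.
fn splitHeader(line: []const u8) struct { name: []const u8, value: []const u8 } {
    var r = Reader{ .s = line };
    const name = trim(r.until(':'));
    _ = r.skip(); // consume the ':'
    return .{ .name = name, .value = trim(r.tail()) };
}
test "splitHeader" {
    const h = splitHeader("Content-Type:  text/html ");
    try testing.expectEqualStrings("Content-Type", h.name);
    try testing.expectEqualStrings("text/html", h.value);
}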

29
src/test_runner.zig Normal file
View File

@@ -0,0 +1,29 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const tests = @import("run_tests.zig");
pub const Types = tests.Types;
pub const UserContext = tests.UserContext;
pub const IO = tests.IO;
pub fn main() !void {
try tests.main();
}

View File

@@ -1,347 +0,0 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const builtin = @import("builtin");
const Allocator = std.mem.Allocator;
pub const std_options = std.Options{
.http_disable_tls = true,
};
const BORDER = "=" ** 80;
// use in custom panic handler
var current_test: ?[]const u8 = null;
pub fn main() !void {
var mem: [8192]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&mem);
const allocator = fba.allocator();
const env = Env.init(allocator);
defer env.deinit(allocator);
var slowest = SlowTracker.init(allocator, 5);
defer slowest.deinit();
var pass: usize = 0;
var fail: usize = 0;
var skip: usize = 0;
var leak: usize = 0;
const address = try std.net.Address.parseIp("127.0.0.1", 9582);
var listener = try address.listen(.{ .reuse_address = true });
defer listener.deinit();
const http_thread = try std.Thread.spawn(.{}, serverHTTP, .{&listener});
defer http_thread.join();
const printer = Printer.init();
printer.fmt("\r\x1b[0K", .{}); // beginning of line and clear to end of line
for (builtin.test_functions) |t| {
if (std.mem.eql(u8, t.name, "unit_tests.test_0")) {
// don't display anything for this test
try t.func();
continue;
}
var status = Status.pass;
slowest.startTiming();
const is_unnamed_test = isUnnamed(t);
if (env.filter) |f| {
if (!is_unnamed_test and std.mem.indexOf(u8, t.name, f) == null) {
continue;
}
}
const friendly_name = blk: {
const name = t.name;
var it = std.mem.splitScalar(u8, name, '.');
while (it.next()) |value| {
if (std.mem.eql(u8, value, "test")) {
const rest = it.rest();
break :blk if (rest.len > 0) rest else name;
}
}
break :blk name;
};
current_test = friendly_name;
std.testing.allocator_instance = .{};
const result = t.func();
current_test = null;
const ns_taken = slowest.endTiming(friendly_name);
if (std.testing.allocator_instance.deinit() == .leak) {
leak += 1;
printer.status(.fail, "\n{s}\n\"{s}\" - Memory Leak\n{s}\n", .{ BORDER, friendly_name, BORDER });
}
if (result) |_| {
pass += 1;
} else |err| switch (err) {
error.SkipZigTest => {
skip += 1;
status = .skip;
},
else => {
status = .fail;
fail += 1;
printer.status(.fail, "\n{s}\n\"{s}\" - {s}\n{s}\n", .{ BORDER, friendly_name, @errorName(err), BORDER });
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);
}
if (env.fail_first) {
break;
}
},
}
if (env.verbose) {
const ms = @as(f64, @floatFromInt(ns_taken)) / 1_000_000.0;
printer.status(status, "{s} ({d:.2}ms)\n", .{ friendly_name, ms });
} else {
printer.status(status, ".", .{});
}
}
const total_tests = pass + fail;
const status = if (fail == 0) Status.pass else Status.fail;
printer.status(status, "\n{d} of {d} test{s} passed\n", .{ pass, total_tests, if (total_tests != 1) "s" else "" });
if (skip > 0) {
printer.status(.skip, "{d} test{s} skipped\n", .{ skip, if (skip != 1) "s" else "" });
}
if (leak > 0) {
printer.status(.fail, "{d} test{s} leaked\n", .{ leak, if (leak != 1) "s" else "" });
}
printer.fmt("\n", .{});
try slowest.display(printer);
printer.fmt("\n", .{});
std.posix.exit(if (fail == 0) 0 else 1);
}
const Printer = struct {
out: std.fs.File.Writer,
fn init() Printer {
return .{
.out = std.io.getStdErr().writer(),
};
}
fn fmt(self: Printer, comptime format: []const u8, args: anytype) void {
std.fmt.format(self.out, format, args) catch unreachable;
}
fn status(self: Printer, s: Status, comptime format: []const u8, args: anytype) void {
const color = switch (s) {
.pass => "\x1b[32m",
.fail => "\x1b[31m",
.skip => "\x1b[33m",
else => "",
};
const out = self.out;
out.writeAll(color) catch @panic("writeAll failed?!");
std.fmt.format(out, format, args) catch @panic("std.fmt.format failed?!");
self.fmt("\x1b[0m", .{});
}
};
const Status = enum {
pass,
fail,
skip,
text,
};
const SlowTracker = struct {
const SlowestQueue = std.PriorityDequeue(TestInfo, void, compareTiming);
max: usize,
slowest: SlowestQueue,
timer: std.time.Timer,
fn init(allocator: Allocator, count: u32) SlowTracker {
const timer = std.time.Timer.start() catch @panic("failed to start timer");
var slowest = SlowestQueue.init(allocator, {});
slowest.ensureTotalCapacity(count) catch @panic("OOM");
return .{
.max = count,
.timer = timer,
.slowest = slowest,
};
}
const TestInfo = struct {
ns: u64,
name: []const u8,
};
fn deinit(self: SlowTracker) void {
self.slowest.deinit();
}
fn startTiming(self: *SlowTracker) void {
self.timer.reset();
}
fn endTiming(self: *SlowTracker, test_name: []const u8) u64 {
var timer = self.timer;
const ns = timer.lap();
var slowest = &self.slowest;
if (slowest.count() < self.max) {
// Capacity is fixed to the # of slow tests we want to track
// If we've tracked fewer tests than this capacity, then always add
slowest.add(TestInfo{ .ns = ns, .name = test_name }) catch @panic("failed to track test timing");
return ns;
}
{
// Optimization to avoid shifting the dequeue for the common case
// where the test isn't one of our slowest.
const fastest_of_the_slow = slowest.peekMin() orelse unreachable;
if (fastest_of_the_slow.ns > ns) {
// the test was faster than our fastest slow test, don't add
return ns;
}
}
// the previous fastest of our slow tests has been pushed off.
_ = slowest.removeMin();
slowest.add(TestInfo{ .ns = ns, .name = test_name }) catch @panic("failed to track test timing");
return ns;
}
fn display(self: *SlowTracker, printer: Printer) !void {
var slowest = self.slowest;
const count = slowest.count();
printer.fmt("Slowest {d} test{s}: \n", .{ count, if (count != 1) "s" else "" });
while (slowest.removeMinOrNull()) |info| {
const ms = @as(f64, @floatFromInt(info.ns)) / 1_000_000.0;
printer.fmt(" {d:.2}ms\t{s}\n", .{ ms, info.name });
}
}
fn compareTiming(context: void, a: TestInfo, b: TestInfo) std.math.Order {
_ = context;
return std.math.order(a.ns, b.ns);
}
};
const Env = struct {
verbose: bool,
fail_first: bool,
filter: ?[]const u8,
fn init(allocator: Allocator) Env {
return .{
.verbose = readEnvBool(allocator, "TEST_VERBOSE", true),
.fail_first = readEnvBool(allocator, "TEST_FAIL_FIRST", false),
.filter = readEnv(allocator, "TEST_FILTER"),
};
}
fn deinit(self: Env, allocator: Allocator) void {
if (self.filter) |f| {
allocator.free(f);
}
}
fn readEnv(allocator: Allocator, key: []const u8) ?[]const u8 {
const v = std.process.getEnvVarOwned(allocator, key) catch |err| {
if (err == error.EnvironmentVariableNotFound) {
return null;
}
std.log.warn("failed to get env var {s} due to err {}", .{ key, err });
return null;
};
return v;
}
fn readEnvBool(allocator: Allocator, key: []const u8, deflt: bool) bool {
const value = readEnv(allocator, key) orelse return deflt;
defer allocator.free(value);
return std.ascii.eqlIgnoreCase(value, "true");
}
};
fn isUnnamed(t: std.builtin.TestFn) bool {
const marker = ".test_";
const test_name = t.name;
const index = std.mem.indexOf(u8, test_name, marker) orelse return false;
_ = std.fmt.parseInt(u32, test_name[index + marker.len ..], 10) catch return false;
return true;
}
fn serverHTTP(listener: *std.net.Server) !void {
var read_buffer: [1024]u8 = undefined;
ACCEPT: while (true) {
var conn = try listener.accept();
defer conn.stream.close();
var server = std.http.Server.init(conn, &read_buffer);
while (server.state == .ready) {
var request = server.receiveHead() catch |err| switch (err) {
error.HttpConnectionClosing => continue :ACCEPT,
else => {
std.debug.print("Test HTTP Server error: {}\n", .{err});
return err;
},
};
const path = request.head.target;
if (std.mem.eql(u8, path, "/loader")) {
try writeResponse(&request, .{
.body = "Hello!",
});
}
}
}
}
const Response = struct {
body: []const u8 = "",
status: std.http.Status = .ok,
};
fn writeResponse(req: *std.http.Server.Request, res: Response) !void {
try req.respond(res.body, .{ .status = res.status });
}
test {
std.testing.refAllDecls(@import("url/query.zig"));
std.testing.refAllDecls(@import("browser/dump.zig"));
std.testing.refAllDecls(@import("browser/loader.zig"));
std.testing.refAllDecls(@import("browser/mime.zig"));
std.testing.refAllDecls(@import("cdp/msg.zig"));
std.testing.refAllDecls(@import("css/css.zig"));
std.testing.refAllDecls(@import("css/libdom_test.zig"));
std.testing.refAllDecls(@import("css/match_test.zig"));
std.testing.refAllDecls(@import("css/parser.zig"));
std.testing.refAllDecls(@import("generate.zig"));
std.testing.refAllDecls(@import("http/Client.zig"));
std.testing.refAllDecls(@import("storage/storage.zig"));
std.testing.refAllDecls(@import("iterator/iterator.zig"));
std.testing.refAllDecls(@import("server.zig"));
}

View File

@@ -19,58 +19,58 @@
const std = @import("std");
const Reader = @import("../str/parser.zig").Reader;
const asUint = @import("../str/parser.zig").asUint;
// Values maps string keys to lists of string values.
pub const Values = struct {
arena: std.heap.ArenaAllocator,
alloc: std.mem.Allocator,
map: std.StringArrayHashMapUnmanaged(List),
const List = std.ArrayListUnmanaged([]const u8);
pub fn init(allocator: std.mem.Allocator) Values {
pub fn init(alloc: std.mem.Allocator) Values {
return .{
.alloc = alloc,
.map = .{},
.arena = std.heap.ArenaAllocator.init(allocator),
};
}
pub fn deinit(self: *Values) void {
self.arena.deinit();
var it = self.map.iterator();
while (it.next()) |entry| {
for (entry.value_ptr.items) |v| self.alloc.free(v);
entry.value_ptr.deinit(self.alloc);
self.alloc.free(entry.key_ptr.*);
}
self.map.deinit(self.alloc);
}
// add the key/value pair to the values.
// the key and the value are duplicated.
pub fn append(self: *Values, k: []const u8, v: []const u8) !void {
const allocator = self.arena.allocator();
const owned_value = try allocator.dupe(u8, v);
const vv = try self.alloc.dupe(u8, v);
var gop = try self.map.getOrPut(allocator, k);
if (gop.found_existing) {
return gop.value_ptr.append(allocator, owned_value);
if (self.map.getPtr(k)) |list| {
return try list.append(self.alloc, vv);
}
gop.key_ptr.* = try allocator.dupe(u8, k);
const kk = try self.alloc.dupe(u8, k);
var list = List{};
try list.append(allocator, owned_value);
gop.value_ptr.* = list;
try list.append(self.alloc, vv);
try self.map.put(self.alloc, kk, list);
}
// append, taking ownership of the key and the value
fn appendOwned(self: *Values, k: []const u8, v: []const u8) !void {
const allocator = self.arena.allocator();
var gop = try self.map.getOrPut(allocator, k);
if (gop.found_existing) {
return gop.value_ptr.append(allocator, v);
if (self.map.getPtr(k)) |list| {
return try list.append(self.alloc, v);
}
var list = List{};
try list.append(allocator, v);
gop.value_ptr.* = list;
try list.append(self.alloc, v);
try self.map.put(self.alloc, k, list);
}
pub fn get(self: *const Values, k: []const u8) []const []const u8 {
pub fn get(self: *Values, k: []const u8) [][]const u8 {
if (self.map.get(k)) |list| {
return list.items;
}
@@ -78,7 +78,7 @@ pub const Values = struct {
return &[_][]const u8{};
}
pub fn first(self: *const Values, k: []const u8) []const u8 {
pub fn first(self: *Values, k: []const u8) []const u8 {
if (self.map.getPtr(k)) |list| {
if (list.items.len == 0) return "";
return list.items[0];
@@ -88,7 +88,10 @@ pub const Values = struct {
}
pub fn delete(self: *Values, k: []const u8) void {
_ = self.map.fetchSwapRemove(k);
if (self.map.getPtr(k)) |list| {
list.deinit(self.alloc);
_ = self.map.fetchSwapRemove(k);
}
}
pub fn deleteValue(self: *Values, k: []const u8, v: []const u8) void {
@@ -102,48 +105,80 @@ pub const Values = struct {
}
}
pub fn count(self: *const Values) usize {
pub fn count(self: *Values) usize {
return self.map.count();
}
pub fn encode(self: *const Values, writer: anytype) !void {
// the caller owns the returned string.
pub fn encode(self: *Values, writer: anytype) !void {
var i: usize = 0;
var it = self.map.iterator();
const first_entry = it.next() orelse return;
try encodeKeyValues(first_entry, writer);
while (it.next()) |entry| {
try writer.writeByte('&');
try encodeKeyValues(entry, writer);
defer i += 1;
if (i > 0) try writer.writeByte('&');
if (entry.value_ptr.items.len == 0) {
try escape(writer, entry.key_ptr.*);
continue;
}
const start = i;
for (entry.value_ptr.items) |v| {
defer i += 1;
if (start < i) try writer.writeByte('&');
try escape(writer, entry.key_ptr.*);
if (v.len > 0) try writer.writeByte('=');
try escape(writer, v);
}
}
}
};
fn encodeKeyValues(entry: anytype, writer: anytype) !void {
const key = entry.key_ptr.*;
try escape(key, writer);
const values = entry.value_ptr.items;
if (values.len == 0) {
return;
}
if (values[0].len > 0) {
try writer.writeByte('=');
try escape(values[0], writer);
}
for (values[1..]) |value| {
try writer.writeByte('&');
try escape(key, writer);
if (value.len > 0) {
try writer.writeByte('=');
try escape(value, writer);
}
}
fn unhex(c: u8) u8 {
if ('0' <= c and c <= '9') return c - '0';
if ('a' <= c and c <= 'f') return c - 'a' + 10;
if ('A' <= c and c <= 'F') return c - 'A' + 10;
return 0;
}
fn escape(raw: []const u8, writer: anytype) !void {
// unescape decodes a percent-encoded string.
// The caller owns the returned string.
pub fn unescape(alloc: std.mem.Allocator, s: []const u8) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .{};
defer buf.deinit(alloc);
var i: usize = 0;
while (i < s.len) {
defer i += 1;
switch (s[i]) {
'%' => {
if (i + 2 > s.len) return error.EscapeError;
if (!std.ascii.isHex(s[i + 1])) return error.EscapeError;
if (!std.ascii.isHex(s[i + 2])) return error.EscapeError;
try buf.append(alloc, unhex(s[i + 1]) << 4 | unhex(s[i + 2]));
i += 2;
},
'+' => try buf.append(alloc, ' '), // TODO should we decode or keep it as is?
else => try buf.append(alloc, s[i]),
}
}
return try buf.toOwnedSlice(alloc);
}
test "unescape" {
var v: []const u8 = undefined;
const alloc = std.testing.allocator;
v = try unescape(alloc, "%7E");
try std.testing.expect(std.mem.eql(u8, "~", v));
alloc.free(v);
}
pub fn escape(writer: anytype, raw: []const u8) !void {
var start: usize = 0;
for (raw, 0..) |char, index| {
if ('a' <= char and char <= 'z' or 'A' <= char and char <= 'Z' or '0' <= char and char <= '9') {
@@ -161,17 +196,15 @@ pub fn parseQuery(alloc: std.mem.Allocator, s: []const u8) !Values {
var values = Values.init(alloc);
errdefer values.deinit();
const arena = values.arena.allocator();
const ln = s.len;
if (ln == 0) return values;
var r = Reader{ .data = s };
var r = Reader{ .s = s };
while (true) {
const param = r.until('&');
if (param.len == 0) break;
var rr = Reader{ .data = param };
var rr = Reader{ .s = param };
const k = rr.until('=');
if (k.len == 0) continue;
@@ -179,8 +212,8 @@ pub fn parseQuery(alloc: std.mem.Allocator, s: []const u8) !Values {
const v = rr.tail();
// decode k and v
const kk = try unescape(arena, k);
const vv = try unescape(arena, v);
const kk = try unescape(alloc, k);
const vv = try unescape(alloc, v);
try values.appendOwned(kk, vv);
@@ -190,244 +223,61 @@ pub fn parseQuery(alloc: std.mem.Allocator, s: []const u8) !Values {
return values;
}
// The returned string may or may not be newly allocated. Callers should use arenas
fn unescape(allocator: std.mem.Allocator, input: []const u8) ![]const u8 {
const HEX_CHAR = comptime blk: {
var all = std.mem.zeroes([256]bool);
for ('a'..('f' + 1)) |b| all[b] = true;
for ('A'..('F' + 1)) |b| all[b] = true;
for ('0'..('9' + 1)) |b| all[b] = true;
break :blk all;
};
const HEX_DECODE = comptime blk: {
var all = std.mem.zeroes([256]u8);
for ('a'..('z' + 1)) |b| all[b] = b - 'a' + 10;
for ('A'..('Z' + 1)) |b| all[b] = b - 'A' + 10;
for ('0'..('9' + 1)) |b| all[b] = b - '0';
break :blk all;
};
var has_plus = false;
var unescaped_len = input.len;
{
// Figure out if we have any '+' (spaces) to decode and what the final unescaped length
// will be (which will let us know if we have anything to unescape in
// the first place)
var i: usize = 0;
while (i < input.len) {
const c = input[i];
if (c == '%') {
if (i + 2 >= input.len or !HEX_CHAR[input[i + 1]] or !HEX_CHAR[input[i + 2]]) {
return error.EscapeError;
}
i += 3;
unescaped_len -= 2;
} else if (c == '+') {
has_plus = true;
i += 1;
} else {
i += 1;
}
}
}
// no encoding, and no plus. nothing to unescape
if (unescaped_len == input.len and has_plus == false) {
return input;
}
var unescaped = try allocator.alloc(u8, unescaped_len);
errdefer allocator.free(unescaped);
var input_pos: usize = 0;
for (0..unescaped_len) |unescaped_pos| {
switch (input[input_pos]) {
'+' => {
unescaped[unescaped_pos] = ' ';
input_pos += 1;
},
'%' => {
const encoded = input[input_pos + 1 .. input_pos + 3];
const encoded_as_uint = @as(u16, @bitCast(encoded[0..2].*));
unescaped[unescaped_pos] = switch (encoded_as_uint) {
asUint("20") => ' ',
asUint("21") => '!',
asUint("22") => '"',
asUint("23") => '#',
asUint("24") => '$',
asUint("25") => '%',
asUint("26") => '&',
asUint("27") => '\'',
asUint("28") => '(',
asUint("29") => ')',
asUint("2A") => '*',
asUint("2B") => '+',
asUint("2C") => ',',
asUint("2F") => '/',
asUint("3A") => ':',
asUint("3B") => ';',
asUint("3D") => '=',
asUint("3F") => '?',
asUint("40") => '@',
asUint("5B") => '[',
asUint("5D") => ']',
else => HEX_DECODE[encoded[0]] << 4 | HEX_DECODE[encoded[1]],
};
input_pos += 3;
},
else => |c| {
unescaped[unescaped_pos] = c;
input_pos += 1;
},
}
}
return unescaped;
}
const testing = std.testing;
test "url.Query: unescape" {
const allocator = testing.allocator;
const cases = [_]struct { expected: []const u8, input: []const u8, free: bool }{
.{ .expected = "", .input = "", .free = false },
.{ .expected = "over", .input = "over", .free = false },
.{ .expected = "Hello World", .input = "Hello World", .free = false },
.{ .expected = "~", .input = "%7E", .free = true },
.{ .expected = "~", .input = "%7e", .free = true },
.{ .expected = "Hello~World", .input = "Hello%7eWorld", .free = true },
.{ .expected = "Hello World", .input = "Hello++World", .free = true },
};
for (cases) |case| {
const value = try unescape(allocator, case.input);
defer if (case.free) {
allocator.free(value);
};
try testing.expectEqualStrings(case.expected, value);
}
try testing.expectError(error.EscapeError, unescape(undefined, "%"));
try testing.expectError(error.EscapeError, unescape(undefined, "%a"));
try testing.expectError(error.EscapeError, unescape(undefined, "%1"));
try testing.expectError(error.EscapeError, unescape(undefined, "123%45%6"));
try testing.expectError(error.EscapeError, unescape(undefined, "%zzzzz"));
try testing.expectError(error.EscapeError, unescape(undefined, "%0\xff"));
}
test "url.Query: parseQuery" {
try testParseQuery(.{}, "");
try testParseQuery(.{}, "&");
try testParseQuery(.{ .a = [_][]const u8{"b"} }, "a=b");
try testParseQuery(.{ .hello = [_][]const u8{"world"} }, "hello=world");
try testParseQuery(.{ .hello = [_][]const u8{ "world", "all" } }, "hello=world&hello=all");
try testParseQuery(.{
.a = [_][]const u8{"b"},
.b = [_][]const u8{"c"},
}, "a=b&b=c");
try testParseQuery(.{ .a = [_][]const u8{""} }, "a");
try testParseQuery(.{ .a = [_][]const u8{ "", "", "" } }, "a&a&a");
try testParseQuery(.{ .abc = [_][]const u8{""} }, "abc");
try testParseQuery(.{
.abc = [_][]const u8{""},
.dde = [_][]const u8{ "", "" },
}, "abc&dde&dde");
try testParseQuery(.{
.@"power is >" = [_][]const u8{"9,000?"},
}, "power%20is%20%3E=9%2C000%3F");
}
test "url.Query.Values: get/first/count" {
var values = Values.init(testing.allocator);
test "parse empty query" {
var values = try parseQuery(std.testing.allocator, "");
defer values.deinit();
{
// empty
try testing.expectEqual(0, values.count());
try testing.expectEqual(0, values.get("").len);
try testing.expectEqualStrings("", values.first(""));
try testing.expectEqual(0, values.get("key").len);
try testing.expectEqualStrings("", values.first("key"));
}
{
// add 1 value => key
try values.appendOwned("key", "value");
try testing.expectEqual(1, values.count());
try testing.expectEqual(1, values.get("key").len);
try testing.expectEqualSlices(
[]const u8,
&.{"value"},
values.get("key"),
);
try testing.expectEqualStrings("value", values.first("key"));
}
{
// add another value for the same key
try values.appendOwned("key", "another");
try testing.expectEqual(1, values.count());
try testing.expectEqual(2, values.get("key").len);
try testing.expectEqualSlices(
[]const u8,
&.{ "value", "another" },
values.get("key"),
);
try testing.expectEqualStrings("value", values.first("key"));
}
{
// add a new key (and value)
try values.appendOwned("over", "9000!");
try testing.expectEqual(2, values.count());
try testing.expectEqual(2, values.get("key").len);
try testing.expectEqual(1, values.get("over").len);
try testing.expectEqualSlices(
[]const u8,
&.{"9000!"},
values.get("over"),
);
try testing.expectEqualStrings("9000!", values.first("over"));
}
try std.testing.expect(values.count() == 0);
}
test "url.Query.Values: encode" {
var values = try parseQuery(
testing.allocator,
"hello=world&i%20will%20not%20fear=%3E%3E&a=b&a=c",
);
test "parse empty query &" {
var values = try parseQuery(std.testing.allocator, "&");
defer values.deinit();
try std.testing.expect(values.count() == 0);
}
test "parse query" {
var values = try parseQuery(std.testing.allocator, "a=b&b=c");
defer values.deinit();
try std.testing.expect(values.count() == 2);
try std.testing.expect(values.get("a").len == 1);
try std.testing.expect(std.mem.eql(u8, values.get("a")[0], "b"));
try std.testing.expect(std.mem.eql(u8, values.first("a"), "b"));
try std.testing.expect(values.get("b").len == 1);
try std.testing.expect(std.mem.eql(u8, values.get("b")[0], "c"));
try std.testing.expect(std.mem.eql(u8, values.first("b"), "c"));
}
test "parse query no value" {
var values = try parseQuery(std.testing.allocator, "a");
defer values.deinit();
try std.testing.expect(values.count() == 1);
try std.testing.expect(std.mem.eql(u8, values.first("a"), ""));
}
test "parse query dup" {
var values = try parseQuery(std.testing.allocator, "a=b&a=c");
defer values.deinit();
try std.testing.expect(values.count() == 1);
try std.testing.expect(std.mem.eql(u8, values.first("a"), "b"));
try std.testing.expect(values.get("a").len == 2);
}
test "encode query" {
var values = try parseQuery(std.testing.allocator, "a=b&b=c");
defer values.deinit();
try values.append("a", "~");
var buf: std.ArrayListUnmanaged(u8) = .{};
defer buf.deinit(testing.allocator);
try values.encode(buf.writer(testing.allocator));
try testing.expectEqualStrings(
"hello=world&i%20will%20not%20fear=%3E%3E&a=b&a=c",
buf.items,
);
}
defer buf.deinit(std.testing.allocator);
fn testParseQuery(expected: anytype, query: []const u8) !void {
var values = try parseQuery(testing.allocator, query);
defer values.deinit();
try values.encode(buf.writer(std.testing.allocator));
var count: usize = 0;
inline for (@typeInfo(@TypeOf(expected)).Struct.fields) |f| {
const actual = values.get(f.name);
const expect = @field(expected, f.name);
try testing.expectEqual(expect.len, actual.len);
for (expect, actual) |e, a| {
try testing.expectEqualStrings(e, a);
}
count += 1;
}
try testing.expectEqual(count, values.count());
try std.testing.expect(std.mem.eql(u8, buf.items, "a=b&a=%7E&b=c"));
}
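
Taken together, the query.zig hunks above (in both the old and new variants) expose a small API: parseQuery builds a Values map, get/first/count read it, append adds duplicated key/value pairs, and encode percent-escapes everything back into a writer. A minimal usage sketch against that API; the url/query.zig import path is borrowed from the refAllDecls list earlier in this changeset, and the surrounding program is an illustration only:

const std = @import("std");
const query = @import("url/query.zig");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const alloc = gpa.allocator();

    // Parse a raw query string into a key -> list-of-values map.
    var values = try query.parseQuery(alloc, "a=b&a=c&flag");
    defer values.deinit();

    // count() == 2, first("a") == "b", get("a").len == 2.
    std.debug.print("count={d} first(a)={s}\n", .{ values.count(), values.first("a") });

    // Append another pair, then re-encode; reserved characters get escaped.
    try values.append("msg", "hello world");
    var buf: std.ArrayListUnmanaged(u8) = .{};
    defer buf.deinit(alloc);
    try values.encode(buf.writer(alloc));
    std.debug.print("{s}\n", .{buf.items});
}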

View File

@@ -21,13 +21,14 @@ const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const query = @import("query.zig");
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
URL,
URLSearchParams,
};
});
// https://url.spec.whatwg.org/#url
//

View File

@@ -91,7 +91,7 @@ pub fn run(arena: *std.heap.ArenaAllocator, comptime dir: []const u8, f: []const
// setup global env vars.
var window = Window.create(null, null);
try window.replaceDocument(html_doc);
window.replaceDocument(html_doc);
window.setStorageShelf(&storageShelf);
try js_env.bindGlobal(&window);

View File

@@ -21,6 +21,7 @@ const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const generate = @import("../generate.zig");
const DOMError = @import("netsurf").DOMError;
const DOMException = @import("../dom/exceptions.zig").DOMException;
@@ -28,7 +29,7 @@ const DOMException = @import("../dom/exceptions.zig").DOMException;
const ProgressEvent = @import("progress_event.zig").ProgressEvent;
const XMLHttpRequestEventTarget = @import("event_target.zig").XMLHttpRequestEventTarget;
const Mime = @import("../browser/mime.zig").Mime;
const Mime = @import("../browser/mime.zig");
const Loop = jsruntime.Loop;
const Client = @import("asyncio").Client;
@@ -41,11 +42,11 @@ const log = std.log.scoped(.xhr);
// XHR interfaces
// https://xhr.spec.whatwg.org/#interface-xmlhttprequest
pub const Interfaces = .{
pub const Interfaces = generate.Tuple(.{
XMLHttpRequestEventTarget,
XMLHttpRequestUpload,
XMLHttpRequest,
};
});
pub const XMLHttpRequestUpload = struct {
pub const prototype = *XMLHttpRequestEventTarget;
@@ -141,7 +142,7 @@ pub const XMLHttpRequest = struct {
// https://lightpanda.slack.com/archives/C05TRU6RBM1/p1707819010681019
// response_override_mime_type: ?[]const u8 = null,
response_mime: ?Mime = null,
response_mime: Mime = undefined,
response_obj: ?ResponseObj = null,
send_flag: bool = false,
@@ -313,11 +314,8 @@ pub const XMLHttpRequest = struct {
if (self.response_obj) |v| v.deinit();
self.response_obj = null;
self.response_mime = Mime.Empty;
self.response_type = .Empty;
if (self.response_mime) |*mime| {
mime.deinit();
self.response_mime = null;
}
// TODO should we clearRetainingCapacity instead?
self.headers.clearAndFree();
@@ -339,9 +337,6 @@ pub const XMLHttpRequest = struct {
self.reset();
self.headers.deinit();
self.response_headers.deinit();
if (self.response_mime) |*mime| {
mime.deinit();
}
self.proto.deinit(alloc);
}
@@ -550,7 +545,7 @@ pub const XMLHttpRequest = struct {
// extract a mime type from headers.
const ct = self.response_headers.getFirstValue("Content-Type") orelse "text/xml";
self.response_mime = Mime.parse(self.alloc, ct) catch |e| return self.onErr(e);
self.response_mime = Mime.parse(ct) catch |e| return self.onErr(e);
// TODO handle override mime type
@@ -826,14 +821,13 @@ pub const XMLHttpRequest = struct {
// TODO parse XML.
// https://xhr.spec.whatwg.org/#response-object
fn setResponseObjDocument(self: *XMLHttpRequest, alloc: std.mem.Allocator) void {
const response_mime = &self.response_mime.?;
const isHTML = response_mime.isHTML();
const isHTML = self.response_mime.eql(Mime.HTML);
// TODO If finalMIME is not an HTML MIME type or an XML MIME type, then
// return.
if (!isHTML) return;
const ccharset = alloc.dupeZ(u8, response_mime.charset orelse "utf-8") catch {
const ccharset = alloc.dupeZ(u8, self.response_mime.charset orelse "utf-8") catch {
self.response_obj = .{ .Failure = true };
return;
};
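
For context on the XHR hunks above: the two variants shown differ in whether response_mime is an optional that owns an allocated value (and therefore must be released in reset() and deinit()) or a plain value parsed without an allocator. A standalone sketch of the optional-owned-resource variant of that lifecycle, using a hypothetical Resource type in place of the real Mime struct; the names below are illustrative, not the project's API:

const std = @import("std");

// Hypothetical stand-in for Mime: allocates on parse, frees on deinit.
const Resource = struct {
    alloc: std.mem.Allocator,
    data: []u8,

    fn parse(alloc: std.mem.Allocator, raw: []const u8) !Resource {
        return .{ .alloc = alloc, .data = try alloc.dupe(u8, raw) };
    }

    fn deinit(self: *Resource) void {
        self.alloc.free(self.data);
    }
};

const Holder = struct {
    resource: ?Resource = null,

    // Same shape as the reset()/deinit() hunks: release the optional value if
    // present, then null the field so a later parse can safely replace it.
    fn reset(self: *Holder) void {
        if (self.resource) |*r| {
            r.deinit();
            self.resource = null;
        }
    }
};

test "optional owned resource is released on reset" {
    var h = Holder{};
    h.resource = try Resource.parse(std.testing.allocator, "text/html; charset=utf-8");
    h.reset();
    try std.testing.expect(h.resource == null);
}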

View File

@@ -1,71 +0,0 @@
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
const std = @import("std");
const jsruntime = @import("jsruntime");
const Case = jsruntime.test_utils.Case;
const checkCases = jsruntime.test_utils.checkCases;
const DOMError = @import("netsurf").DOMError;
const parser = @import("netsurf");
const dump = @import("../browser/dump.zig");
pub const Interfaces = .{
XMLSerializer,
};
// https://w3c.github.io/DOM-Parsing/#dom-xmlserializer-constructor
pub const XMLSerializer = struct {
pub const mem_guarantied = true;
pub fn constructor() !XMLSerializer {
return .{};
}
pub fn deinit(_: *XMLSerializer, _: std.mem.Allocator) void {}
pub fn _serializeToString(_: XMLSerializer, alloc: std.mem.Allocator, root: *parser.Node) ![]const u8 {
var buf = std.ArrayList(u8).init(alloc);
defer buf.deinit();
if (try parser.nodeType(root) == .document) {
try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), buf.writer());
} else {
try dump.writeNode(root, buf.writer());
}
// TODO express that the caller owns the slice.
// https://github.com/lightpanda-io/jsruntime-lib/issues/195
return try buf.toOwnedSlice();
}
};
// Tests
// -----
pub fn testExecFn(
_: std.mem.Allocator,
js_env: *jsruntime.Env,
) anyerror!void {
var serializer = [_]Case{
.{ .src = "const s = new XMLSerializer()", .ex = "undefined" },
.{ .src = "s.serializeToString(document.getElementById('para'))", .ex = "<p id=\"para\"> And</p>" },
};
try checkCases(js_env, &serializer);
}

1
vendor/websocket.zig vendored Submodule

Submodule vendor/websocket.zig added at 1b49626c78