mirror of
https://github.com/lightpanda-io/browser.git
synced 2026-04-03 16:10:29 +00:00
Compare commits
4 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3864aa4a6b | ||
|
|
4dd014de41 | ||
|
|
224a7ca0fe | ||
|
|
226d1ff183 |
2
.github/workflows/e2e-integration-test.yml
vendored
2
.github/workflows/e2e-integration-test.yml
vendored
@@ -62,7 +62,7 @@ jobs:
|
||||
- name: run end to end integration tests
|
||||
continue-on-error: true
|
||||
run: |
|
||||
./lightpanda serve --http-proxy ${{ secrets.MASSIVE_PROXY_RESIDENTIAL_US }} --log-level error & echo $! > LPD.pid
|
||||
./lightpanda serve --log-level error & echo $! > LPD.pid
|
||||
go run integration/main.go |tee result.log
|
||||
kill `cat LPD.pid`
|
||||
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
.minimum_zig_version = "0.15.2",
|
||||
.dependencies = .{
|
||||
.v8 = .{
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.7.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH67uBBAD95hWsPQz3Ni1PlZjdywtPXrGUAp8rSKco",
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/715ccbae21d7528eba951f78af4dfd48835fc172.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH65-HBADXFCII9ucZE3NgbkWmwsbTbsx8qevYVki5",
|
||||
},
|
||||
// .v8 = .{ .path = "../zig-v8-fork" },
|
||||
.brotli = .{
|
||||
|
||||
@@ -55,7 +55,7 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
.arena_pool = undefined,
|
||||
};
|
||||
|
||||
app.network = try Network.init(allocator, app, config);
|
||||
app.network = try Network.init(allocator, config);
|
||||
errdefer app.network.deinit();
|
||||
|
||||
app.platform = try Platform.init();
|
||||
|
||||
@@ -156,13 +156,6 @@ pub fn userAgentSuffix(self: *const Config) ?[]const u8 {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn httpCacheDir(self: *const Config) ?[]const u8 {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch, .mcp => |opts| opts.common.http_cache_dir,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn cdpTimeout(self: *const Config) usize {
|
||||
return switch (self.mode) {
|
||||
.serve => |opts| if (opts.timeout > 604_800) 604_800_000 else @as(usize, opts.timeout) * 1000,
|
||||
@@ -280,7 +273,6 @@ pub const Common = struct {
|
||||
log_format: ?log.Format = null,
|
||||
log_filter_scopes: ?[]log.Scope = null,
|
||||
user_agent_suffix: ?[]const u8 = null,
|
||||
http_cache_dir: ?[]const u8 = null,
|
||||
|
||||
web_bot_auth_key_file: ?[]const u8 = null,
|
||||
web_bot_auth_keyid: ?[]const u8 = null,
|
||||
@@ -400,11 +392,6 @@ pub fn printUsageAndExit(self: *const Config, success: bool) void {
|
||||
\\
|
||||
\\--web-bot-auth-domain
|
||||
\\ Your domain e.g. yourdomain.com
|
||||
\\
|
||||
\\--http-cache-dir
|
||||
\\ Path to a directory to use as a Filesystem Cache for network resources.
|
||||
\\ Omitting this will result is no caching.
|
||||
\\ Defaults to no caching.
|
||||
;
|
||||
|
||||
// MAX_HELP_LEN|
|
||||
@@ -1079,14 +1066,5 @@ fn parseCommonArg(
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--http-cache-dir", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--http-cache-dir" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.http_cache_dir = try allocator.dupe(u8, str);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -32,9 +32,6 @@ const CookieJar = @import("webapi/storage/Cookie.zig").Jar;
|
||||
const http = @import("../network/http.zig");
|
||||
const Network = @import("../network/Network.zig");
|
||||
const Robots = @import("../network/Robots.zig");
|
||||
const Cache = @import("../network/cache/Cache.zig");
|
||||
const CacheMetadata = Cache.CachedMetadata;
|
||||
const CachedResponse = Cache.CachedResponse;
|
||||
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
@@ -314,73 +311,7 @@ pub fn request(self: *Client, req: Request) !void {
|
||||
return self.fetchRobotsThenProcessRequest(robots_url, req);
|
||||
}
|
||||
|
||||
fn serveFromCache(req: Request, cached: *const CachedResponse) !void {
|
||||
const response = Response.fromCached(req.ctx, cached);
|
||||
defer switch (cached.data) {
|
||||
.buffer => |_| {},
|
||||
.file => |f| f.file.close(),
|
||||
};
|
||||
|
||||
if (req.start_callback) |cb| {
|
||||
try cb(response);
|
||||
}
|
||||
|
||||
const proceed = try req.header_callback(response);
|
||||
if (!proceed) {
|
||||
req.error_callback(req.ctx, error.Abort);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (cached.data) {
|
||||
.buffer => |data| {
|
||||
if (data.len > 0) {
|
||||
try req.data_callback(response, data);
|
||||
}
|
||||
},
|
||||
.file => |f| {
|
||||
const file = f.file;
|
||||
|
||||
var buf: [1024]u8 = undefined;
|
||||
var file_reader = file.reader(&buf);
|
||||
try file_reader.seekTo(f.offset);
|
||||
const reader = &file_reader.interface;
|
||||
|
||||
var read_buf: [1024]u8 = undefined;
|
||||
var remaining = f.len;
|
||||
|
||||
while (remaining > 0) {
|
||||
const read_len = @min(read_buf.len, remaining);
|
||||
const n = try reader.readSliceShort(read_buf[0..read_len]);
|
||||
if (n == 0) break;
|
||||
remaining -= n;
|
||||
try req.data_callback(response, read_buf[0..n]);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
try req.done_callback(req.ctx);
|
||||
}
|
||||
|
||||
fn processRequest(self: *Client, req: Request) !void {
|
||||
if (self.network.cache) |*cache| {
|
||||
if (req.method == .GET) {
|
||||
const arena = try self.network.app.arena_pool.acquire(.{ .debug = "HttpClient.processRequest.cache" });
|
||||
defer self.network.app.arena_pool.release(arena);
|
||||
|
||||
var iter = req.headers.iterator();
|
||||
const req_header_list = try iter.collect(arena);
|
||||
|
||||
if (cache.get(arena, .{
|
||||
.url = req.url,
|
||||
.timestamp = std.time.timestamp(),
|
||||
.request_headers = req_header_list.items,
|
||||
})) |cached| {
|
||||
defer req.headers.deinit();
|
||||
return serveFromCache(req, &cached);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const transfer = try self.makeTransfer(req);
|
||||
|
||||
transfer.req.notification.dispatch(.http_request_start, &.{ .transfer = transfer });
|
||||
@@ -468,10 +399,8 @@ fn fetchRobotsThenProcessRequest(self: *Client, robots_url: [:0]const u8, req: R
|
||||
try entry.value_ptr.append(self.allocator, req);
|
||||
}
|
||||
|
||||
fn robotsHeaderCallback(response: Response) !bool {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(response.ctx));
|
||||
// Robots callbacks only happen on real live requests.
|
||||
const transfer = response.inner.transfer;
|
||||
fn robotsHeaderCallback(transfer: *Transfer) !bool {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
if (transfer.response_header) |hdr| {
|
||||
log.debug(.browser, "robots status", .{ .status = hdr.status, .robots_url = ctx.robots_url });
|
||||
@@ -485,8 +414,8 @@ fn robotsHeaderCallback(response: Response) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn robotsDataCallback(response: Response, data: []const u8) !void {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(response.ctx));
|
||||
fn robotsDataCallback(transfer: *Transfer, data: []const u8) !void {
|
||||
const ctx: *RobotsRequestContext = @ptrCast(@alignCast(transfer.ctx));
|
||||
try ctx.buffer.appendSlice(ctx.client.allocator, data);
|
||||
}
|
||||
|
||||
@@ -705,43 +634,13 @@ fn makeTransfer(self: *Client, req: Request) !*Transfer {
|
||||
.id = id,
|
||||
.url = req.url,
|
||||
.req = req,
|
||||
.ctx = req.ctx,
|
||||
.client = self,
|
||||
.max_response_size = self.network.config.httpMaxResponseSize(),
|
||||
};
|
||||
return transfer;
|
||||
}
|
||||
|
||||
fn requestFailed(transfer: *Transfer, err: anyerror, comptime execute_callback: bool) void {
|
||||
if (transfer._notified_fail) {
|
||||
// we can force a failed request within a callback, which will eventually
|
||||
// result in this being called again in the more general loop. We do this
|
||||
// because we can raise a more specific error inside a callback in some cases
|
||||
return;
|
||||
}
|
||||
|
||||
transfer._notified_fail = true;
|
||||
|
||||
transfer.req.notification.dispatch(.http_request_fail, &.{
|
||||
.transfer = transfer,
|
||||
.err = err,
|
||||
});
|
||||
|
||||
if (execute_callback) {
|
||||
transfer.req.error_callback(transfer.req.ctx, err);
|
||||
} else if (transfer.req.shutdown_callback) |cb| {
|
||||
cb(transfer.req.ctx);
|
||||
}
|
||||
}
|
||||
|
||||
// Same restriction as changeProxy. Should be ok since this is only called on
|
||||
// BrowserContext deinit.
|
||||
pub fn restoreOriginalProxy(self: *Client) !void {
|
||||
try self.ensureNoActiveConnection();
|
||||
|
||||
self.http_proxy = self.network.config.httpProxy();
|
||||
self.use_proxy = self.http_proxy != null;
|
||||
}
|
||||
|
||||
fn makeRequest(self: *Client, conn: *http.Connection, transfer: *Transfer) anyerror!void {
|
||||
{
|
||||
// Reset per-response state for retries (auth challenge, queue).
|
||||
@@ -775,7 +674,7 @@ fn makeRequest(self: *Client, conn: *http.Connection, transfer: *Transfer) anyer
|
||||
self.active += 1;
|
||||
|
||||
if (transfer.req.start_callback) |cb| {
|
||||
cb(Response.fromTransfer(transfer)) catch |err| {
|
||||
cb(transfer) catch |err| {
|
||||
transfer.deinit();
|
||||
return err;
|
||||
};
|
||||
@@ -843,10 +742,7 @@ fn processOneMessage(self: *Client, msg: http.Handles.MultiMessage, transfer: *T
|
||||
// TODO give a way to configure the number of auth retries.
|
||||
if (transfer._auth_challenge != null and transfer._tries < 10) {
|
||||
var wait_for_interception = false;
|
||||
transfer.req.notification.dispatch(
|
||||
.http_request_auth_required,
|
||||
&.{ .transfer = transfer, .wait_for_interception = &wait_for_interception },
|
||||
);
|
||||
transfer.req.notification.dispatch(.http_request_auth_required, &.{ .transfer = transfer, .wait_for_interception = &wait_for_interception });
|
||||
if (wait_for_interception) {
|
||||
self.intercepted += 1;
|
||||
if (comptime IS_DEBUG) {
|
||||
@@ -945,11 +841,10 @@ fn processOneMessage(self: *Client, msg: http.Handles.MultiMessage, transfer: *T
|
||||
}
|
||||
}
|
||||
|
||||
const body = transfer._stream_buffer.items;
|
||||
|
||||
// Replay buffered body through user's data_callback.
|
||||
if (transfer._stream_buffer.items.len > 0) {
|
||||
try transfer.req.data_callback(Response.fromTransfer(transfer), body);
|
||||
const body = transfer._stream_buffer.items;
|
||||
try transfer.req.data_callback(transfer, body);
|
||||
|
||||
transfer.req.notification.dispatch(.http_response_data, &.{
|
||||
.data = body,
|
||||
@@ -962,19 +857,11 @@ fn processOneMessage(self: *Client, msg: http.Handles.MultiMessage, transfer: *T
|
||||
}
|
||||
}
|
||||
|
||||
if (transfer._pending_cache_metadata) |metadata| {
|
||||
const cache = &self.network.cache.?;
|
||||
cache.put(metadata.*, body) catch |err| {
|
||||
log.warn(.cache, "cache put failed", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
// release conn ASAP so that it's available; some done_callbacks
|
||||
// will load more resources.
|
||||
transfer.releaseConn();
|
||||
|
||||
try transfer.req.done_callback(transfer.req.ctx);
|
||||
|
||||
try transfer.req.done_callback(transfer.ctx);
|
||||
transfer.req.notification.dispatch(.http_request_done, &.{
|
||||
.transfer = transfer,
|
||||
});
|
||||
@@ -1052,9 +939,9 @@ pub const Request = struct {
|
||||
// arbitrary data that can be associated with this request
|
||||
ctx: *anyopaque = undefined,
|
||||
|
||||
start_callback: ?*const fn (response: Response) anyerror!void = null,
|
||||
header_callback: *const fn (response: Response) anyerror!bool,
|
||||
data_callback: *const fn (response: Response, data: []const u8) anyerror!void,
|
||||
start_callback: ?*const fn (transfer: *Transfer) anyerror!void = null,
|
||||
header_callback: *const fn (transfer: *Transfer) anyerror!bool,
|
||||
data_callback: *const fn (transfer: *Transfer, data: []const u8) anyerror!void,
|
||||
done_callback: *const fn (ctx: *anyopaque) anyerror!void,
|
||||
error_callback: *const fn (ctx: *anyopaque, err: anyerror) void,
|
||||
shutdown_callback: ?*const fn (ctx: *anyopaque) void = null,
|
||||
@@ -1080,91 +967,16 @@ pub const Request = struct {
|
||||
};
|
||||
};
|
||||
|
||||
pub const Response = struct {
|
||||
ctx: *anyopaque,
|
||||
inner: union(enum) {
|
||||
transfer: *Transfer,
|
||||
cached: *const CachedResponse,
|
||||
},
|
||||
|
||||
pub fn fromTransfer(transfer: *Transfer) Response {
|
||||
return .{ .ctx = transfer.req.ctx, .inner = .{ .transfer = transfer } };
|
||||
}
|
||||
|
||||
pub fn fromCached(ctx: *anyopaque, resp: *const CachedResponse) Response {
|
||||
return .{ .ctx = ctx, .inner = .{ .cached = resp } };
|
||||
}
|
||||
|
||||
pub fn status(self: Response) ?u16 {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| if (t.response_header) |rh| rh.status else null,
|
||||
.cached => |c| c.metadata.status,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn contentType(self: Response) ?[]const u8 {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| if (t.response_header) |*rh| rh.contentType() else null,
|
||||
.cached => |c| c.metadata.content_type,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn contentLength(self: Response) ?u32 {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| t.getContentLength(),
|
||||
.cached => |c| switch (c.data) {
|
||||
.buffer => |buf| @intCast(buf.len),
|
||||
.file => |f| @intCast(f.len),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn redirectCount(self: Response) ?u32 {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| if (t.response_header) |rh| rh.redirect_count else null,
|
||||
.cached => 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn url(self: Response) [:0]const u8 {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| t.url,
|
||||
.cached => |c| c.metadata.url,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn headerIterator(self: Response) HeaderIterator {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| t.responseHeaderIterator(),
|
||||
.cached => |c| HeaderIterator{ .list = .{ .list = c.metadata.headers } },
|
||||
};
|
||||
}
|
||||
|
||||
pub fn abort(self: Response, err: anyerror) void {
|
||||
switch (self.inner) {
|
||||
.transfer => |t| t.abort(err),
|
||||
.cached => {},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(self: Response, writer: *std.Io.Writer) !void {
|
||||
return switch (self.inner) {
|
||||
.transfer => |t| try t.format(writer),
|
||||
.cached => |c| try c.format(writer),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const Transfer = struct {
|
||||
arena: ArenaAllocator,
|
||||
id: u32 = 0,
|
||||
req: Request,
|
||||
url: [:0]const u8,
|
||||
ctx: *anyopaque, // copied from req.ctx to make it easier for callback handlers
|
||||
client: *Client,
|
||||
// total bytes received in the response, including the response status line,
|
||||
// the headers, and the [encoded] body.
|
||||
bytes_received: usize = 0,
|
||||
_pending_cache_metadata: ?*CacheMetadata = null,
|
||||
|
||||
aborted: bool = false,
|
||||
|
||||
@@ -1253,7 +1065,7 @@ pub const Transfer = struct {
|
||||
// as abort (doesn't send a notification, doesn't invoke an error callback)
|
||||
fn kill(self: *Transfer) void {
|
||||
if (self.req.shutdown_callback) |cb| {
|
||||
cb(self.req.ctx);
|
||||
cb(self.ctx);
|
||||
}
|
||||
|
||||
if (self._performing or self.client.performing) {
|
||||
@@ -1289,9 +1101,9 @@ pub const Transfer = struct {
|
||||
});
|
||||
|
||||
if (execute_callback) {
|
||||
self.req.error_callback(self.req.ctx, err);
|
||||
self.req.error_callback(self.ctx, err);
|
||||
} else if (self.req.shutdown_callback) |cb| {
|
||||
cb(self.req.ctx);
|
||||
cb(self.ctx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1540,61 +1352,11 @@ pub const Transfer = struct {
|
||||
.transfer = transfer,
|
||||
});
|
||||
|
||||
const proceed = transfer.req.header_callback(Response.fromTransfer(transfer)) catch |err| {
|
||||
const proceed = transfer.req.header_callback(transfer) catch |err| {
|
||||
log.err(.http, "header_callback", .{ .err = err, .req = transfer });
|
||||
return err;
|
||||
};
|
||||
|
||||
if (transfer.client.network.cache != null and transfer.req.method == .GET) {
|
||||
const rh = &transfer.response_header.?;
|
||||
const allocator = transfer.arena.allocator();
|
||||
|
||||
const vary = if (conn.getResponseHeader("vary", 0)) |h| h.value else null;
|
||||
|
||||
const maybe_cm = try Cache.tryCache(
|
||||
allocator,
|
||||
std.time.timestamp(),
|
||||
transfer.url,
|
||||
rh.status,
|
||||
rh.contentType(),
|
||||
if (conn.getResponseHeader("cache-control", 0)) |h| h.value else null,
|
||||
vary,
|
||||
if (conn.getResponseHeader("age", 0)) |h| h.value else null,
|
||||
conn.getResponseHeader("set-cookie", 0) != null,
|
||||
conn.getResponseHeader("authorization", 0) != null,
|
||||
);
|
||||
|
||||
if (maybe_cm) |cm| {
|
||||
var iter = transfer.responseHeaderIterator();
|
||||
var header_list = try iter.collect(allocator);
|
||||
const end_of_response = header_list.items.len;
|
||||
|
||||
if (vary) |vary_str| {
|
||||
var req_it = transfer.req.headers.iterator();
|
||||
|
||||
while (req_it.next()) |hdr| {
|
||||
var vary_iter = std.mem.splitScalar(u8, vary_str, ',');
|
||||
|
||||
while (vary_iter.next()) |part| {
|
||||
const name = std.mem.trim(u8, part, &std.ascii.whitespace);
|
||||
if (std.ascii.eqlIgnoreCase(hdr.name, name)) {
|
||||
try header_list.append(allocator, .{
|
||||
.name = try allocator.dupe(u8, hdr.name),
|
||||
.value = try allocator.dupe(u8, hdr.value),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const metadata = try transfer.arena.allocator().create(CacheMetadata);
|
||||
metadata.* = cm;
|
||||
metadata.headers = header_list.items[0..end_of_response];
|
||||
metadata.vary_headers = header_list.items[end_of_response..];
|
||||
transfer._pending_cache_metadata = metadata;
|
||||
}
|
||||
}
|
||||
|
||||
return proceed and transfer.aborted == false;
|
||||
}
|
||||
|
||||
@@ -1693,7 +1455,7 @@ pub const Transfer = struct {
|
||||
fn _fulfill(transfer: *Transfer, status: u16, headers: []const http.Header, body: ?[]const u8) !void {
|
||||
const req = &transfer.req;
|
||||
if (req.start_callback) |cb| {
|
||||
try cb(Response.fromTransfer(transfer));
|
||||
try cb(transfer);
|
||||
}
|
||||
|
||||
transfer.response_header = .{
|
||||
@@ -1712,13 +1474,13 @@ pub const Transfer = struct {
|
||||
}
|
||||
|
||||
lp.assert(transfer._header_done_called == false, "Transfer.fulfill header_done_called", .{});
|
||||
if (try req.header_callback(Response.fromTransfer(transfer)) == false) {
|
||||
if (try req.header_callback(transfer) == false) {
|
||||
transfer.abort(error.Abort);
|
||||
return;
|
||||
}
|
||||
|
||||
if (body) |b| {
|
||||
try req.data_callback(Response.fromTransfer(transfer), b);
|
||||
try req.data_callback(transfer, b);
|
||||
}
|
||||
|
||||
try req.done_callback(req.ctx);
|
||||
@@ -1755,10 +1517,10 @@ pub const Transfer = struct {
|
||||
};
|
||||
|
||||
const Noop = struct {
|
||||
fn headerCallback(_: Response) !bool {
|
||||
fn headerCallback(_: *Transfer) !bool {
|
||||
return true;
|
||||
}
|
||||
fn dataCallback(_: Response, _: []const u8) !void {}
|
||||
fn dataCallback(_: *Transfer, _: []const u8) !void {}
|
||||
fn doneCallback(_: *anyopaque) !void {}
|
||||
fn errorCallback(_: *anyopaque, _: anyerror) void {}
|
||||
};
|
||||
|
||||
@@ -27,9 +27,6 @@ charset: [41]u8 = default_charset,
|
||||
charset_len: usize = default_charset_len,
|
||||
is_default_charset: bool = true,
|
||||
|
||||
type_buf: [127]u8 = @splat(0),
|
||||
sub_type_buf: [127]u8 = @splat(0),
|
||||
|
||||
/// String "UTF-8" continued by null characters.
|
||||
const default_charset = .{ 'U', 'T', 'F', '-', '8' } ++ .{0} ** 36;
|
||||
const default_charset_len = 5;
|
||||
@@ -64,10 +61,7 @@ pub const ContentType = union(ContentTypeEnum) {
|
||||
image_webp: void,
|
||||
application_json: void,
|
||||
unknown: void,
|
||||
other: struct {
|
||||
type: []const u8,
|
||||
sub_type: []const u8,
|
||||
},
|
||||
other: struct { type: []const u8, sub_type: []const u8 },
|
||||
};
|
||||
|
||||
pub fn contentTypeString(mime: *const Mime) []const u8 {
|
||||
@@ -118,18 +112,17 @@ fn parseCharset(value: []const u8) error{ CharsetTooBig, Invalid }![]const u8 {
|
||||
return value;
|
||||
}
|
||||
|
||||
pub fn parse(input: []const u8) !Mime {
|
||||
pub fn parse(input: []u8) !Mime {
|
||||
if (input.len > 255) {
|
||||
return error.TooBig;
|
||||
}
|
||||
|
||||
var buf: [255]u8 = undefined;
|
||||
const normalized = std.ascii.lowerString(&buf, std.mem.trim(u8, input, &std.ascii.whitespace));
|
||||
// Zig's trim API is broken. The return type is always `[]const u8`,
|
||||
// even if the input type is `[]u8`. @constCast is safe here.
|
||||
var normalized = @constCast(std.mem.trim(u8, input, &std.ascii.whitespace));
|
||||
_ = std.ascii.lowerString(normalized, normalized);
|
||||
|
||||
var mime = Mime{ .content_type = undefined };
|
||||
|
||||
const content_type, const type_len = try parseContentType(normalized, &mime.type_buf, &mime.sub_type_buf);
|
||||
const content_type, const type_len = try parseContentType(normalized);
|
||||
if (type_len >= normalized.len) {
|
||||
return .{ .content_type = content_type };
|
||||
}
|
||||
@@ -170,12 +163,13 @@ pub fn parse(input: []const u8) !Mime {
|
||||
}
|
||||
}
|
||||
|
||||
mime.params = params;
|
||||
mime.charset = charset;
|
||||
mime.charset_len = charset_len;
|
||||
mime.content_type = content_type;
|
||||
mime.is_default_charset = !has_explicit_charset;
|
||||
return mime;
|
||||
return .{
|
||||
.params = params,
|
||||
.charset = charset,
|
||||
.charset_len = charset_len,
|
||||
.content_type = content_type,
|
||||
.is_default_charset = !has_explicit_charset,
|
||||
};
|
||||
}
|
||||
|
||||
/// Prescan the first 1024 bytes of an HTML document for a charset declaration.
|
||||
@@ -401,7 +395,7 @@ pub fn isText(mime: *const Mime) bool {
|
||||
}
|
||||
|
||||
// we expect value to be lowercase
|
||||
fn parseContentType(value: []const u8, type_buf: []u8, sub_type_buf: []u8) !struct { ContentType, usize } {
|
||||
fn parseContentType(value: []const u8) !struct { ContentType, usize } {
|
||||
const end = std.mem.indexOfScalarPos(u8, value, 0, ';') orelse value.len;
|
||||
const type_name = trimRight(value[0..end]);
|
||||
const attribute_start = end + 1;
|
||||
@@ -450,18 +444,10 @@ fn parseContentType(value: []const u8, type_buf: []u8, sub_type_buf: []u8) !stru
|
||||
return error.Invalid;
|
||||
}
|
||||
|
||||
@memcpy(type_buf[0..main_type.len], main_type);
|
||||
@memcpy(sub_type_buf[0..sub_type.len], sub_type);
|
||||
|
||||
return .{
|
||||
.{
|
||||
.other = .{
|
||||
.type = type_buf[0..main_type.len],
|
||||
.sub_type = sub_type_buf[0..sub_type.len],
|
||||
},
|
||||
},
|
||||
attribute_start,
|
||||
};
|
||||
return .{ .{ .other = .{
|
||||
.type = main_type,
|
||||
.sub_type = sub_type,
|
||||
} }, attribute_start };
|
||||
}
|
||||
|
||||
const VALID_CODEPOINTS = blk: {
|
||||
@@ -475,13 +461,6 @@ const VALID_CODEPOINTS = blk: {
|
||||
break :blk v;
|
||||
};
|
||||
|
||||
pub fn typeString(self: *const Mime) []const u8 {
|
||||
return switch (self.content_type) {
|
||||
.other => |o| o.type[0..o.type_len],
|
||||
else => "",
|
||||
};
|
||||
}
|
||||
|
||||
fn validType(value: []const u8) bool {
|
||||
for (value) |b| {
|
||||
if (VALID_CODEPOINTS[b] == false) {
|
||||
|
||||
@@ -886,10 +886,12 @@ fn notifyParentLoadComplete(self: *Page) void {
|
||||
parent.iframeCompletedLoading(self.iframe.?);
|
||||
}
|
||||
|
||||
fn pageHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
var self: *Page = @ptrCast(@alignCast(response.ctx));
|
||||
fn pageHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
var self: *Page = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
const response_url = response.url();
|
||||
const header = &transfer.response_header.?;
|
||||
|
||||
const response_url = std.mem.span(header.url);
|
||||
if (std.mem.eql(u8, response_url, self.url) == false) {
|
||||
// would be different than self.url in the case of a redirect
|
||||
self.url = try self.arena.dupeZ(u8, response_url);
|
||||
@@ -903,8 +905,8 @@ fn pageHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.page, "navigate header", .{
|
||||
.url = self.url,
|
||||
.status = response.status(),
|
||||
.content_type = response.contentType(),
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
.type = self._type,
|
||||
});
|
||||
}
|
||||
@@ -925,14 +927,14 @@ fn pageHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn pageDataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
var self: *Page = @ptrCast(@alignCast(response.ctx));
|
||||
fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
var self: *Page = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
if (self._parse_state == .pre) {
|
||||
// we lazily do this, because we might need the first chunk of data
|
||||
// to sniff the content type
|
||||
var mime: Mime = blk: {
|
||||
if (response.contentType()) |ct| {
|
||||
if (transfer.response_header.?.contentType()) |ct| {
|
||||
break :blk try Mime.parse(ct);
|
||||
}
|
||||
break :blk Mime.sniff(data);
|
||||
|
||||
@@ -273,24 +273,6 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
// Let the outer errdefer handle releasing the arena if client.request fails
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
page.js.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
log.debug(.http, "script queue", .{
|
||||
.ctx = ctx,
|
||||
.url = remote_url.?,
|
||||
.element = element,
|
||||
.stack = ls.local.stackTrace() catch "???",
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const was_evaluating = self.is_evaluating;
|
||||
self.is_evaluating = true;
|
||||
defer self.is_evaluating = was_evaluating;
|
||||
|
||||
try self.client.request(.{
|
||||
.url = url,
|
||||
.ctx = script,
|
||||
@@ -308,9 +290,20 @@ pub fn addFromElement(self: *ScriptManager, comptime from_parser: bool, script_e
|
||||
.done_callback = Script.doneCallback,
|
||||
.error_callback = Script.errorCallback,
|
||||
});
|
||||
}
|
||||
|
||||
handover = true;
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
page.js.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
log.debug(.http, "script queue", .{
|
||||
.ctx = ctx,
|
||||
.url = remote_url.?,
|
||||
.element = element,
|
||||
.stack = ls.local.stackTrace() catch "???",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (is_blocking == false) {
|
||||
@@ -701,33 +694,32 @@ pub const Script = struct {
|
||||
self.manager.page.releaseArena(self.arena);
|
||||
}
|
||||
|
||||
fn startCallback(response: HttpClient.Response) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = response });
|
||||
fn startCallback(transfer: *HttpClient.Transfer) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer });
|
||||
}
|
||||
|
||||
fn headerCallback(response: HttpClient.Response) !bool {
|
||||
const self: *Script = @ptrCast(@alignCast(response.ctx));
|
||||
|
||||
self.status = response.status().?;
|
||||
if (response.status() != 200) {
|
||||
fn headerCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
const header = &transfer.response_header.?;
|
||||
self.status = header.status;
|
||||
if (header.status != 200) {
|
||||
log.info(.http, "script header", .{
|
||||
.req = response,
|
||||
.status = response.status(),
|
||||
.content_type = response.contentType(),
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "script header", .{
|
||||
.req = response,
|
||||
.status = response.status(),
|
||||
.content_type = response.contentType(),
|
||||
.req = transfer,
|
||||
.status = header.status,
|
||||
.content_type = header.contentType(),
|
||||
});
|
||||
}
|
||||
|
||||
switch (response.inner) {
|
||||
.transfer => |transfer| {
|
||||
{
|
||||
// temp debug, trying to figure out why the next assert sometimes
|
||||
// fails. Is the buffer just corrupt or is headerCallback really
|
||||
// being called twice?
|
||||
@@ -759,28 +751,25 @@ pub const Script = struct {
|
||||
self.debug_transfer_intercept_state = @intFromEnum(transfer._intercept_state);
|
||||
self.debug_transfer_auth_challenge = transfer._auth_challenge != null;
|
||||
self.debug_transfer_easy_id = if (transfer._conn) |c| @intFromPtr(c._easy) else 0;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity });
|
||||
var buffer: std.ArrayList(u8) = .empty;
|
||||
if (response.contentLength()) |cl| {
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try buffer.ensureTotalCapacity(self.arena, cl);
|
||||
}
|
||||
self.source = .{ .remote = buffer };
|
||||
return true;
|
||||
}
|
||||
|
||||
fn dataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
const self: *Script = @ptrCast(@alignCast(response.ctx));
|
||||
self._dataCallback(response, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = response, .len = data.len });
|
||||
fn dataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
self._dataCallback(transfer, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
fn _dataCallback(self: *Script, _: HttpClient.Response, data: []const u8) !void {
|
||||
fn _dataCallback(self: *Script, _: *HttpClient.Transfer, data: []const u8) !void {
|
||||
try self.source.remote.appendSlice(self.arena, data);
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ const log = @import("../../log.zig");
|
||||
const string = @import("../../string.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const WorkerGlobalScope = @import("../webapi/WorkerGlobalScope.zig");
|
||||
|
||||
const js = @import("js.zig");
|
||||
const Local = @import("Local.zig");
|
||||
@@ -54,9 +55,9 @@ fn initWithContext(self: *Caller, ctx: *Context, v8_context: *const v8.Context)
|
||||
.isolate = ctx.isolate,
|
||||
},
|
||||
.prev_local = ctx.local,
|
||||
.prev_context = ctx.page.js,
|
||||
.prev_context = ctx.global.getJs(),
|
||||
};
|
||||
ctx.page.js = ctx;
|
||||
ctx.global.setJs(ctx);
|
||||
ctx.local = &self.local;
|
||||
}
|
||||
|
||||
@@ -87,7 +88,7 @@ pub fn deinit(self: *Caller) void {
|
||||
|
||||
ctx.call_depth = call_depth;
|
||||
ctx.local = self.prev_local;
|
||||
ctx.page.js = self.prev_context;
|
||||
ctx.global.setJs(self.prev_context);
|
||||
}
|
||||
|
||||
pub const CallOpts = struct {
|
||||
@@ -169,7 +170,7 @@ fn _getIndex(comptime T: type, local: *const Local, func: anytype, idx: u32, inf
|
||||
@field(args, "0") = try TaggedOpaque.fromJS(*T, info.getThis());
|
||||
@field(args, "1") = idx;
|
||||
if (@typeInfo(F).@"fn".params.len == 3) {
|
||||
@field(args, "2") = local.ctx.page;
|
||||
@field(args, "2") = getGlobalArg(@TypeOf(args.@"2"), local.ctx);
|
||||
}
|
||||
const ret = @call(.auto, func, args);
|
||||
return handleIndexedReturn(T, F, true, local, ret, info, opts);
|
||||
@@ -196,7 +197,7 @@ fn _getNamedIndex(comptime T: type, local: *const Local, func: anytype, name: *c
|
||||
@field(args, "0") = try TaggedOpaque.fromJS(*T, info.getThis());
|
||||
@field(args, "1") = try nameToString(local, @TypeOf(args.@"1"), name);
|
||||
if (@typeInfo(F).@"fn".params.len == 3) {
|
||||
@field(args, "2") = local.ctx.page;
|
||||
@field(args, "2") = getGlobalArg(@TypeOf(args.@"2"), local.ctx);
|
||||
}
|
||||
const ret = @call(.auto, func, args);
|
||||
return handleIndexedReturn(T, F, true, local, ret, info, opts);
|
||||
@@ -224,7 +225,7 @@ fn _setNamedIndex(comptime T: type, local: *const Local, func: anytype, name: *c
|
||||
@field(args, "1") = try nameToString(local, @TypeOf(args.@"1"), name);
|
||||
@field(args, "2") = try local.jsValueToZig(@TypeOf(@field(args, "2")), js_value);
|
||||
if (@typeInfo(F).@"fn".params.len == 4) {
|
||||
@field(args, "3") = local.ctx.page;
|
||||
@field(args, "3") = getGlobalArg(@TypeOf(args.@"3"), local.ctx);
|
||||
}
|
||||
const ret = @call(.auto, func, args);
|
||||
return handleIndexedReturn(T, F, false, local, ret, info, opts);
|
||||
@@ -250,7 +251,7 @@ fn _deleteNamedIndex(comptime T: type, local: *const Local, func: anytype, name:
|
||||
@field(args, "0") = try TaggedOpaque.fromJS(*T, info.getThis());
|
||||
@field(args, "1") = try nameToString(local, @TypeOf(args.@"1"), name);
|
||||
if (@typeInfo(F).@"fn".params.len == 3) {
|
||||
@field(args, "2") = local.ctx.page;
|
||||
@field(args, "2") = getGlobalArg(@TypeOf(args.@"2"), local.ctx);
|
||||
}
|
||||
const ret = @call(.auto, func, args);
|
||||
return handleIndexedReturn(T, F, false, local, ret, info, opts);
|
||||
@@ -276,7 +277,7 @@ fn _getEnumerator(comptime T: type, local: *const Local, func: anytype, info: Pr
|
||||
var args: ParameterTypes(F) = undefined;
|
||||
@field(args, "0") = try TaggedOpaque.fromJS(*T, info.getThis());
|
||||
if (@typeInfo(F).@"fn".params.len == 2) {
|
||||
@field(args, "1") = local.ctx.page;
|
||||
@field(args, "1") = getGlobalArg(@TypeOf(args.@"1"), local.ctx);
|
||||
}
|
||||
const ret = @call(.auto, func, args);
|
||||
return handleIndexedReturn(T, F, true, local, ret, info, opts);
|
||||
@@ -434,6 +435,25 @@ fn isPage(comptime T: type) bool {
|
||||
return T == *Page or T == *const Page;
|
||||
}
|
||||
|
||||
fn isExecution(comptime T: type) bool {
|
||||
return T == *js.Execution or T == *const js.Execution;
|
||||
}
|
||||
|
||||
fn getGlobalArg(comptime T: type, ctx: *Context) T {
|
||||
if (comptime isPage(T)) {
|
||||
return switch (ctx.global) {
|
||||
.page => |page| page,
|
||||
.worker => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
if (comptime isExecution(T)) {
|
||||
return &ctx.execution;
|
||||
}
|
||||
|
||||
@compileError("Unsupported global arg type: " ++ @typeName(T));
|
||||
}
|
||||
|
||||
// These wrap the raw v8 C API to provide a cleaner interface.
|
||||
pub const FunctionCallbackInfo = struct {
|
||||
handle: *const v8.FunctionCallbackInfo,
|
||||
@@ -702,15 +722,16 @@ fn getArgs(comptime F: type, comptime offset: usize, local: *const Local, info:
|
||||
return args;
|
||||
}
|
||||
|
||||
// If the last parameter is the Page, set it, and exclude it
|
||||
// If the last parameter is the Page or Worker, set it, and exclude it
|
||||
// from our params slice, because we don't want to bind it to
|
||||
// a JS argument
|
||||
if (comptime isPage(params[params.len - 1].type.?)) {
|
||||
@field(args, tupleFieldName(params.len - 1 + offset)) = local.ctx.page;
|
||||
const LastParamType = params[params.len - 1].type.?;
|
||||
if (comptime isPage(LastParamType) or isExecution(LastParamType)) {
|
||||
@field(args, tupleFieldName(params.len - 1 + offset)) = getGlobalArg(LastParamType, local.ctx);
|
||||
break :blk params[0 .. params.len - 1];
|
||||
}
|
||||
|
||||
// we have neither a Page nor a JsObject. All params must be
|
||||
// we have neither a Page, Execution, nor a JsObject. All params must be
|
||||
// bound to a JavaScript value.
|
||||
break :blk params;
|
||||
};
|
||||
@@ -759,7 +780,9 @@ fn getArgs(comptime F: type, comptime offset: usize, local: *const Local, info:
|
||||
}
|
||||
|
||||
if (comptime isPage(param.type.?)) {
|
||||
@compileError("Page must be the last parameter (or 2nd last if there's a JsThis): " ++ @typeName(F));
|
||||
@compileError("Page must be the last parameter: " ++ @typeName(F));
|
||||
} else if (comptime isExecution(param.type.?)) {
|
||||
@compileError("Execution must be the last parameter: " ++ @typeName(F));
|
||||
} else if (i >= js_parameter_count) {
|
||||
if (@typeInfo(param.type.?) != .optional) {
|
||||
return error.InvalidArgument;
|
||||
|
||||
@@ -25,10 +25,12 @@ const bridge = @import("bridge.zig");
|
||||
const Env = @import("Env.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
const Scheduler = @import("Scheduler.zig");
|
||||
const Execution = @import("Execution.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const ScriptManager = @import("../ScriptManager.zig");
|
||||
const WorkerGlobalScope = @import("../webapi/WorkerGlobalScope.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
const Caller = js.Caller;
|
||||
@@ -37,12 +39,38 @@ const Allocator = std.mem.Allocator;
|
||||
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
// Loosely maps to a Browser Page.
|
||||
// Loosely maps to a Browser Page or Worker.
|
||||
const Context = @This();
|
||||
|
||||
pub const GlobalScope = union(enum) {
|
||||
page: *Page,
|
||||
worker: *WorkerGlobalScope,
|
||||
|
||||
pub fn base(self: GlobalScope) [:0]const u8 {
|
||||
return switch (self) {
|
||||
.page => |page| page.base(),
|
||||
.worker => |worker| worker.base(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getJs(self: GlobalScope) *Context {
|
||||
return switch (self) {
|
||||
.page => |page| page.js,
|
||||
.worker => |worker| worker.js,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn setJs(self: GlobalScope, ctx: *Context) void {
|
||||
switch (self) {
|
||||
.page => |page| page.js = ctx,
|
||||
.worker => |worker| worker.js = ctx,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
id: usize,
|
||||
env: *Env,
|
||||
page: *Page,
|
||||
global: GlobalScope,
|
||||
session: *Session,
|
||||
isolate: js.Isolate,
|
||||
|
||||
@@ -111,6 +139,10 @@ script_manager: ?*ScriptManager,
|
||||
// Our macrotasks
|
||||
scheduler: Scheduler,
|
||||
|
||||
// Execution context for worker-compatible APIs. This provides a common
|
||||
// interface that works in both Page and Worker contexts.
|
||||
execution: Execution,
|
||||
|
||||
unknown_properties: (if (IS_DEBUG) std.StringHashMapUnmanaged(UnknownPropertyStat) else void) = if (IS_DEBUG) .{} else {},
|
||||
|
||||
const ModuleEntry = struct {
|
||||
@@ -257,7 +289,16 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type
|
||||
}
|
||||
|
||||
pub fn getIncumbent(self: *Context) *Page {
|
||||
return fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).?.page;
|
||||
const ctx = fromC(v8.v8__Isolate__GetIncumbentContext(self.env.isolate.handle).?).?;
|
||||
return switch (ctx.global) {
|
||||
.page => |page| page,
|
||||
.worker => {
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
unreachable;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn stringToPersistedFunction(
|
||||
@@ -527,7 +568,7 @@ pub fn dynamicModuleCallback(
|
||||
if (resource_value.isNullOrUndefined()) {
|
||||
// will only be null / undefined in extreme cases (e.g. WPT tests)
|
||||
// where you're
|
||||
break :blk self.page.base();
|
||||
break :blk self.global.base();
|
||||
}
|
||||
|
||||
break :blk js.String.toSliceZ(.{ .local = &local, .handle = resource_name.? }) catch |err| {
|
||||
@@ -867,17 +908,16 @@ pub fn enter(self: *Context, hs: *js.HandleScope) Entered {
|
||||
const isolate = self.isolate;
|
||||
js.HandleScope.init(hs, isolate);
|
||||
|
||||
const page = self.page;
|
||||
const original = page.js;
|
||||
page.js = self;
|
||||
const original = self.global.getJs();
|
||||
self.global.setJs(self);
|
||||
|
||||
const handle: *const v8.Context = @ptrCast(v8.v8__Global__Get(&self.handle, isolate.handle));
|
||||
v8.v8__Context__Enter(handle);
|
||||
return .{ .original = original, .handle = handle, .handle_scope = hs };
|
||||
return .{ .original = original, .handle = handle, .handle_scope = hs, .global = self.global };
|
||||
}
|
||||
|
||||
const Entered = struct {
|
||||
// the context we should restore on the page
|
||||
// the context we should restore on the page/worker
|
||||
original: *Context,
|
||||
|
||||
// the handle of the entered context
|
||||
@@ -885,8 +925,10 @@ const Entered = struct {
|
||||
|
||||
handle_scope: *js.HandleScope,
|
||||
|
||||
global: GlobalScope,
|
||||
|
||||
pub fn exit(self: Entered) void {
|
||||
self.original.page.js = self.original;
|
||||
self.global.setJs(self.original);
|
||||
v8.v8__Context__Exit(self.handle);
|
||||
self.handle_scope.deinit();
|
||||
}
|
||||
@@ -895,7 +937,10 @@ const Entered = struct {
|
||||
pub fn queueMutationDelivery(self: *Context) !void {
|
||||
self.enqueueMicrotask(struct {
|
||||
fn run(ctx: *Context) void {
|
||||
ctx.page.deliverMutations();
|
||||
switch (ctx.global) {
|
||||
.page => |page| page.deliverMutations(),
|
||||
.worker => unreachable,
|
||||
}
|
||||
}
|
||||
}.run);
|
||||
}
|
||||
@@ -903,7 +948,10 @@ pub fn queueMutationDelivery(self: *Context) !void {
|
||||
pub fn queueIntersectionChecks(self: *Context) !void {
|
||||
self.enqueueMicrotask(struct {
|
||||
fn run(ctx: *Context) void {
|
||||
ctx.page.performScheduledIntersectionChecks();
|
||||
switch (ctx.global) {
|
||||
.page => |page| page.performScheduledIntersectionChecks(),
|
||||
.worker => unreachable,
|
||||
}
|
||||
}
|
||||
}.run);
|
||||
}
|
||||
@@ -911,7 +959,10 @@ pub fn queueIntersectionChecks(self: *Context) !void {
|
||||
pub fn queueIntersectionDelivery(self: *Context) !void {
|
||||
self.enqueueMicrotask(struct {
|
||||
fn run(ctx: *Context) void {
|
||||
ctx.page.deliverIntersections();
|
||||
switch (ctx.global) {
|
||||
.page => |page| page.deliverIntersections(),
|
||||
.worker => unreachable,
|
||||
}
|
||||
}
|
||||
}.run);
|
||||
}
|
||||
@@ -919,7 +970,10 @@ pub fn queueIntersectionDelivery(self: *Context) !void {
|
||||
pub fn queueSlotchangeDelivery(self: *Context) !void {
|
||||
self.enqueueMicrotask(struct {
|
||||
fn run(ctx: *Context) void {
|
||||
ctx.page.deliverSlotchangeEvents();
|
||||
switch (ctx.global) {
|
||||
.page => |page| page.deliverSlotchangeEvents(),
|
||||
.worker => unreachable,
|
||||
}
|
||||
}
|
||||
}.run);
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ const Inspector = @import("Inspector.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Window = @import("../webapi/Window.zig");
|
||||
const WorkerGlobalScope = @import("../webapi/WorkerGlobalScope.zig");
|
||||
|
||||
const JsApis = bridge.JsApis;
|
||||
const Allocator = std.mem.Allocator;
|
||||
@@ -83,9 +84,6 @@ eternal_function_templates: []v8.Eternal,
|
||||
// Dynamic slice to avoid circular dependency on JsApis.len at comptime
|
||||
templates: []*const v8.FunctionTemplate,
|
||||
|
||||
// Global template created once per isolate and reused across all contexts
|
||||
global_template: v8.Eternal,
|
||||
|
||||
// Inspector associated with the Isolate. Exists when CDP is being used.
|
||||
inspector: ?*Inspector,
|
||||
|
||||
@@ -146,7 +144,6 @@ pub fn init(app: *App, opts: InitOpts) !Env {
|
||||
const templates = try allocator.alloc(*const v8.FunctionTemplate, JsApis.len);
|
||||
errdefer allocator.free(templates);
|
||||
|
||||
var global_eternal: v8.Eternal = undefined;
|
||||
var private_symbols: PrivateSymbols = undefined;
|
||||
{
|
||||
var temp_scope: js.HandleScope = undefined;
|
||||
@@ -164,44 +161,6 @@ pub fn init(app: *App, opts: InitOpts) !Env {
|
||||
templates[i] = @ptrCast(@alignCast(eternal_ptr.?));
|
||||
}
|
||||
|
||||
// Create global template once per isolate
|
||||
const js_global = v8.v8__FunctionTemplate__New__DEFAULT(isolate_handle);
|
||||
const window_name = v8.v8__String__NewFromUtf8(isolate_handle, "Window", v8.kNormal, 6);
|
||||
v8.v8__FunctionTemplate__SetClassName(js_global, window_name);
|
||||
|
||||
// Find Window in JsApis by name (avoids circular import)
|
||||
const window_index = comptime bridge.JsApiLookup.getId(Window.JsApi);
|
||||
v8.v8__FunctionTemplate__Inherit(js_global, templates[window_index]);
|
||||
|
||||
const global_template_local = v8.v8__FunctionTemplate__InstanceTemplate(js_global).?;
|
||||
v8.v8__ObjectTemplate__SetNamedHandler(global_template_local, &.{
|
||||
.getter = bridge.unknownWindowPropertyCallback,
|
||||
.setter = null,
|
||||
.query = null,
|
||||
.deleter = null,
|
||||
.enumerator = null,
|
||||
.definer = null,
|
||||
.descriptor = null,
|
||||
.data = null,
|
||||
.flags = v8.kOnlyInterceptStrings | v8.kNonMasking,
|
||||
});
|
||||
// I don't 100% understand this. We actually set this up in the snapshot,
|
||||
// but for the global instance, it doesn't work. SetIndexedHandler and
|
||||
// SetNamedHandler are set on the Instance template, and that's the key
|
||||
// difference. The context has its own global instance, so we need to set
|
||||
// these back up directly on it. There might be a better way to do this.
|
||||
v8.v8__ObjectTemplate__SetIndexedHandler(global_template_local, &.{
|
||||
.getter = Window.JsApi.index.getter,
|
||||
.setter = null,
|
||||
.query = null,
|
||||
.deleter = null,
|
||||
.enumerator = null,
|
||||
.definer = null,
|
||||
.descriptor = null,
|
||||
.data = null,
|
||||
.flags = 0,
|
||||
});
|
||||
v8.v8__Eternal__New(isolate_handle, @ptrCast(global_template_local), &global_eternal);
|
||||
private_symbols = PrivateSymbols.init(isolate_handle);
|
||||
}
|
||||
|
||||
@@ -221,7 +180,6 @@ pub fn init(app: *App, opts: InitOpts) !Env {
|
||||
.templates = templates,
|
||||
.isolate_params = params,
|
||||
.inspector = inspector,
|
||||
.global_template = global_eternal,
|
||||
.private_symbols = private_symbols,
|
||||
.microtask_queues_are_running = false,
|
||||
.eternal_function_templates = eternal_function_templates,
|
||||
@@ -261,6 +219,17 @@ pub const ContextParams = struct {
|
||||
};
|
||||
|
||||
pub fn createContext(self: *Env, page: *Page, params: ContextParams) !*Context {
|
||||
return self._createContext(page, params);
|
||||
}
|
||||
|
||||
pub fn createWorkerContext(self: *Env, worker: *WorkerGlobalScope, params: ContextParams) !*Context {
|
||||
return self._createContext(worker, params);
|
||||
}
|
||||
|
||||
fn _createContext(self: *Env, global: anytype, params: ContextParams) !*Context {
|
||||
const T = @TypeOf(global);
|
||||
const is_page = T == *Page;
|
||||
|
||||
const context_arena = try self.app.arena_pool.acquire(.{ .debug = params.debug_name });
|
||||
errdefer self.app.arena_pool.release(context_arena);
|
||||
|
||||
@@ -273,12 +242,10 @@ pub fn createContext(self: *Env, page: *Page, params: ContextParams) !*Context {
|
||||
const microtask_queue = v8.v8__MicrotaskQueue__New(isolate.handle, v8.kExplicit).?;
|
||||
errdefer v8.v8__MicrotaskQueue__DELETE(microtask_queue);
|
||||
|
||||
// Get the global template that was created once per isolate
|
||||
const global_template: *const v8.ObjectTemplate = @ptrCast(@alignCast(v8.v8__Eternal__Get(&self.global_template, isolate.handle).?));
|
||||
v8.v8__ObjectTemplate__SetInternalFieldCount(global_template, comptime Snapshot.countInternalFields(Window.JsApi));
|
||||
|
||||
const v8_context = v8.v8__Context__New__Config(isolate.handle, &.{
|
||||
.global_template = global_template,
|
||||
// Restore the context from the snapshot (0 = Page, 1 = Worker)
|
||||
const snapshot_index: u32 = if (comptime is_page) 0 else 1;
|
||||
const v8_context = v8.v8__Context__FromSnapshot__Config(isolate.handle, snapshot_index, &.{
|
||||
.global_template = null,
|
||||
.global_object = null,
|
||||
.microtask_queue = microtask_queue,
|
||||
}).?;
|
||||
@@ -287,36 +254,36 @@ pub fn createContext(self: *Env, page: *Page, params: ContextParams) !*Context {
|
||||
var context_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, v8_context, &context_global);
|
||||
|
||||
// get the global object for the context, this maps to our Window
|
||||
// Get the global object for the context
|
||||
const global_obj = v8.v8__Context__Global(v8_context).?;
|
||||
|
||||
{
|
||||
// Store our TAO inside the internal field of the global object. This
|
||||
// maps the v8::Object -> Zig instance. Almost all objects have this, and
|
||||
// it gets setup automatically as objects are created, but the Window
|
||||
// object already exists in v8 (it's the global) so we manually create
|
||||
// the mapping here.
|
||||
// maps the v8::Object -> Zig instance.
|
||||
const tao = try params.identity_arena.create(@import("TaggedOpaque.zig"));
|
||||
tao.* = .{
|
||||
.value = @ptrCast(page.window),
|
||||
tao.* = if (comptime is_page) .{
|
||||
.value = @ptrCast(global.window),
|
||||
.prototype_chain = (&Window.JsApi.Meta.prototype_chain).ptr,
|
||||
.prototype_len = @intCast(Window.JsApi.Meta.prototype_chain.len),
|
||||
.subtype = .node, // this probably isn't right, but it's what we've been doing all along
|
||||
.subtype = .node,
|
||||
} else .{
|
||||
.value = @ptrCast(global),
|
||||
.prototype_chain = (&WorkerGlobalScope.JsApi.Meta.prototype_chain).ptr,
|
||||
.prototype_len = @intCast(WorkerGlobalScope.JsApi.Meta.prototype_chain.len),
|
||||
.subtype = null,
|
||||
};
|
||||
v8.v8__Object__SetAlignedPointerInInternalField(global_obj, 0, tao);
|
||||
}
|
||||
|
||||
const context_id = self.context_id;
|
||||
self.context_id = context_id + 1;
|
||||
|
||||
const session = page._session;
|
||||
const session = global._session;
|
||||
const origin = try session.getOrCreateOrigin(null);
|
||||
errdefer session.releaseOrigin(origin);
|
||||
|
||||
const context = try context_arena.create(Context);
|
||||
context.* = .{
|
||||
.env = self,
|
||||
.page = page,
|
||||
.global = if (comptime is_page) .{ .page = global } else .{ .worker = global },
|
||||
.origin = origin,
|
||||
.id = context_id,
|
||||
.session = session,
|
||||
@@ -326,23 +293,32 @@ pub fn createContext(self: *Env, page: *Page, params: ContextParams) !*Context {
|
||||
.templates = self.templates,
|
||||
.call_arena = params.call_arena,
|
||||
.microtask_queue = microtask_queue,
|
||||
.script_manager = &page._script_manager,
|
||||
.script_manager = if (comptime is_page) &global._script_manager else null,
|
||||
.scheduler = .init(context_arena),
|
||||
.identity = params.identity,
|
||||
.identity_arena = params.identity_arena,
|
||||
.execution = undefined,
|
||||
};
|
||||
|
||||
{
|
||||
// Multiple contexts can be created for the same Window (via CDP). We only
|
||||
// need to register the first one.
|
||||
const gop = try params.identity.identity_map.getOrPut(params.identity_arena, @intFromPtr(page.window));
|
||||
context.execution = .{
|
||||
.url = &global.url,
|
||||
.buf = &global.buf,
|
||||
.context = context,
|
||||
.arena = global.arena,
|
||||
.call_arena = params.call_arena,
|
||||
._factory = global._factory,
|
||||
._scheduler = &context.scheduler,
|
||||
};
|
||||
|
||||
// Register in the identity map. Multiple contexts can be created for the
|
||||
// same global (via CDP), so we only register the first one.
|
||||
const identity_ptr = if (comptime is_page) @intFromPtr(global.window) else @intFromPtr(global);
|
||||
const gop = try params.identity.identity_map.getOrPut(params.identity_arena, identity_ptr);
|
||||
if (gop.found_existing == false) {
|
||||
// our window wrapped in a v8::Global
|
||||
var global_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, global_obj, &global_global);
|
||||
gop.value_ptr.* = global_global;
|
||||
}
|
||||
}
|
||||
|
||||
// Store a pointer to our context inside the v8 context so that, given
|
||||
// a v8 context, we can get our context out
|
||||
@@ -528,13 +504,19 @@ fn promiseRejectCallback(message_handle: v8.PromiseRejectMessage) callconv(.c) v
|
||||
.call_arena = ctx.call_arena,
|
||||
};
|
||||
|
||||
const page = ctx.page;
|
||||
switch (ctx.global) {
|
||||
.page => |page| {
|
||||
page.window.unhandledPromiseRejection(promise_event == v8.kPromiseRejectWithNoHandler, .{
|
||||
.local = &local,
|
||||
.handle = &message_handle,
|
||||
}, page) catch |err| {
|
||||
log.warn(.browser, "unhandled rejection handler", .{ .err = err });
|
||||
};
|
||||
},
|
||||
.worker => {
|
||||
// TODO: Worker promise rejection handling
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn fatalCallback(c_location: [*c]const u8, c_message: [*c]const u8) callconv(.c) void {
|
||||
@@ -566,3 +548,50 @@ const PrivateSymbols = struct {
|
||||
self.child_nodes.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
const EventTarget = @import("../webapi/EventTarget.zig");
|
||||
|
||||
test "Env: Worker context " {
|
||||
const session = testing.test_session;
|
||||
|
||||
// Create a dummy WorkerGlobalScope using page's resources (hackish until session.createWorker exists)
|
||||
const worker = try session.factory.eventTarget(WorkerGlobalScope{
|
||||
._session = session,
|
||||
._factory = &session.factory,
|
||||
.arena = session.arena,
|
||||
.url = "about:blank",
|
||||
._proto = undefined,
|
||||
._performance = .init(),
|
||||
});
|
||||
|
||||
const ctx = try testing.test_browser.env.createWorkerContext(worker, .{
|
||||
.identity = &session.identity,
|
||||
.identity_arena = session.arena,
|
||||
.call_arena = session.arena,
|
||||
});
|
||||
defer testing.test_browser.env.destroyContext(ctx);
|
||||
|
||||
var ls: js.Local.Scope = undefined;
|
||||
ctx.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
try testing.expectEqual(true, (try ls.local.exec("typeof Node === 'undefined'", null)).isTrue());
|
||||
try testing.expectEqual(true, (try ls.local.exec("typeof WorkerGlobalScope !== 'undefined'", null)).isTrue());
|
||||
}
|
||||
|
||||
test "Env: Page context" {
|
||||
const session = testing.test_session;
|
||||
const page = try session.createPage();
|
||||
defer session.removePage();
|
||||
|
||||
// Page already has a context created, use it directly
|
||||
const ctx = page.js;
|
||||
|
||||
var ls: js.Local.Scope = undefined;
|
||||
ctx.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
try testing.expectEqual(true, (try ls.local.exec("typeof Node !== 'undefined'", null)).isTrue());
|
||||
try testing.expectEqual(true, (try ls.local.exec("typeof WorkerGlobalScope === 'undefined'", null)).isTrue());
|
||||
}
|
||||
|
||||
47
src/browser/js/Execution.zig
Normal file
47
src/browser/js/Execution.zig
Normal file
@@ -0,0 +1,47 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! Execution context for worker-compatible APIs.
|
||||
//!
|
||||
//! This provides a common interface for APIs that work in both Window and Worker
|
||||
//! contexts. Instead of taking `*Page` (which is DOM-specific), these APIs take
|
||||
//! `*Execution` which abstracts the common infrastructure.
|
||||
//!
|
||||
//! The bridge constructs an Execution on-the-fly from the current context,
|
||||
//! whether it's a Page context or a Worker context.
|
||||
|
||||
const std = @import("std");
|
||||
const Context = @import("Context.zig");
|
||||
const Scheduler = @import("Scheduler.zig");
|
||||
const Factory = @import("../Factory.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Execution = @This();
|
||||
|
||||
context: *Context,
|
||||
|
||||
// Fields named to match Page for generic code (executor._factory works for both)
|
||||
buf: []u8,
|
||||
arena: Allocator,
|
||||
call_arena: Allocator,
|
||||
_factory: *Factory,
|
||||
_scheduler: *Scheduler,
|
||||
|
||||
// Pointer to the url field (Page or WorkerGlobalScope) - allows access to current url even after navigation
|
||||
url: *[:0]const u8,
|
||||
@@ -332,7 +332,15 @@ pub fn zigValueToJs(self: *const Local, value: anytype, comptime opts: CallOpts)
|
||||
}
|
||||
|
||||
if (@typeInfo(ptr.child) == .@"struct" and @hasDecl(ptr.child, "runtimeGenericWrap")) {
|
||||
const wrap = try value.runtimeGenericWrap(self.ctx.page);
|
||||
const page = switch (self.ctx.global) {
|
||||
.page => |p| p,
|
||||
.worker => {
|
||||
// No Worker-related API currently uses this, so haven't
|
||||
// added support for it
|
||||
unreachable;
|
||||
},
|
||||
};
|
||||
const wrap = try value.runtimeGenericWrap(page);
|
||||
return self.zigValueToJs(wrap, opts);
|
||||
}
|
||||
|
||||
@@ -409,7 +417,15 @@ pub fn zigValueToJs(self: *const Local, value: anytype, comptime opts: CallOpts)
|
||||
// zig fmt: on
|
||||
|
||||
if (@hasDecl(T, "runtimeGenericWrap")) {
|
||||
const wrap = try value.runtimeGenericWrap(self.ctx.page);
|
||||
const page = switch (self.ctx.global) {
|
||||
.page => |p| p,
|
||||
.worker => {
|
||||
// No Worker-related API currently uses this, so haven't
|
||||
// added support for it
|
||||
unreachable;
|
||||
},
|
||||
};
|
||||
const wrap = try value.runtimeGenericWrap(page);
|
||||
return self.zigValueToJs(wrap, opts);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
@@ -17,6 +17,7 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
const js = @import("js.zig");
|
||||
const bridge = @import("bridge.zig");
|
||||
const log = @import("../../log.zig");
|
||||
@@ -25,6 +26,8 @@ const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
const v8 = js.v8;
|
||||
const JsApis = bridge.JsApis;
|
||||
const PageJsApis = bridge.PageJsApis;
|
||||
const WorkerJsApis = bridge.WorkerJsApis;
|
||||
|
||||
const Snapshot = @This();
|
||||
|
||||
@@ -135,7 +138,7 @@ pub fn create() !Snapshot {
|
||||
v8.v8__HandleScope__CONSTRUCT(&handle_scope, isolate);
|
||||
defer v8.v8__HandleScope__DESTRUCT(&handle_scope);
|
||||
|
||||
// Create templates (constructors only) FIRST
|
||||
// Create templates for ALL types (JsApis)
|
||||
var templates: [JsApis.len]*const v8.FunctionTemplate = undefined;
|
||||
inline for (JsApis, 0..) |JsApi, i| {
|
||||
@setEvalBranchQuota(10_000);
|
||||
@@ -144,20 +147,19 @@ pub fn create() !Snapshot {
|
||||
}
|
||||
|
||||
// Set up prototype chains BEFORE attaching properties
|
||||
// This must come before attachClass so inheritance is set up first
|
||||
inline for (JsApis, 0..) |JsApi, i| {
|
||||
if (comptime protoIndexLookup(JsApi)) |proto_index| {
|
||||
v8.v8__FunctionTemplate__Inherit(templates[i], templates[proto_index]);
|
||||
}
|
||||
}
|
||||
|
||||
// Set up the global template to inherit from Window's template
|
||||
// This way the global object gets all Window properties through inheritance
|
||||
const context = v8.v8__Context__New(isolate, null, null);
|
||||
v8.v8__Context__Enter(context);
|
||||
defer v8.v8__Context__Exit(context);
|
||||
// Add ALL templates to snapshot (done once, in any context)
|
||||
// We need a context to call AddData, so create a temporary one
|
||||
{
|
||||
const temp_context = v8.v8__Context__New(isolate, null, null);
|
||||
v8.v8__Context__Enter(temp_context);
|
||||
defer v8.v8__Context__Exit(temp_context);
|
||||
|
||||
// Add templates to context snapshot
|
||||
var last_data_index: usize = 0;
|
||||
inline for (JsApis, 0..) |_, i| {
|
||||
@setEvalBranchQuota(10_000);
|
||||
@@ -166,11 +168,6 @@ pub fn create() !Snapshot {
|
||||
data_start = data_index;
|
||||
last_data_index = data_index;
|
||||
} else {
|
||||
// This isn't strictly required, but it means we only need to keep
|
||||
// the first data_index. This is based on the assumption that
|
||||
// addDataWithContext always increases by 1. If we ever hit this
|
||||
// error, then that assumption is wrong and we should capture
|
||||
// all the indexes explicitly in an array.
|
||||
if (data_index != last_data_index + 1) {
|
||||
return error.InvalidDataIndex;
|
||||
}
|
||||
@@ -178,13 +175,91 @@ pub fn create() !Snapshot {
|
||||
}
|
||||
}
|
||||
|
||||
// Realize all templates by getting their functions and attaching to global
|
||||
// V8 requires a default context. We could probably make this our
|
||||
// Page context, but having both the Page and Worker context be
|
||||
// indexed via addContext makes things a little more consistent.
|
||||
v8.v8__SnapshotCreator__setDefaultContext(snapshot_creator, temp_context);
|
||||
}
|
||||
|
||||
{
|
||||
const Window = @import("../webapi/Window.zig");
|
||||
const index = try createSnapshotContext(&PageJsApis, Window.JsApi, isolate, snapshot_creator.?, &templates);
|
||||
std.debug.assert(index == 0);
|
||||
}
|
||||
|
||||
{
|
||||
const WorkerGlobalScope = @import("../webapi/WorkerGlobalScope.zig");
|
||||
const index = try createSnapshotContext(&WorkerJsApis, WorkerGlobalScope.JsApi, isolate, snapshot_creator.?, &templates);
|
||||
std.debug.assert(index == 1);
|
||||
}
|
||||
}
|
||||
|
||||
const blob = v8.v8__SnapshotCreator__createBlob(snapshot_creator, v8.kKeep);
|
||||
|
||||
return .{
|
||||
.owns_data = true,
|
||||
.data_start = data_start,
|
||||
.startup_data = blob,
|
||||
.external_references = external_references,
|
||||
};
|
||||
}
|
||||
|
||||
fn createSnapshotContext(
|
||||
comptime ContextApis: []const type,
|
||||
comptime GlobalScopeApi: type,
|
||||
isolate: *v8.Isolate,
|
||||
snapshot_creator: *v8.SnapshotCreator,
|
||||
templates: []*const v8.FunctionTemplate,
|
||||
) !usize {
|
||||
// Create a global template that inherits from the GlobalScopeApi (Window or WorkerGlobalScope)
|
||||
const global_scope_index = comptime bridge.JsApiLookup.getId(GlobalScopeApi);
|
||||
const js_global = v8.v8__FunctionTemplate__New__DEFAULT(isolate);
|
||||
const class_name = v8.v8__String__NewFromUtf8(isolate, GlobalScopeApi.Meta.name.ptr, v8.kNormal, @intCast(GlobalScopeApi.Meta.name.len));
|
||||
v8.v8__FunctionTemplate__SetClassName(js_global, class_name);
|
||||
v8.v8__FunctionTemplate__Inherit(js_global, templates[global_scope_index]);
|
||||
|
||||
const global_template = v8.v8__FunctionTemplate__InstanceTemplate(js_global).?;
|
||||
v8.v8__ObjectTemplate__SetInternalFieldCount(global_template, comptime countInternalFields(GlobalScopeApi));
|
||||
|
||||
// Set up named/indexed handlers for Window's global object (for named element access like window.myDiv)
|
||||
if (comptime std.mem.eql(u8, GlobalScopeApi.Meta.name, "Window")) {
|
||||
v8.v8__ObjectTemplate__SetNamedHandler(global_template, &.{
|
||||
.getter = bridge.unknownWindowPropertyCallback,
|
||||
.setter = null,
|
||||
.query = null,
|
||||
.deleter = null,
|
||||
.enumerator = null,
|
||||
.definer = null,
|
||||
.descriptor = null,
|
||||
.data = null,
|
||||
.flags = v8.kOnlyInterceptStrings | v8.kNonMasking,
|
||||
});
|
||||
v8.v8__ObjectTemplate__SetIndexedHandler(global_template, &.{
|
||||
.getter = @import("../webapi/Window.zig").JsApi.index.getter,
|
||||
.setter = null,
|
||||
.query = null,
|
||||
.deleter = null,
|
||||
.enumerator = null,
|
||||
.definer = null,
|
||||
.descriptor = null,
|
||||
.data = null,
|
||||
.flags = 0,
|
||||
});
|
||||
}
|
||||
|
||||
const context = v8.v8__Context__New(isolate, global_template, null);
|
||||
v8.v8__Context__Enter(context);
|
||||
defer v8.v8__Context__Exit(context);
|
||||
|
||||
// Initialize embedder data to null so callbacks can detect snapshot creation
|
||||
v8.v8__Context__SetAlignedPointerInEmbedderData(context, 1, null);
|
||||
|
||||
const global_obj = v8.v8__Context__Global(context);
|
||||
|
||||
inline for (JsApis, 0..) |JsApi, i| {
|
||||
const func = v8.v8__FunctionTemplate__GetFunction(templates[i], context);
|
||||
|
||||
// Attach to global if it has a name
|
||||
// Attach constructors for this context's APIs to the global
|
||||
inline for (ContextApis) |JsApi| {
|
||||
const template_index = comptime bridge.JsApiLookup.getId(JsApi);
|
||||
const func = v8.v8__FunctionTemplate__GetFunction(templates[template_index], context);
|
||||
if (@hasDecl(JsApi.Meta, "name")) {
|
||||
if (@hasDecl(JsApi.Meta, "constructor_alias")) {
|
||||
const alias = JsApi.Meta.constructor_alias;
|
||||
@@ -192,12 +267,6 @@ pub fn create() !Snapshot {
|
||||
var maybe_result: v8.MaybeBool = undefined;
|
||||
v8.v8__Object__Set(global_obj, context, v8_class_name, func, &maybe_result);
|
||||
|
||||
// @TODO: This is wrong. This name should be registered with the
|
||||
// illegalConstructorCallback. I.e. new Image() is OK, but
|
||||
// new HTMLImageElement() isn't.
|
||||
// But we _have_ to register the name, i.e. HTMLImageElement
|
||||
// has to be registered so, for now, instead of creating another
|
||||
// template, we just hook it into the constructor.
|
||||
const name = JsApi.Meta.name;
|
||||
const illegal_class_name = v8.v8__String__NewFromUtf8(isolate, name.ptr, v8.kNormal, @intCast(name.len));
|
||||
var maybe_result2: v8.MaybeBool = undefined;
|
||||
@@ -216,8 +285,7 @@ pub fn create() !Snapshot {
|
||||
}
|
||||
|
||||
{
|
||||
// If we want to overwrite the built-in console, we have to
|
||||
// delete the built-in one.
|
||||
// Delete built-in console so we can inject our own
|
||||
const console_key = v8.v8__String__NewFromUtf8(isolate, "console", v8.kNormal, 7);
|
||||
var maybe_deleted: v8.MaybeBool = undefined;
|
||||
v8.v8__Object__Delete(global_obj, context, console_key, &maybe_deleted);
|
||||
@@ -226,9 +294,8 @@ pub fn create() !Snapshot {
|
||||
}
|
||||
}
|
||||
|
||||
// This shouldn't be necessary, but it is:
|
||||
// Set prototype chains on function objects
|
||||
// https://groups.google.com/g/v8-users/c/qAQQBmbi--8
|
||||
// TODO: see if newer V8 engines have a way around this.
|
||||
inline for (JsApis, 0..) |JsApi, i| {
|
||||
if (comptime protoIndexLookup(JsApi)) |proto_index| {
|
||||
const proto_func = v8.v8__FunctionTemplate__GetFunction(templates[proto_index], context);
|
||||
@@ -243,41 +310,16 @@ pub fn create() !Snapshot {
|
||||
}
|
||||
|
||||
{
|
||||
// Custom exception
|
||||
// TODO: this is an horrible hack, I can't figure out how to do this cleanly.
|
||||
// DOMException prototype setup
|
||||
const code_str = "DOMException.prototype.__proto__ = Error.prototype";
|
||||
const code = v8.v8__String__NewFromUtf8(isolate, code_str.ptr, v8.kNormal, @intCast(code_str.len));
|
||||
const script = v8.v8__Script__Compile(context, code, null) orelse return error.ScriptCompileFailed;
|
||||
_ = v8.v8__Script__Run(script, context) orelse return error.ScriptRunFailed;
|
||||
}
|
||||
|
||||
v8.v8__SnapshotCreator__setDefaultContext(snapshot_creator, context);
|
||||
return v8.v8__SnapshotCreator__AddContext(snapshot_creator, context);
|
||||
}
|
||||
|
||||
const blob = v8.v8__SnapshotCreator__createBlob(snapshot_creator, v8.kKeep);
|
||||
|
||||
return .{
|
||||
.owns_data = true,
|
||||
.data_start = data_start,
|
||||
.external_references = external_references,
|
||||
.startup_data = blob,
|
||||
};
|
||||
}
|
||||
|
||||
// Helper to check if a JsApi has a NamedIndexed handler
|
||||
fn hasNamedIndexedGetter(comptime JsApi: type) bool {
|
||||
const declarations = @typeInfo(JsApi).@"struct".decls;
|
||||
inline for (declarations) |d| {
|
||||
const value = @field(JsApi, d.name);
|
||||
const T = @TypeOf(value);
|
||||
if (T == bridge.NamedIndexed) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Count total callbacks needed for external_references array
|
||||
fn countExternalReferences() comptime_int {
|
||||
@setEvalBranchQuota(100_000);
|
||||
|
||||
@@ -289,24 +331,24 @@ fn countExternalReferences() comptime_int {
|
||||
// +1 for the noop function shared by various types
|
||||
count += 1;
|
||||
|
||||
// +1 for unknownWindowPropertyCallback used on Window's global template
|
||||
count += 1;
|
||||
|
||||
inline for (JsApis) |JsApi| {
|
||||
// Constructor (only if explicit)
|
||||
if (@hasDecl(JsApi, "constructor")) {
|
||||
count += 1;
|
||||
}
|
||||
|
||||
// Callable (htmldda)
|
||||
if (@hasDecl(JsApi, "callable")) {
|
||||
count += 1;
|
||||
}
|
||||
|
||||
// All other callbacks
|
||||
const declarations = @typeInfo(JsApi).@"struct".decls;
|
||||
inline for (declarations) |d| {
|
||||
const value = @field(JsApi, d.name);
|
||||
const T = @TypeOf(value);
|
||||
if (T == bridge.Accessor) {
|
||||
count += 1; // getter
|
||||
count += 1;
|
||||
if (value.setter != null) {
|
||||
count += 1;
|
||||
}
|
||||
@@ -320,14 +362,13 @@ fn countExternalReferences() comptime_int {
|
||||
count += 1;
|
||||
}
|
||||
} else if (T == bridge.NamedIndexed) {
|
||||
count += 1; // getter
|
||||
count += 1;
|
||||
if (value.setter != null) count += 1;
|
||||
if (value.deleter != null) count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// In debug mode, add unknown property callbacks for types without NamedIndexed
|
||||
if (comptime IS_DEBUG) {
|
||||
inline for (JsApis) |JsApi| {
|
||||
if (!hasNamedIndexedGetter(JsApi)) {
|
||||
@@ -349,6 +390,9 @@ fn collectExternalReferences() [countExternalReferences()]isize {
|
||||
references[idx] = @bitCast(@intFromPtr(&bridge.Function.noopFunction));
|
||||
idx += 1;
|
||||
|
||||
references[idx] = @bitCast(@intFromPtr(&bridge.unknownWindowPropertyCallback));
|
||||
idx += 1;
|
||||
|
||||
inline for (JsApis) |JsApi| {
|
||||
if (@hasDecl(JsApi, "constructor")) {
|
||||
references[idx] = @bitCast(@intFromPtr(JsApi.constructor.func));
|
||||
@@ -399,7 +443,6 @@ fn collectExternalReferences() [countExternalReferences()]isize {
|
||||
}
|
||||
}
|
||||
|
||||
// In debug mode, collect unknown property callbacks for types without NamedIndexed
|
||||
if (comptime IS_DEBUG) {
|
||||
inline for (JsApis) |JsApi| {
|
||||
if (!hasNamedIndexedGetter(JsApi)) {
|
||||
@@ -412,37 +455,11 @@ fn collectExternalReferences() [countExternalReferences()]isize {
|
||||
return references;
|
||||
}
|
||||
|
||||
// Even if a struct doesn't have a `constructor` function, we still
|
||||
// `generateConstructor`, because this is how we create our
|
||||
// FunctionTemplate. Such classes exist, but they can't be instantiated
|
||||
// via `new ClassName()` - but they could, for example, be created in
|
||||
// Zig and returned from a function call, which is why we need the
|
||||
// FunctionTemplate.
|
||||
fn generateConstructor(comptime JsApi: type, isolate: *v8.Isolate) *const v8.FunctionTemplate {
|
||||
const callback = blk: {
|
||||
if (@hasDecl(JsApi, "constructor")) {
|
||||
break :blk JsApi.constructor.func;
|
||||
fn protoIndexLookup(comptime JsApi: type) ?u16 {
|
||||
return protoIndexLookupFor(&JsApis, JsApi);
|
||||
}
|
||||
|
||||
// Use shared illegal constructor callback
|
||||
break :blk illegalConstructorCallback;
|
||||
};
|
||||
|
||||
const template = v8.v8__FunctionTemplate__New__DEFAULT2(isolate, callback).?;
|
||||
{
|
||||
const internal_field_count = comptime countInternalFields(JsApi);
|
||||
if (internal_field_count > 0) {
|
||||
const instance_template = v8.v8__FunctionTemplate__InstanceTemplate(template);
|
||||
v8.v8__ObjectTemplate__SetInternalFieldCount(instance_template, internal_field_count);
|
||||
}
|
||||
}
|
||||
const name_str = if (@hasDecl(JsApi.Meta, "name")) JsApi.Meta.name else @typeName(JsApi);
|
||||
const class_name = v8.v8__String__NewFromUtf8(isolate, name_str.ptr, v8.kNormal, @intCast(name_str.len));
|
||||
v8.v8__FunctionTemplate__SetClassName(template, class_name);
|
||||
return template;
|
||||
}
|
||||
|
||||
pub fn countInternalFields(comptime JsApi: type) u8 {
|
||||
fn countInternalFields(comptime JsApi: type) u8 {
|
||||
var last_used_id = 0;
|
||||
var cache_count: u8 = 0;
|
||||
|
||||
@@ -480,14 +497,80 @@ pub fn countInternalFields(comptime JsApi: type) u8 {
|
||||
return cache_count + 1;
|
||||
}
|
||||
|
||||
// Attaches JsApi members to the prototype template (normal case)
|
||||
// Shared illegal constructor callback for types without explicit constructors
|
||||
fn illegalConstructorCallback(raw_info: ?*const v8.FunctionCallbackInfo) callconv(.c) void {
|
||||
const isolate = v8.v8__FunctionCallbackInfo__GetIsolate(raw_info);
|
||||
log.warn(.js, "Illegal constructor call", .{});
|
||||
|
||||
const message = v8.v8__String__NewFromUtf8(isolate, "Illegal Constructor", v8.kNormal, 19);
|
||||
const js_exception = v8.v8__Exception__TypeError(message);
|
||||
|
||||
_ = v8.v8__Isolate__ThrowException(isolate, js_exception);
|
||||
var return_value: v8.ReturnValue = undefined;
|
||||
v8.v8__FunctionCallbackInfo__GetReturnValue(raw_info, &return_value);
|
||||
v8.v8__ReturnValue__Set(return_value, js_exception);
|
||||
}
|
||||
|
||||
// Helper to check if a JsApi has a NamedIndexed handler (public for reuse)
|
||||
fn hasNamedIndexedGetter(comptime JsApi: type) bool {
|
||||
const declarations = @typeInfo(JsApi).@"struct".decls;
|
||||
inline for (declarations) |d| {
|
||||
const value = @field(JsApi, d.name);
|
||||
const T = @TypeOf(value);
|
||||
if (T == bridge.NamedIndexed) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Generic prototype index lookup for a given API list
|
||||
fn protoIndexLookupFor(comptime ApiList: []const type, comptime JsApi: type) ?u16 {
|
||||
@setEvalBranchQuota(100_000);
|
||||
comptime {
|
||||
const T = JsApi.bridge.type;
|
||||
if (!@hasField(T, "_proto")) {
|
||||
return null;
|
||||
}
|
||||
const Ptr = std.meta.fieldInfo(T, ._proto).type;
|
||||
const F = @typeInfo(Ptr).pointer.child;
|
||||
// Look up in the provided API list
|
||||
for (ApiList, 0..) |Api, i| {
|
||||
if (Api == F.JsApi) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
@compileError("Prototype " ++ @typeName(F.JsApi) ++ " not found in API list");
|
||||
}
|
||||
}
|
||||
|
||||
// Generate a constructor template for a JsApi type (public for reuse)
|
||||
pub fn generateConstructor(comptime JsApi: type, isolate: *v8.Isolate) *const v8.FunctionTemplate {
|
||||
const callback = blk: {
|
||||
if (@hasDecl(JsApi, "constructor")) {
|
||||
break :blk JsApi.constructor.func;
|
||||
}
|
||||
break :blk illegalConstructorCallback;
|
||||
};
|
||||
|
||||
const template = v8.v8__FunctionTemplate__New__DEFAULT2(isolate, callback).?;
|
||||
{
|
||||
const internal_field_count = comptime countInternalFields(JsApi);
|
||||
if (internal_field_count > 0) {
|
||||
const instance_template = v8.v8__FunctionTemplate__InstanceTemplate(template);
|
||||
v8.v8__ObjectTemplate__SetInternalFieldCount(instance_template, internal_field_count);
|
||||
}
|
||||
}
|
||||
const name_str = if (@hasDecl(JsApi.Meta, "name")) JsApi.Meta.name else @typeName(JsApi);
|
||||
const class_name = v8.v8__String__NewFromUtf8(isolate, name_str.ptr, v8.kNormal, @intCast(name_str.len));
|
||||
v8.v8__FunctionTemplate__SetClassName(template, class_name);
|
||||
return template;
|
||||
}
|
||||
|
||||
// Attach JsApi members to a template (public for reuse)
|
||||
fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.FunctionTemplate) void {
|
||||
const instance = v8.v8__FunctionTemplate__InstanceTemplate(template);
|
||||
const prototype = v8.v8__FunctionTemplate__PrototypeTemplate(template);
|
||||
|
||||
// Create a signature that validates the receiver is an instance of this template.
|
||||
// This prevents crashes when JavaScript extracts a getter/method and calls it
|
||||
// with the wrong `this` (e.g., documentGetter.call(null)).
|
||||
const signature = v8.v8__Signature__New(isolate, template);
|
||||
|
||||
const declarations = @typeInfo(JsApi).@"struct".decls;
|
||||
@@ -523,7 +606,6 @@ fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.F
|
||||
}
|
||||
|
||||
if (value.static) {
|
||||
// Static accessors: use Template's SetAccessorProperty
|
||||
v8.v8__Template__SetAccessorProperty(@ptrCast(template), js_name, getter_callback, setter_callback, attribute);
|
||||
} else {
|
||||
v8.v8__ObjectTemplate__SetAccessorProperty__Config(prototype, &.{
|
||||
@@ -535,7 +617,6 @@ fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.F
|
||||
}
|
||||
},
|
||||
bridge.Function => {
|
||||
// For non-static functions, use the signature to validate the receiver
|
||||
const func_signature = if (value.static) null else signature;
|
||||
const function_template = v8.v8__FunctionTemplate__New__Config(isolate, &.{
|
||||
.callback = value.func,
|
||||
@@ -589,7 +670,7 @@ fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.F
|
||||
bridge.Property => {
|
||||
const js_value = switch (value.value) {
|
||||
.null => js.simpleZigValueToJs(.{ .handle = isolate }, null, true, false),
|
||||
inline .bool, .int, .float, .string => |v| js.simpleZigValueToJs(.{ .handle = isolate }, v, true, false),
|
||||
inline .bool, .int, .float, .string => |pv| js.simpleZigValueToJs(.{ .handle = isolate }, pv, true, false),
|
||||
};
|
||||
const js_name = v8.v8__String__NewFromUtf8(isolate, name.ptr, v8.kNormal, @intCast(name.len));
|
||||
|
||||
@@ -599,11 +680,10 @@ fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.F
|
||||
}
|
||||
|
||||
if (value.template) {
|
||||
// apply it both to the type itself (e.g. Node.Elem)
|
||||
v8.v8__Template__Set(@ptrCast(template), js_name, js_value, v8.ReadOnly + v8.DontDelete);
|
||||
}
|
||||
},
|
||||
bridge.Constructor => {}, // already handled in generateConstructor
|
||||
bridge.Constructor => {},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
@@ -636,30 +716,3 @@ fn attachClass(comptime JsApi: type, isolate: *v8.Isolate, template: *const v8.F
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn protoIndexLookup(comptime JsApi: type) ?bridge.JsApiLookup.BackingInt {
|
||||
@setEvalBranchQuota(2000);
|
||||
comptime {
|
||||
const T = JsApi.bridge.type;
|
||||
if (!@hasField(T, "_proto")) {
|
||||
return null;
|
||||
}
|
||||
const Ptr = std.meta.fieldInfo(T, ._proto).type;
|
||||
const F = @typeInfo(Ptr).pointer.child;
|
||||
return bridge.JsApiLookup.getId(F.JsApi);
|
||||
}
|
||||
}
|
||||
|
||||
// Shared illegal constructor callback for types without explicit constructors
|
||||
fn illegalConstructorCallback(raw_info: ?*const v8.FunctionCallbackInfo) callconv(.c) void {
|
||||
const isolate = v8.v8__FunctionCallbackInfo__GetIsolate(raw_info);
|
||||
log.warn(.js, "Illegal constructor call", .{});
|
||||
|
||||
const message = v8.v8__String__NewFromUtf8(isolate, "Illegal Constructor", v8.kNormal, 19);
|
||||
const js_exception = v8.v8__Exception__TypeError(message);
|
||||
|
||||
_ = v8.v8__Isolate__ThrowException(isolate, js_exception);
|
||||
var return_value: v8.ReturnValue = undefined;
|
||||
v8.v8__FunctionCallbackInfo__GetReturnValue(raw_info, &return_value);
|
||||
v8.v8__ReturnValue__Set(return_value, js_exception);
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ const Session = @import("../Session.zig");
|
||||
const v8 = js.v8;
|
||||
|
||||
const Caller = @import("Caller.zig");
|
||||
const Context = @import("Context.zig");
|
||||
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
@@ -386,6 +387,11 @@ pub const Property = struct {
|
||||
|
||||
pub fn unknownWindowPropertyCallback(c_name: ?*const v8.Name, handle: ?*const v8.PropertyCallbackInfo) callconv(.c) u8 {
|
||||
const v8_isolate = v8.v8__PropertyCallbackInfo__GetIsolate(handle).?;
|
||||
|
||||
// During snapshot creation, there's no Context in embedder data yet
|
||||
const v8_context = v8.v8__Isolate__GetCurrentContext(v8_isolate) orelse return 0;
|
||||
if (v8.v8__Context__GetAlignedPointerFromEmbedderData(v8_context, 1) == null) return 0;
|
||||
|
||||
var caller: Caller = undefined;
|
||||
caller.init(v8_isolate);
|
||||
defer caller.deinit();
|
||||
@@ -400,15 +406,19 @@ pub fn unknownWindowPropertyCallback(c_name: ?*const v8.Name, handle: ?*const v8
|
||||
return 0;
|
||||
};
|
||||
|
||||
const page = local.ctx.page;
|
||||
// Only Page contexts have document.getElementById lookup
|
||||
switch (local.ctx.global) {
|
||||
.page => |page| {
|
||||
const document = page.document;
|
||||
|
||||
if (document.getElementById(property, page)) |el| {
|
||||
const js_val = local.zigValueToJs(el, .{}) catch return 0;
|
||||
var pc = Caller.PropertyCallbackInfo{ .handle = handle.? };
|
||||
pc.getReturnValue().set(js_val);
|
||||
return 1;
|
||||
}
|
||||
},
|
||||
.worker => {}, // no global lookup in a worker
|
||||
}
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
if (std.mem.startsWith(u8, property, "__")) {
|
||||
@@ -445,7 +455,8 @@ pub fn unknownWindowPropertyCallback(c_name: ?*const v8.Name, handle: ?*const v8
|
||||
.{ "ApplePaySession", {} },
|
||||
});
|
||||
if (!ignored.has(property)) {
|
||||
const key = std.fmt.bufPrint(&local.ctx.page.buf, "Window:{s}", .{property}) catch return 0;
|
||||
var buf: [2048]u8 = undefined;
|
||||
const key = std.fmt.bufPrint(&buf, "Window:{s}", .{property}) catch return 0;
|
||||
logUnknownProperty(local, key) catch return 0;
|
||||
}
|
||||
}
|
||||
@@ -508,7 +519,8 @@ pub fn unknownObjectPropertyCallback(comptime JsApi: type) *const fn (?*const v8
|
||||
|
||||
const ignored = std.StaticStringMap(void).initComptime(.{});
|
||||
if (!ignored.has(property)) {
|
||||
const key = std.fmt.bufPrint(&local.ctx.page.buf, "{s}:{s}", .{ if (@hasDecl(JsApi.Meta, "name")) JsApi.Meta.name else @typeName(JsApi), property }) catch return 0;
|
||||
var buf: [2048]u8 = undefined;
|
||||
const key = std.fmt.bufPrint(&buf, "{s}:{s}", .{ if (@hasDecl(JsApi.Meta, "name")) JsApi.Meta.name else @typeName(JsApi), property }) catch return 0;
|
||||
logUnknownProperty(local, key) catch return 0;
|
||||
}
|
||||
// not intercepted
|
||||
@@ -550,7 +562,7 @@ fn PrototypeType(comptime T: type) ?type {
|
||||
return Struct(std.meta.fieldInfo(T, ._proto).type);
|
||||
}
|
||||
|
||||
fn flattenTypes(comptime Types: []const type) [countFlattenedTypes(Types)]type {
|
||||
pub fn flattenTypes(comptime Types: []const type) [countFlattenedTypes(Types)]type {
|
||||
var index: usize = 0;
|
||||
var flat: [countFlattenedTypes(Types)]type = undefined;
|
||||
for (Types) |T| {
|
||||
@@ -673,7 +685,8 @@ pub const SubType = enum {
|
||||
webassemblymemory,
|
||||
};
|
||||
|
||||
pub const JsApis = flattenTypes(&.{
|
||||
// APIs for Page/Window contexts. Used by Snapshot.zig for Page snapshot creation.
|
||||
pub const PageJsApis = flattenTypes(&.{
|
||||
@import("../webapi/AbortController.zig"),
|
||||
@import("../webapi/AbortSignal.zig"),
|
||||
@import("../webapi/CData.zig"),
|
||||
@@ -866,3 +879,33 @@ pub const JsApis = flattenTypes(&.{
|
||||
@import("../webapi/Selection.zig"),
|
||||
@import("../webapi/ImageData.zig"),
|
||||
});
|
||||
|
||||
// APIs available on Worker context globals (constructors like URL, Headers, etc.)
|
||||
// This is a subset of PageJsApis plus WorkerGlobalScope.
|
||||
// TODO: Expand this list to include all worker-appropriate APIs.
|
||||
pub const WorkerJsApis = flattenTypes(&.{
|
||||
@import("../webapi/WorkerGlobalScope.zig"),
|
||||
@import("../webapi/EventTarget.zig"),
|
||||
@import("../webapi/DOMException.zig"),
|
||||
@import("../webapi/AbortController.zig"),
|
||||
@import("../webapi/AbortSignal.zig"),
|
||||
@import("../webapi/URL.zig"),
|
||||
@import("../webapi/net/URLSearchParams.zig"),
|
||||
@import("../webapi/net/Headers.zig"),
|
||||
@import("../webapi/net/Request.zig"),
|
||||
@import("../webapi/net/Response.zig"),
|
||||
@import("../webapi/encoding/TextEncoder.zig"),
|
||||
@import("../webapi/encoding/TextDecoder.zig"),
|
||||
@import("../webapi/Blob.zig"),
|
||||
@import("../webapi/File.zig"),
|
||||
@import("../webapi/net/FormData.zig"),
|
||||
@import("../webapi/Console.zig"),
|
||||
@import("../webapi/Crypto.zig"),
|
||||
@import("../webapi/Performance.zig"),
|
||||
});
|
||||
|
||||
// Master list of ALL JS APIs across all contexts.
|
||||
// Used by Env (class IDs, templates), JsApiLookup, and anywhere that needs
|
||||
// to know about all possible types. Individual snapshots use their own
|
||||
// subsets (PageJsApis, WorkerSnapshot.JsApis).
|
||||
pub const JsApis = PageJsApis ++ [_]type{@import("../webapi/WorkerGlobalScope.zig").JsApi};
|
||||
|
||||
@@ -27,6 +27,7 @@ pub const Caller = @import("Caller.zig");
|
||||
pub const Origin = @import("Origin.zig");
|
||||
pub const Identity = @import("Identity.zig");
|
||||
pub const Context = @import("Context.zig");
|
||||
pub const Execution = @import("Execution.zig");
|
||||
pub const Local = @import("Local.zig");
|
||||
pub const Inspector = @import("Inspector.zig");
|
||||
pub const Snapshot = @import("Snapshot.zig");
|
||||
|
||||
@@ -34,6 +34,7 @@ pub const Type = union(enum) {
|
||||
generic: void,
|
||||
node: *@import("Node.zig"),
|
||||
window: *@import("Window.zig"),
|
||||
worker_global_scope: *@import("WorkerGlobalScope.zig"),
|
||||
xhr: *@import("net/XMLHttpRequestEventTarget.zig"),
|
||||
abort_signal: *@import("AbortSignal.zig"),
|
||||
media_query_list: *@import("css/MediaQueryList.zig"),
|
||||
@@ -130,6 +131,7 @@ pub fn format(self: *EventTarget, writer: *std.Io.Writer) !void {
|
||||
.node => |n| n.format(writer),
|
||||
.generic => writer.writeAll("<EventTarget>"),
|
||||
.window => writer.writeAll("<Window>"),
|
||||
.worker_global_scope => writer.writeAll("<WorkerGlobalScope>"),
|
||||
.xhr => writer.writeAll("<XMLHttpRequestEventTarget>"),
|
||||
.abort_signal => writer.writeAll("<AbortSignal>"),
|
||||
.media_query_list => writer.writeAll("<MediaQueryList>"),
|
||||
@@ -149,6 +151,7 @@ pub fn toString(self: *EventTarget) []const u8 {
|
||||
.node => return "[object Node]",
|
||||
.generic => return "[object EventTarget]",
|
||||
.window => return "[object Window]",
|
||||
.worker_global_scope => return "[object WorkerGlobalScope]",
|
||||
.xhr => return "[object XMLHttpRequestEventTarget]",
|
||||
.abort_signal => return "[object AbortSignal]",
|
||||
.media_query_list => return "[object MediaQueryList]",
|
||||
|
||||
@@ -22,6 +22,7 @@ const String = @import("../../string.zig").String;
|
||||
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Execution = js.Execution;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -33,7 +34,7 @@ pub fn registerTypes() []const type {
|
||||
};
|
||||
}
|
||||
|
||||
const Normalizer = *const fn ([]const u8, *Page) []const u8;
|
||||
const Normalizer = *const fn ([]const u8, []u8) []const u8;
|
||||
|
||||
pub const Entry = struct {
|
||||
name: String,
|
||||
@@ -61,14 +62,14 @@ pub fn copy(arena: Allocator, original: KeyValueList) !KeyValueList {
|
||||
return list;
|
||||
}
|
||||
|
||||
pub fn fromJsObject(arena: Allocator, js_obj: js.Object, comptime normalizer: ?Normalizer, page: *Page) !KeyValueList {
|
||||
pub fn fromJsObject(arena: Allocator, js_obj: js.Object, comptime normalizer: ?Normalizer, buf: []u8) !KeyValueList {
|
||||
var it = try js_obj.nameIterator();
|
||||
var list = KeyValueList.init();
|
||||
try list.ensureTotalCapacity(arena, it.count);
|
||||
|
||||
while (try it.next()) |name| {
|
||||
const js_value = try js_obj.get(name);
|
||||
const normalized = if (comptime normalizer) |n| n(name, page) else name;
|
||||
const normalized = if (comptime normalizer) |n| n(name, buf) else name;
|
||||
|
||||
list._entries.appendAssumeCapacity(.{
|
||||
.name = try String.init(arena, normalized, .{}),
|
||||
@@ -79,12 +80,12 @@ pub fn fromJsObject(arena: Allocator, js_obj: js.Object, comptime normalizer: ?N
|
||||
return list;
|
||||
}
|
||||
|
||||
pub fn fromArray(arena: Allocator, kvs: []const [2][]const u8, comptime normalizer: ?Normalizer, page: *Page) !KeyValueList {
|
||||
pub fn fromArray(arena: Allocator, kvs: []const [2][]const u8, comptime normalizer: ?Normalizer, buf: []u8) !KeyValueList {
|
||||
var list = KeyValueList.init();
|
||||
try list.ensureTotalCapacity(arena, kvs.len);
|
||||
|
||||
for (kvs) |pair| {
|
||||
const normalized = if (comptime normalizer) |n| n(pair[0], page) else pair[0];
|
||||
const normalized = if (comptime normalizer) |n| n(pair[0], buf) else pair[0];
|
||||
|
||||
list._entries.appendAssumeCapacity(.{
|
||||
.name = try String.init(arena, normalized, .{}),
|
||||
@@ -111,12 +112,11 @@ pub fn get(self: *const KeyValueList, name: []const u8) ?[]const u8 {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getAll(self: *const KeyValueList, name: []const u8, page: *Page) ![]const []const u8 {
|
||||
const arena = page.call_arena;
|
||||
pub fn getAll(self: *const KeyValueList, allocator: Allocator, name: []const u8) ![]const []const u8 {
|
||||
var arr: std.ArrayList([]const u8) = .empty;
|
||||
for (self._entries.items) |*entry| {
|
||||
if (entry.name.eqlSlice(name)) {
|
||||
try arr.append(arena, entry.value.str());
|
||||
try arr.append(allocator, entry.value.str());
|
||||
}
|
||||
}
|
||||
return arr.items;
|
||||
@@ -260,7 +260,7 @@ pub const Iterator = struct {
|
||||
|
||||
pub const Entry = struct { []const u8, []const u8 };
|
||||
|
||||
pub fn next(self: *Iterator, _: *const Page) ?Iterator.Entry {
|
||||
pub fn next(self: *Iterator, _: *const Execution) ?Iterator.Entry {
|
||||
const index = self.index;
|
||||
const entries = self.kv._entries.items;
|
||||
if (index >= entries.len) {
|
||||
|
||||
@@ -27,7 +27,7 @@ const Location = @This();
|
||||
_url: *URL,
|
||||
|
||||
pub fn init(raw_url: [:0]const u8, page: *Page) !*Location {
|
||||
const url = try URL.init(raw_url, null, page);
|
||||
const url = try URL.init(raw_url, null, &page.js.execution);
|
||||
return page._factory.create(Location{
|
||||
._url = url,
|
||||
});
|
||||
@@ -53,12 +53,12 @@ pub fn getPort(self: *const Location) []const u8 {
|
||||
return self._url.getPort();
|
||||
}
|
||||
|
||||
pub fn getOrigin(self: *const Location, page: *const Page) ![]const u8 {
|
||||
return self._url.getOrigin(page);
|
||||
pub fn getOrigin(self: *const Location, exec: *const js.Execution) ![]const u8 {
|
||||
return self._url.getOrigin(exec);
|
||||
}
|
||||
|
||||
pub fn getSearch(self: *const Location, page: *const Page) ![]const u8 {
|
||||
return self._url.getSearch(page);
|
||||
pub fn getSearch(self: *const Location, exec: *const js.Execution) ![]const u8 {
|
||||
return self._url.getSearch(exec);
|
||||
}
|
||||
|
||||
pub fn getHash(self: *const Location) []const u8 {
|
||||
@@ -98,8 +98,8 @@ pub fn reload(_: *const Location, page: *Page) !void {
|
||||
return page.scheduleNavigation(page.url, .{ .reason = .script, .kind = .reload }, .{ .script = page });
|
||||
}
|
||||
|
||||
pub fn toString(self: *const Location, page: *const Page) ![:0]const u8 {
|
||||
return self._url.toString(page);
|
||||
pub fn toString(self: *const Location, exec: *const js.Execution) ![:0]const u8 {
|
||||
return self._url.toString(exec);
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
|
||||
@@ -23,6 +23,7 @@ const U = @import("../URL.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const URLSearchParams = @import("net/URLSearchParams.zig");
|
||||
const Blob = @import("Blob.zig");
|
||||
const Execution = js.Execution;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -36,11 +37,12 @@ _search_params: ?*URLSearchParams = null,
|
||||
pub const resolve = @import("../URL.zig").resolve;
|
||||
pub const eqlDocument = @import("../URL.zig").eqlDocument;
|
||||
|
||||
pub fn init(url: [:0]const u8, base_: ?[:0]const u8, page: *Page) !*URL {
|
||||
const arena = page.arena;
|
||||
pub fn init(url: [:0]const u8, base_: ?[:0]const u8, exec: *const Execution) !*URL {
|
||||
const arena = exec.arena;
|
||||
const context_url = exec.url.*;
|
||||
|
||||
if (std.mem.eql(u8, url, "about:blank")) {
|
||||
return page._factory.create(URL{
|
||||
return exec._factory.create(URL{
|
||||
._raw = "about:blank",
|
||||
._arena = arena,
|
||||
});
|
||||
@@ -48,9 +50,9 @@ pub fn init(url: [:0]const u8, base_: ?[:0]const u8, page: *Page) !*URL {
|
||||
const url_is_absolute = @import("../URL.zig").isCompleteHTTPUrl(url);
|
||||
|
||||
const base = if (base_) |b| blk: {
|
||||
// If URL is absolute, base is ignored (but we still use page.url internally)
|
||||
// If URL is absolute, base is ignored (but we still use context url internally)
|
||||
if (url_is_absolute) {
|
||||
break :blk page.url;
|
||||
break :blk context_url;
|
||||
}
|
||||
// For relative URLs, base must be a valid absolute URL
|
||||
if (!@import("../URL.zig").isCompleteHTTPUrl(b)) {
|
||||
@@ -59,11 +61,11 @@ pub fn init(url: [:0]const u8, base_: ?[:0]const u8, page: *Page) !*URL {
|
||||
break :blk b;
|
||||
} else if (!url_is_absolute) {
|
||||
return error.TypeError;
|
||||
} else page.url;
|
||||
} else context_url;
|
||||
|
||||
const raw = try resolve(arena, base, url, .{ .always_dupe = true });
|
||||
|
||||
return page._factory.create(URL{
|
||||
return exec._factory.create(URL{
|
||||
._raw = raw,
|
||||
._arena = arena,
|
||||
});
|
||||
@@ -107,20 +109,20 @@ pub fn getPort(self: *const URL) []const u8 {
|
||||
return U.getPort(self._raw);
|
||||
}
|
||||
|
||||
pub fn getOrigin(self: *const URL, page: *const Page) ![]const u8 {
|
||||
return (try U.getOrigin(page.call_arena, self._raw)) orelse {
|
||||
pub fn getOrigin(self: *const URL, exec: *const Execution) ![]const u8 {
|
||||
return (try U.getOrigin(exec.call_arena, self._raw)) orelse {
|
||||
// yes, a null string, that's what the spec wants
|
||||
return "null";
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getSearch(self: *const URL, page: *const Page) ![]const u8 {
|
||||
pub fn getSearch(self: *const URL, exec: *const Execution) ![]const u8 {
|
||||
// If searchParams has been accessed, generate search from it
|
||||
if (self._search_params) |sp| {
|
||||
if (sp.getSize() == 0) {
|
||||
return "";
|
||||
}
|
||||
var buf = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
var buf = std.Io.Writer.Allocating.init(exec.call_arena);
|
||||
try buf.writer.writeByte('?');
|
||||
try sp.toString(&buf.writer);
|
||||
return buf.written();
|
||||
@@ -132,30 +134,30 @@ pub fn getHash(self: *const URL) []const u8 {
|
||||
return U.getHash(self._raw);
|
||||
}
|
||||
|
||||
pub fn getSearchParams(self: *URL, page: *Page) !*URLSearchParams {
|
||||
pub fn getSearchParams(self: *URL, exec: *const Execution) !*URLSearchParams {
|
||||
if (self._search_params) |sp| {
|
||||
return sp;
|
||||
}
|
||||
|
||||
// Get current search string (without the '?')
|
||||
const search = try self.getSearch(page);
|
||||
const search = try self.getSearch(exec);
|
||||
const search_value = if (search.len > 0) search[1..] else "";
|
||||
|
||||
const params = try URLSearchParams.init(.{ .query_string = search_value }, page);
|
||||
const params = try URLSearchParams.init(.{ .query_string = search_value }, exec);
|
||||
self._search_params = params;
|
||||
return params;
|
||||
}
|
||||
|
||||
pub fn setHref(self: *URL, value: []const u8, page: *Page) !void {
|
||||
const base = if (U.isCompleteHTTPUrl(value)) page.url else self._raw;
|
||||
const raw = try U.resolve(self._arena orelse page.arena, base, value, .{ .always_dupe = true });
|
||||
pub fn setHref(self: *URL, value: []const u8, exec: *const Execution) !void {
|
||||
const base = if (U.isCompleteHTTPUrl(value)) exec.url.* else self._raw;
|
||||
const raw = try U.resolve(self._arena orelse exec.arena, base, value, .{ .always_dupe = true });
|
||||
self._raw = raw;
|
||||
|
||||
// Update existing searchParams if it exists
|
||||
if (self._search_params) |sp| {
|
||||
const search = U.getSearch(raw);
|
||||
const search_value = if (search.len > 0) search[1..] else "";
|
||||
try sp.updateFromString(search_value, page);
|
||||
try sp.updateFromString(search_value, exec);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,7 +186,7 @@ pub fn setPathname(self: *URL, value: []const u8) !void {
|
||||
self._raw = try U.setPathname(self._raw, value, allocator);
|
||||
}
|
||||
|
||||
pub fn setSearch(self: *URL, value: []const u8, page: *Page) !void {
|
||||
pub fn setSearch(self: *URL, value: []const u8, exec: *const Execution) !void {
|
||||
const allocator = self._arena orelse return error.NoAllocator;
|
||||
self._raw = try U.setSearch(self._raw, value, allocator);
|
||||
|
||||
@@ -192,7 +194,7 @@ pub fn setSearch(self: *URL, value: []const u8, page: *Page) !void {
|
||||
if (self._search_params) |sp| {
|
||||
const search = U.getSearch(self._raw);
|
||||
const search_value = if (search.len > 0) search[1..] else "";
|
||||
try sp.updateFromString(search_value, page);
|
||||
try sp.updateFromString(search_value, exec);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,7 +203,7 @@ pub fn setHash(self: *URL, value: []const u8) !void {
|
||||
self._raw = try U.setHash(self._raw, value, allocator);
|
||||
}
|
||||
|
||||
pub fn toString(self: *const URL, page: *const Page) ![:0]const u8 {
|
||||
pub fn toString(self: *const URL, exec: *const Execution) ![:0]const u8 {
|
||||
const sp = self._search_params orelse {
|
||||
return self._raw;
|
||||
};
|
||||
@@ -217,7 +219,7 @@ pub fn toString(self: *const URL, page: *const Page) ![:0]const u8 {
|
||||
const hash = self.getHash();
|
||||
|
||||
// Build the new URL string
|
||||
var buf = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
var buf = std.Io.Writer.Allocating.init(exec.call_arena);
|
||||
try buf.writer.writeAll(base);
|
||||
|
||||
// Add / if missing (e.g., "https://example.com" -> "https://example.com/")
|
||||
|
||||
@@ -411,7 +411,7 @@ pub fn postMessage(self: *Window, message: js.Value.Temp, target_origin: ?[]cons
|
||||
errdefer target_page.releaseArena(arena);
|
||||
|
||||
// Origin should be the source window's origin (where the message came from)
|
||||
const origin = try source_window._location.getOrigin(page);
|
||||
const origin = try source_window._location.getOrigin(&page.js.execution);
|
||||
const callback = try arena.create(PostMessageCallback);
|
||||
callback.* = .{
|
||||
.arena = arena,
|
||||
@@ -429,27 +429,11 @@ pub fn postMessage(self: *Window, message: js.Value.Temp, target_origin: ?[]cons
|
||||
}
|
||||
|
||||
pub fn btoa(_: *const Window, input: []const u8, page: *Page) ![]const u8 {
|
||||
const encoded_len = std.base64.standard.Encoder.calcSize(input.len);
|
||||
const encoded = try page.call_arena.alloc(u8, encoded_len);
|
||||
return std.base64.standard.Encoder.encode(encoded, input);
|
||||
return @import("encoding/base64.zig").encode(page.call_arena, input);
|
||||
}
|
||||
|
||||
pub fn atob(_: *const Window, input: []const u8, page: *Page) ![]const u8 {
|
||||
const trimmed = std.mem.trim(u8, input, &std.ascii.whitespace);
|
||||
// Forgiving base64 decode per WHATWG spec:
|
||||
// https://infra.spec.whatwg.org/#forgiving-base64-decode
|
||||
// Remove trailing padding to use standard_no_pad decoder
|
||||
const unpadded = std.mem.trimRight(u8, trimmed, "=");
|
||||
|
||||
// Length % 4 == 1 is invalid (can't represent valid base64)
|
||||
if (unpadded.len % 4 == 1) {
|
||||
return error.InvalidCharacterError;
|
||||
}
|
||||
|
||||
const decoded_len = std.base64.standard_no_pad.Decoder.calcSizeForSlice(unpadded) catch return error.InvalidCharacterError;
|
||||
const decoded = try page.call_arena.alloc(u8, decoded_len);
|
||||
std.base64.standard_no_pad.Decoder.decode(decoded, unpadded) catch return error.InvalidCharacterError;
|
||||
return decoded;
|
||||
return @import("encoding/base64.zig").decode(page.call_arena, input);
|
||||
}
|
||||
|
||||
pub fn structuredClone(_: *const Window, value: js.Value) !js.Value {
|
||||
|
||||
154
src/browser/webapi/WorkerGlobalScope.zig
Normal file
154
src/browser/webapi/WorkerGlobalScope.zig
Normal file
@@ -0,0 +1,154 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const JS = @import("../js/js.zig");
|
||||
|
||||
const Console = @import("Console.zig");
|
||||
const Crypto = @import("Crypto.zig");
|
||||
const EventTarget = @import("EventTarget.zig");
|
||||
const Factory = @import("../Factory.zig");
|
||||
const Performance = @import("Performance.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const WorkerGlobalScope = @This();
|
||||
|
||||
// Infrastructure fields (similar to Page)
|
||||
_session: *Session,
|
||||
_factory: *Factory,
|
||||
arena: Allocator,
|
||||
url: [:0]const u8,
|
||||
buf: [1024]u8 = undefined, // same size as page.buf
|
||||
js: *JS.Context = undefined,
|
||||
|
||||
// WebAPI fields
|
||||
_proto: *EventTarget,
|
||||
_console: Console = .init,
|
||||
_crypto: Crypto = .init,
|
||||
_performance: Performance,
|
||||
_on_error: ?JS.Function.Global = null,
|
||||
_on_rejection_handled: ?JS.Function.Global = null,
|
||||
_on_unhandled_rejection: ?JS.Function.Global = null,
|
||||
|
||||
pub fn base(self: *const WorkerGlobalScope) [:0]const u8 {
|
||||
return self.url;
|
||||
}
|
||||
|
||||
pub fn asEventTarget(self: *WorkerGlobalScope) *EventTarget {
|
||||
return self._proto;
|
||||
}
|
||||
|
||||
pub fn getSelf(self: *WorkerGlobalScope) *WorkerGlobalScope {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn getConsole(self: *WorkerGlobalScope) *Console {
|
||||
return &self._console;
|
||||
}
|
||||
|
||||
pub fn getCrypto(self: *WorkerGlobalScope) *Crypto {
|
||||
return &self._crypto;
|
||||
}
|
||||
|
||||
pub fn getPerformance(self: *WorkerGlobalScope) *Performance {
|
||||
return &self._performance;
|
||||
}
|
||||
|
||||
pub fn getOnError(self: *const WorkerGlobalScope) ?JS.Function.Global {
|
||||
return self._on_error;
|
||||
}
|
||||
|
||||
pub fn setOnError(self: *WorkerGlobalScope, setter: ?FunctionSetter) void {
|
||||
self._on_error = getFunctionFromSetter(setter);
|
||||
}
|
||||
|
||||
pub fn getOnRejectionHandled(self: *const WorkerGlobalScope) ?JS.Function.Global {
|
||||
return self._on_rejection_handled;
|
||||
}
|
||||
|
||||
pub fn setOnRejectionHandled(self: *WorkerGlobalScope, setter: ?FunctionSetter) void {
|
||||
self._on_rejection_handled = getFunctionFromSetter(setter);
|
||||
}
|
||||
|
||||
pub fn getOnUnhandledRejection(self: *const WorkerGlobalScope) ?JS.Function.Global {
|
||||
return self._on_unhandled_rejection;
|
||||
}
|
||||
|
||||
pub fn setOnUnhandledRejection(self: *WorkerGlobalScope, setter: ?FunctionSetter) void {
|
||||
self._on_unhandled_rejection = getFunctionFromSetter(setter);
|
||||
}
|
||||
|
||||
pub fn btoa(_: *const WorkerGlobalScope, input: []const u8, exec: *JS.Execution) ![]const u8 {
|
||||
const base64 = @import("encoding/base64.zig");
|
||||
return base64.encode(exec.call_arena, input);
|
||||
}
|
||||
|
||||
pub fn atob(_: *const WorkerGlobalScope, input: []const u8, exec: *JS.Execution) ![]const u8 {
|
||||
const base64 = @import("encoding/base64.zig");
|
||||
return base64.decode(exec.call_arena, input);
|
||||
}
|
||||
|
||||
pub fn structuredClone(_: *const WorkerGlobalScope, value: JS.Value) !JS.Value {
|
||||
return value.structuredClone();
|
||||
}
|
||||
|
||||
// TODO: importScripts - needs script loading infrastructure
|
||||
// TODO: location - needs WorkerLocation
|
||||
// TODO: navigator - needs WorkerNavigator
|
||||
// TODO: Timer functions - need scheduler integration
|
||||
|
||||
const FunctionSetter = union(enum) {
|
||||
func: JS.Function.Global,
|
||||
anything: JS.Value,
|
||||
};
|
||||
|
||||
fn getFunctionFromSetter(setter_: ?FunctionSetter) ?JS.Function.Global {
|
||||
const setter = setter_ orelse return null;
|
||||
return switch (setter) {
|
||||
.func => |func| func,
|
||||
.anything => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
pub const bridge = JS.Bridge(WorkerGlobalScope);
|
||||
|
||||
pub const Meta = struct {
|
||||
pub const name = "WorkerGlobalScope";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
};
|
||||
|
||||
pub const self = bridge.accessor(WorkerGlobalScope.getSelf, null, .{});
|
||||
pub const console = bridge.accessor(WorkerGlobalScope.getConsole, null, .{});
|
||||
pub const crypto = bridge.accessor(WorkerGlobalScope.getCrypto, null, .{});
|
||||
pub const performance = bridge.accessor(WorkerGlobalScope.getPerformance, null, .{});
|
||||
|
||||
pub const onerror = bridge.accessor(WorkerGlobalScope.getOnError, WorkerGlobalScope.setOnError, .{});
|
||||
pub const onrejectionhandled = bridge.accessor(WorkerGlobalScope.getOnRejectionHandled, WorkerGlobalScope.setOnRejectionHandled, .{});
|
||||
pub const onunhandledrejection = bridge.accessor(WorkerGlobalScope.getOnUnhandledRejection, WorkerGlobalScope.setOnUnhandledRejection, .{});
|
||||
|
||||
pub const btoa = bridge.function(WorkerGlobalScope.btoa, .{});
|
||||
pub const atob = bridge.function(WorkerGlobalScope.atob, .{ .dom_exception = true });
|
||||
pub const structuredClone = bridge.function(WorkerGlobalScope.structuredClone, .{});
|
||||
|
||||
// Return false since workers don't have secure-context-only APIs
|
||||
pub const isSecureContext = bridge.property(false, .{ .template = false });
|
||||
};
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Node = @import("../Node.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
@@ -136,7 +137,7 @@ const Iterator = struct {
|
||||
|
||||
const Entry = struct { u32, *Node };
|
||||
|
||||
pub fn next(self: *Iterator, page: *Page) !?Entry {
|
||||
pub fn next(self: *Iterator, page: *const Page) !?Entry {
|
||||
const index = self.index;
|
||||
const node = try self.list.getAtIndex(index, page) orelse return null;
|
||||
self.index = index + 1;
|
||||
|
||||
@@ -43,7 +43,7 @@ const Lookup = std.StringArrayHashMapUnmanaged(void);
|
||||
const WHITESPACE = " \t\n\r\x0C";
|
||||
|
||||
pub fn length(self: *const DOMTokenList, page: *Page) !u32 {
|
||||
const tokens = try self.getTokens(page);
|
||||
const tokens = try self.getTokens(page.call_arena);
|
||||
return @intCast(tokens.count());
|
||||
}
|
||||
|
||||
@@ -82,8 +82,8 @@ pub fn add(self: *DOMTokenList, tokens: []const []const u8, page: *Page) !void {
|
||||
try validateToken(token);
|
||||
}
|
||||
|
||||
var lookup = try self.getTokens(page);
|
||||
const allocator = page.call_arena;
|
||||
var lookup = try self.getTokens(allocator);
|
||||
try lookup.ensureUnusedCapacity(allocator, tokens.len);
|
||||
|
||||
for (tokens) |token| {
|
||||
@@ -98,7 +98,7 @@ pub fn remove(self: *DOMTokenList, tokens: []const []const u8, page: *Page) !voi
|
||||
try validateToken(token);
|
||||
}
|
||||
|
||||
var lookup = try self.getTokens(page);
|
||||
var lookup = try self.getTokens(page.call_arena);
|
||||
for (tokens) |token| {
|
||||
_ = lookup.orderedRemove(token);
|
||||
}
|
||||
@@ -149,7 +149,8 @@ pub fn replace(self: *DOMTokenList, old_token: []const u8, new_token: []const u8
|
||||
return error.InvalidCharacterError;
|
||||
}
|
||||
|
||||
var lookup = try self.getTokens(page);
|
||||
const allocator = page.call_arena;
|
||||
var lookup = try self.getTokens(page.call_arena);
|
||||
|
||||
// Check if old_token exists
|
||||
if (!lookup.contains(old_token)) {
|
||||
@@ -162,7 +163,6 @@ pub fn replace(self: *DOMTokenList, old_token: []const u8, new_token: []const u8
|
||||
return true;
|
||||
}
|
||||
|
||||
const allocator = page.call_arena;
|
||||
// Build new token list preserving order but replacing old with new
|
||||
var new_tokens = try std.ArrayList([]const u8).initCapacity(allocator, lookup.count());
|
||||
var replaced_old = false;
|
||||
@@ -237,14 +237,13 @@ pub fn forEach(self: *DOMTokenList, cb_: js.Function, js_this_: ?js.Object, page
|
||||
}
|
||||
}
|
||||
|
||||
fn getTokens(self: *const DOMTokenList, page: *Page) !Lookup {
|
||||
fn getTokens(self: *const DOMTokenList, allocator: std.mem.Allocator) !Lookup {
|
||||
const value = self.getValue();
|
||||
if (value.len == 0) {
|
||||
return .empty;
|
||||
}
|
||||
|
||||
var list: Lookup = .empty;
|
||||
const allocator = page.call_arena;
|
||||
try list.ensureTotalCapacity(allocator, 4);
|
||||
|
||||
var it = std.mem.tokenizeAny(u8, value, WHITESPACE);
|
||||
|
||||
@@ -24,6 +24,7 @@ const Page = @import("../../Page.zig");
|
||||
const Node = @import("../Node.zig");
|
||||
const Element = @import("../Element.zig");
|
||||
const TreeWalker = @import("../TreeWalker.zig");
|
||||
const Execution = js.Execution;
|
||||
|
||||
const HTMLAllCollection = @This();
|
||||
|
||||
@@ -133,11 +134,11 @@ pub fn callable(self: *HTMLAllCollection, arg: CAllAsFunctionArg, page: *Page) ?
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: *HTMLAllCollection, page: *Page) !*Iterator {
|
||||
pub fn iterator(self: *HTMLAllCollection, exec: *const Execution) !*Iterator {
|
||||
return Iterator.init(.{
|
||||
.list = self,
|
||||
.tw = self._tw.clone(),
|
||||
}, page);
|
||||
}, exec);
|
||||
}
|
||||
|
||||
const GenericIterator = @import("iterator.zig").Entry;
|
||||
@@ -145,7 +146,7 @@ pub const Iterator = GenericIterator(struct {
|
||||
list: *HTMLAllCollection,
|
||||
tw: TreeWalker.FullExcludeSelf,
|
||||
|
||||
pub fn next(self: *@This(), _: *Page) ?*Element {
|
||||
pub fn next(self: *@This(), _: *const Execution) ?*Element {
|
||||
while (self.tw.next()) |node| {
|
||||
if (node.is(Element)) |el| {
|
||||
return el;
|
||||
|
||||
@@ -23,6 +23,7 @@ const Page = @import("../../Page.zig");
|
||||
const Element = @import("../Element.zig");
|
||||
const TreeWalker = @import("../TreeWalker.zig");
|
||||
const NodeLive = @import("node_live.zig").NodeLive;
|
||||
const Execution = js.Execution;
|
||||
|
||||
const Mode = enum {
|
||||
tag,
|
||||
@@ -77,7 +78,7 @@ pub fn getByName(self: *HTMLCollection, name: []const u8, page: *Page) ?*Element
|
||||
};
|
||||
}
|
||||
|
||||
pub fn iterator(self: *HTMLCollection, page: *Page) !*Iterator {
|
||||
pub fn iterator(self: *HTMLCollection, exec: *const Execution) !*Iterator {
|
||||
return Iterator.init(.{
|
||||
.list = self,
|
||||
.tw = switch (self._data) {
|
||||
@@ -94,7 +95,7 @@ pub fn iterator(self: *HTMLCollection, page: *Page) !*Iterator {
|
||||
.form => |*impl| .{ .form = impl._tw.clone() },
|
||||
.empty => .empty,
|
||||
},
|
||||
}, page);
|
||||
}, exec);
|
||||
}
|
||||
|
||||
const GenericIterator = @import("iterator.zig").Entry;
|
||||
@@ -115,7 +116,7 @@ pub const Iterator = GenericIterator(struct {
|
||||
empty: void,
|
||||
},
|
||||
|
||||
pub fn next(self: *@This(), _: *Page) ?*Element {
|
||||
pub fn next(self: *@This(), _: *const Execution) ?*Element {
|
||||
return switch (self.list._data) {
|
||||
.tag => |*impl| impl.nextTw(&self.tw.tag),
|
||||
.tag_name => |*impl| impl.nextTw(&self.tw.tag_name),
|
||||
|
||||
@@ -21,6 +21,7 @@ const lp = @import("lightpanda");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Execution = js.Execution;
|
||||
|
||||
pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
const R = reflect(Inner, field);
|
||||
@@ -38,8 +39,8 @@ pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
pub const js_as_object = true;
|
||||
};
|
||||
|
||||
pub fn init(inner: Inner, page: *Page) !*Self {
|
||||
const self = try page._factory.create(Self{ ._inner = inner });
|
||||
pub fn init(inner: Inner, executor: R.Executor) !*Self {
|
||||
const self = try executor._factory.create(Self{ ._inner = inner });
|
||||
|
||||
if (@hasDecl(Inner, "acquireRef")) {
|
||||
self._inner.acquireRef();
|
||||
@@ -62,8 +63,8 @@ pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
self._rc.acquire();
|
||||
}
|
||||
|
||||
pub fn next(self: *Self, page: *Page) if (R.has_error_return) anyerror!Result else Result {
|
||||
const entry = (if (comptime R.has_error_return) try self._inner.next(page) else self._inner.next(page)) orelse {
|
||||
pub fn next(self: *Self, executor: R.Executor) if (R.has_error_return) anyerror!Result else Result {
|
||||
const entry = (if (comptime R.has_error_return) try self._inner.next(executor) else self._inner.next(executor)) orelse {
|
||||
return .{ .done = true, .value = null };
|
||||
};
|
||||
|
||||
@@ -92,17 +93,22 @@ pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
}
|
||||
|
||||
fn reflect(comptime Inner: type, comptime field: ?[]const u8) Reflect {
|
||||
const R = @typeInfo(@TypeOf(Inner.next)).@"fn".return_type.?;
|
||||
const fn_info = @typeInfo(@TypeOf(Inner.next)).@"fn";
|
||||
const R = fn_info.return_type.?;
|
||||
const has_error_return = @typeInfo(R) == .error_union;
|
||||
// The executor type is the last parameter of inner.next (after self)
|
||||
const Executor = fn_info.params[1].type.?;
|
||||
return .{
|
||||
.has_error_return = has_error_return,
|
||||
.ValueType = ValueType(unwrapOptional(unwrapError(R)), field),
|
||||
.Executor = Executor,
|
||||
};
|
||||
}
|
||||
|
||||
const Reflect = struct {
|
||||
has_error_return: bool,
|
||||
ValueType: type,
|
||||
Executor: type,
|
||||
};
|
||||
|
||||
fn unwrapError(comptime T: type) type {
|
||||
|
||||
50
src/browser/webapi/encoding/base64.zig
Normal file
50
src/browser/webapi/encoding/base64.zig
Normal file
@@ -0,0 +1,50 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! Base64 encoding/decoding helpers for btoa/atob.
|
||||
//! Used by both Window and WorkerGlobalScope.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// Encodes input to base64 (btoa).
|
||||
pub fn encode(alloc: Allocator, input: []const u8) ![]const u8 {
|
||||
const encoded_len = std.base64.standard.Encoder.calcSize(input.len);
|
||||
const encoded = try alloc.alloc(u8, encoded_len);
|
||||
return std.base64.standard.Encoder.encode(encoded, input);
|
||||
}
|
||||
|
||||
/// Decodes base64 input (atob).
|
||||
/// Implements forgiving base64 decode per WHATWG spec.
|
||||
pub fn decode(alloc: Allocator, input: []const u8) ![]const u8 {
|
||||
const trimmed = std.mem.trim(u8, input, &std.ascii.whitespace);
|
||||
// Forgiving base64 decode per WHATWG spec:
|
||||
// https://infra.spec.whatwg.org/#forgiving-base64-decode
|
||||
// Remove trailing padding to use standard_no_pad decoder
|
||||
const unpadded = std.mem.trimRight(u8, trimmed, "=");
|
||||
|
||||
// Length % 4 == 1 is invalid (can't represent valid base64)
|
||||
if (unpadded.len % 4 == 1) {
|
||||
return error.InvalidCharacterError;
|
||||
}
|
||||
|
||||
const decoded_len = std.base64.standard_no_pad.Decoder.calcSizeForSlice(unpadded) catch return error.InvalidCharacterError;
|
||||
const decoded = try alloc.alloc(u8, decoded_len);
|
||||
std.base64.standard_no_pad.Decoder.decode(decoded, unpadded) catch return error.InvalidCharacterError;
|
||||
return decoded;
|
||||
}
|
||||
@@ -127,16 +127,16 @@ fn handleBlobUrl(url: []const u8, resolver: js.PromiseResolver, page: *Page) !js
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
fn httpStartCallback(response: HttpClient.Response) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpStartCallback(transfer: *HttpClient.Transfer) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request start", .{ .url = self._url, .source = "fetch" });
|
||||
}
|
||||
self._response._http_response = response;
|
||||
self._response._transfer = transfer;
|
||||
}
|
||||
|
||||
fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
const self: *Fetch = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
if (self._signal) |signal| {
|
||||
if (signal._aborted) {
|
||||
@@ -145,24 +145,25 @@ fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
}
|
||||
|
||||
const arena = self._response._arena;
|
||||
if (response.contentLength()) |cl| {
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
try self._buf.ensureTotalCapacity(arena, cl);
|
||||
}
|
||||
|
||||
const res = self._response;
|
||||
const header = transfer.response_header.?;
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request header", .{
|
||||
.source = "fetch",
|
||||
.url = self._url,
|
||||
.status = response.status(),
|
||||
.status = header.status,
|
||||
});
|
||||
}
|
||||
|
||||
res._status = response.status().?;
|
||||
res._status_text = std.http.Status.phrase(@enumFromInt(response.status().?)) orelse "";
|
||||
res._url = try arena.dupeZ(u8, response.url());
|
||||
res._is_redirected = response.redirectCount().? > 0;
|
||||
res._status = header.status;
|
||||
res._status_text = std.http.Status.phrase(@enumFromInt(header.status)) orelse "";
|
||||
res._url = try arena.dupeZ(u8, std.mem.span(header.url));
|
||||
res._is_redirected = header.redirect_count > 0;
|
||||
|
||||
// Determine response type based on origin comparison
|
||||
const page_origin = URL.getOrigin(arena, self._page.url) catch null;
|
||||
@@ -182,7 +183,7 @@ fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
res._type = .basic;
|
||||
}
|
||||
|
||||
var it = response.headerIterator();
|
||||
var it = transfer.responseHeaderIterator();
|
||||
while (it.next()) |hdr| {
|
||||
try res._headers.append(hdr.name, hdr.value, self._page);
|
||||
}
|
||||
@@ -190,8 +191,8 @@ fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn httpDataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
// Check if aborted
|
||||
if (self._signal) |signal| {
|
||||
@@ -206,7 +207,7 @@ fn httpDataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
fn httpDoneCallback(ctx: *anyopaque) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(ctx));
|
||||
var response = self._response;
|
||||
response._http_response = null;
|
||||
response._transfer = null;
|
||||
response._body = self._buf.items;
|
||||
|
||||
log.info(.http, "request complete", .{
|
||||
@@ -229,7 +230,7 @@ fn httpErrorCallback(ctx: *anyopaque, _: anyerror) void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(ctx));
|
||||
|
||||
var response = self._response;
|
||||
response._http_response = null;
|
||||
response._transfer = null;
|
||||
// the response is only passed on v8 on success, if we're here, it's safe to
|
||||
// clear this. (defer since `self is in the response's arena).
|
||||
|
||||
@@ -255,7 +256,7 @@ fn httpShutdownCallback(ctx: *anyopaque) void {
|
||||
|
||||
if (self._owns_response) {
|
||||
var response = self._response;
|
||||
response._http_response = null;
|
||||
response._transfer = null;
|
||||
response.deinit(self._page._session);
|
||||
// Do not access `self` after this point: the Fetch struct was
|
||||
// allocated from response._arena which has been released.
|
||||
|
||||
@@ -57,7 +57,7 @@ pub fn get(self: *const FormData, name: []const u8) ?[]const u8 {
|
||||
}
|
||||
|
||||
pub fn getAll(self: *const FormData, name: []const u8, page: *Page) ![]const []const u8 {
|
||||
return self._list.getAll(name, page);
|
||||
return self._list.getAll(page.call_arena, name);
|
||||
}
|
||||
|
||||
pub fn has(self: *const FormData, name: []const u8) bool {
|
||||
@@ -76,16 +76,16 @@ pub fn delete(self: *FormData, name: []const u8) void {
|
||||
self._list.delete(name, null);
|
||||
}
|
||||
|
||||
pub fn keys(self: *FormData, page: *Page) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn keys(self: *FormData, exec: *const js.Execution) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn values(self: *FormData, page: *Page) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn values(self: *FormData, exec: *const js.Execution) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn entries(self: *FormData, page: *Page) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn entries(self: *FormData, exec: *const js.Execution) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn forEach(self: *FormData, cb_: js.Function, js_this_: ?js.Object) !void {
|
||||
|
||||
@@ -20,8 +20,8 @@ pub const InitOpts = union(enum) {
|
||||
pub fn init(opts_: ?InitOpts, page: *Page) !*Headers {
|
||||
const list = if (opts_) |opts| switch (opts) {
|
||||
.obj => |obj| try KeyValueList.copy(page.arena, obj._list),
|
||||
.js_obj => |js_obj| try KeyValueList.fromJsObject(page.arena, js_obj, normalizeHeaderName, page),
|
||||
.strings => |kvs| try KeyValueList.fromArray(page.arena, kvs, normalizeHeaderName, page),
|
||||
.js_obj => |js_obj| try KeyValueList.fromJsObject(page.arena, js_obj, normalizeHeaderName, &page.buf),
|
||||
.strings => |kvs| try KeyValueList.fromArray(page.arena, kvs, normalizeHeaderName, &page.buf),
|
||||
} else KeyValueList.init();
|
||||
|
||||
return page._factory.create(Headers{
|
||||
@@ -30,18 +30,18 @@ pub fn init(opts_: ?InitOpts, page: *Page) !*Headers {
|
||||
}
|
||||
|
||||
pub fn append(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
|
||||
const normalized_name = normalizeHeaderName(name, page);
|
||||
const normalized_name = normalizeHeaderName(name, &page.buf);
|
||||
try self._list.append(page.arena, normalized_name, value);
|
||||
}
|
||||
|
||||
pub fn delete(self: *Headers, name: []const u8, page: *Page) void {
|
||||
const normalized_name = normalizeHeaderName(name, page);
|
||||
const normalized_name = normalizeHeaderName(name, &page.buf);
|
||||
self._list.delete(normalized_name, null);
|
||||
}
|
||||
|
||||
pub fn get(self: *const Headers, name: []const u8, page: *Page) !?[]const u8 {
|
||||
const normalized_name = normalizeHeaderName(name, page);
|
||||
const all_values = try self._list.getAll(normalized_name, page);
|
||||
const normalized_name = normalizeHeaderName(name, &page.buf);
|
||||
const all_values = try self._list.getAll(page.call_arena, normalized_name);
|
||||
|
||||
if (all_values.len == 0) {
|
||||
return null;
|
||||
@@ -53,25 +53,25 @@ pub fn get(self: *const Headers, name: []const u8, page: *Page) !?[]const u8 {
|
||||
}
|
||||
|
||||
pub fn has(self: *const Headers, name: []const u8, page: *Page) bool {
|
||||
const normalized_name = normalizeHeaderName(name, page);
|
||||
const normalized_name = normalizeHeaderName(name, &page.buf);
|
||||
return self._list.has(normalized_name);
|
||||
}
|
||||
|
||||
pub fn set(self: *Headers, name: []const u8, value: []const u8, page: *Page) !void {
|
||||
const normalized_name = normalizeHeaderName(name, page);
|
||||
const normalized_name = normalizeHeaderName(name, &page.buf);
|
||||
try self._list.set(page.arena, normalized_name, value);
|
||||
}
|
||||
|
||||
pub fn keys(self: *Headers, page: *Page) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn keys(self: *Headers, exec: *const js.Execution) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn values(self: *Headers, page: *Page) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn values(self: *Headers, exec: *const js.Execution) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn entries(self: *Headers, page: *Page) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._list }, page);
|
||||
pub fn entries(self: *Headers, exec: *const js.Execution) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._list }, exec);
|
||||
}
|
||||
|
||||
pub fn forEach(self: *Headers, cb_: js.Function, js_this_: ?js.Object) !void {
|
||||
@@ -94,11 +94,11 @@ pub fn populateHttpHeader(self: *Headers, allocator: Allocator, http_headers: *h
|
||||
}
|
||||
}
|
||||
|
||||
fn normalizeHeaderName(name: []const u8, page: *Page) []const u8 {
|
||||
if (name.len > page.buf.len) {
|
||||
fn normalizeHeaderName(name: []const u8, buf: []u8) []const u8 {
|
||||
if (name.len > buf.len) {
|
||||
return name;
|
||||
}
|
||||
return std.ascii.lowerString(&page.buf, name);
|
||||
return std.ascii.lowerString(buf, name);
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
|
||||
@@ -48,7 +48,7 @@ _type: Type,
|
||||
_status_text: []const u8,
|
||||
_url: [:0]const u8,
|
||||
_is_redirected: bool,
|
||||
_http_response: ?HttpClient.Response = null,
|
||||
_transfer: ?*HttpClient.Transfer = null,
|
||||
|
||||
const InitOpts = struct {
|
||||
status: u16 = 200,
|
||||
@@ -81,9 +81,9 @@ pub fn init(body_: ?[]const u8, opts_: ?InitOpts, page: *Page) !*Response {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Response, session: *Session) void {
|
||||
if (self._http_response) |resp| {
|
||||
resp.abort(error.Abort);
|
||||
self._http_response = null;
|
||||
if (self._transfer) |transfer| {
|
||||
transfer.abort(error.Abort);
|
||||
self._transfer = null;
|
||||
}
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
@@ -191,7 +191,7 @@ pub fn clone(self: *const Response, page: *Page) !*Response {
|
||||
._type = self._type,
|
||||
._is_redirected = self._is_redirected,
|
||||
._headers = try Headers.init(.{ .obj = self._headers }, page),
|
||||
._http_response = null,
|
||||
._transfer = null,
|
||||
};
|
||||
return cloned;
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ const Allocator = std.mem.Allocator;
|
||||
const Page = @import("../../Page.zig");
|
||||
const FormData = @import("FormData.zig");
|
||||
const KeyValueList = @import("../KeyValueList.zig");
|
||||
const Execution = js.Execution;
|
||||
|
||||
const URLSearchParams = @This();
|
||||
|
||||
@@ -38,12 +39,12 @@ const InitOpts = union(enum) {
|
||||
query_string: []const u8,
|
||||
};
|
||||
|
||||
pub fn init(opts_: ?InitOpts, page: *Page) !*URLSearchParams {
|
||||
const arena = page.arena;
|
||||
pub fn init(opts_: ?InitOpts, exec: *const Execution) !*URLSearchParams {
|
||||
const arena = exec.arena;
|
||||
const params: KeyValueList = blk: {
|
||||
const opts = opts_ orelse break :blk .empty;
|
||||
switch (opts) {
|
||||
.query_string => |qs| break :blk try paramsFromString(arena, qs, &page.buf),
|
||||
.query_string => |qs| break :blk try paramsFromString(arena, qs, exec.buf),
|
||||
.form_data => |fd| break :blk try KeyValueList.copy(arena, fd._list),
|
||||
.value => |js_val| {
|
||||
// Order matters here; Array is also an Object.
|
||||
@@ -51,24 +52,25 @@ pub fn init(opts_: ?InitOpts, page: *Page) !*URLSearchParams {
|
||||
break :blk try paramsFromArray(arena, js_val.toArray());
|
||||
}
|
||||
if (js_val.isObject()) {
|
||||
break :blk try KeyValueList.fromJsObject(arena, js_val.toObject(), null, page);
|
||||
// normalizer is null, so page won't be used
|
||||
break :blk try KeyValueList.fromJsObject(arena, js_val.toObject(), null, exec.buf);
|
||||
}
|
||||
if (js_val.isString()) |js_str| {
|
||||
break :blk try paramsFromString(arena, try js_str.toSliceWithAlloc(arena), &page.buf);
|
||||
break :blk try paramsFromString(arena, try js_str.toSliceWithAlloc(arena), exec.buf);
|
||||
}
|
||||
return error.InvalidArgument;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
return page._factory.create(URLSearchParams{
|
||||
return exec._factory.create(URLSearchParams{
|
||||
._arena = arena,
|
||||
._params = params,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn updateFromString(self: *URLSearchParams, query_string: []const u8, page: *Page) !void {
|
||||
self._params = try paramsFromString(self._arena, query_string, &page.buf);
|
||||
pub fn updateFromString(self: *URLSearchParams, query_string: []const u8, exec: *const Execution) !void {
|
||||
self._params = try paramsFromString(self._arena, query_string, exec.buf);
|
||||
}
|
||||
|
||||
pub fn getSize(self: *const URLSearchParams) usize {
|
||||
@@ -79,8 +81,8 @@ pub fn get(self: *const URLSearchParams, name: []const u8) ?[]const u8 {
|
||||
return self._params.get(name);
|
||||
}
|
||||
|
||||
pub fn getAll(self: *const URLSearchParams, name: []const u8, page: *Page) ![]const []const u8 {
|
||||
return self._params.getAll(name, page);
|
||||
pub fn getAll(self: *const URLSearchParams, name: []const u8, exec: *const Execution) ![]const []const u8 {
|
||||
return self._params.getAll(exec.call_arena, name);
|
||||
}
|
||||
|
||||
pub fn has(self: *const URLSearchParams, name: []const u8) bool {
|
||||
@@ -99,16 +101,16 @@ pub fn delete(self: *URLSearchParams, name: []const u8, value: ?[]const u8) void
|
||||
self._params.delete(name, value);
|
||||
}
|
||||
|
||||
pub fn keys(self: *URLSearchParams, page: *Page) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._params }, page);
|
||||
pub fn keys(self: *URLSearchParams, exec: *const Execution) !*KeyValueList.KeyIterator {
|
||||
return KeyValueList.KeyIterator.init(.{ .list = self, .kv = &self._params }, exec);
|
||||
}
|
||||
|
||||
pub fn values(self: *URLSearchParams, page: *Page) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._params }, page);
|
||||
pub fn values(self: *URLSearchParams, exec: *const Execution) !*KeyValueList.ValueIterator {
|
||||
return KeyValueList.ValueIterator.init(.{ .list = self, .kv = &self._params }, exec);
|
||||
}
|
||||
|
||||
pub fn entries(self: *URLSearchParams, page: *Page) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._params }, page);
|
||||
pub fn entries(self: *URLSearchParams, exec: *const Execution) !*KeyValueList.EntryIterator {
|
||||
return KeyValueList.EntryIterator.init(.{ .list = self, .kv = &self._params }, exec);
|
||||
}
|
||||
|
||||
pub fn toString(self: *const URLSearchParams, writer: *std.Io.Writer) !void {
|
||||
@@ -314,7 +316,7 @@ pub const Iterator = struct {
|
||||
|
||||
const Entry = struct { []const u8, []const u8 };
|
||||
|
||||
pub fn next(self: *Iterator, _: *Page) !?Iterator.Entry {
|
||||
pub fn next(self: *Iterator, _: *const Execution) !?Iterator.Entry {
|
||||
const index = self.index;
|
||||
const items = self.list._params.items;
|
||||
if (index >= items.len) {
|
||||
@@ -352,8 +354,8 @@ pub const JsApi = struct {
|
||||
pub const sort = bridge.function(URLSearchParams.sort, .{});
|
||||
|
||||
pub const toString = bridge.function(_toString, .{});
|
||||
fn _toString(self: *const URLSearchParams, page: *Page) ![]const u8 {
|
||||
var buf = std.Io.Writer.Allocating.init(page.call_arena);
|
||||
fn _toString(self: *const URLSearchParams, exec: *const Execution) ![]const u8 {
|
||||
var buf = std.Io.Writer.Allocating.init(exec.call_arena);
|
||||
try self.toString(&buf.writer);
|
||||
return buf.written();
|
||||
}
|
||||
|
||||
@@ -43,7 +43,7 @@ _rc: lp.RC(u8) = .{},
|
||||
_page: *Page,
|
||||
_proto: *XMLHttpRequestEventTarget,
|
||||
_arena: Allocator,
|
||||
_http_response: ?HttpClient.Response = null,
|
||||
_transfer: ?*HttpClient.Transfer = null,
|
||||
_active_request: bool = false,
|
||||
|
||||
_url: [:0]const u8 = "",
|
||||
@@ -100,9 +100,9 @@ pub fn init(page: *Page) !*XMLHttpRequest {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *XMLHttpRequest, session: *Session) void {
|
||||
if (self._http_response) |resp| {
|
||||
resp.abort(error.Abort);
|
||||
self._http_response = null;
|
||||
if (self._transfer) |transfer| {
|
||||
transfer.abort(error.Abort);
|
||||
self._transfer = null;
|
||||
}
|
||||
|
||||
if (self._on_ready_state_change) |func| {
|
||||
@@ -184,9 +184,9 @@ pub fn setWithCredentials(self: *XMLHttpRequest, value: bool) !void {
|
||||
// TODO: url should be a union, as it can be multiple things
|
||||
pub fn open(self: *XMLHttpRequest, method_: []const u8, url: [:0]const u8) !void {
|
||||
// Abort any in-progress request
|
||||
if (self._http_response) |transfer| {
|
||||
if (self._transfer) |transfer| {
|
||||
transfer.abort(error.Abort);
|
||||
self._http_response = null;
|
||||
self._transfer = null;
|
||||
}
|
||||
|
||||
// Reset internal state
|
||||
@@ -402,32 +402,34 @@ pub fn getResponseXML(self: *XMLHttpRequest, page: *Page) !?*Node.Document {
|
||||
};
|
||||
}
|
||||
|
||||
fn httpStartCallback(response: HttpClient.Response) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpStartCallback(transfer: *HttpClient.Transfer) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request start", .{ .method = self._method, .url = self._url, .source = "xhr" });
|
||||
}
|
||||
self._http_response = response;
|
||||
self._transfer = transfer;
|
||||
}
|
||||
|
||||
fn httpHeaderCallback(response: HttpClient.Response, header: http.Header) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpHeaderCallback(transfer: *HttpClient.Transfer, header: http.Header) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
const joined = try std.fmt.allocPrint(self._arena, "{s}: {s}", .{ header.name, header.value });
|
||||
try self._response_headers.append(self._arena, joined);
|
||||
}
|
||||
|
||||
fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
const header = &transfer.response_header.?;
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request header", .{
|
||||
.source = "xhr",
|
||||
.url = self._url,
|
||||
.status = response.status(),
|
||||
.status = header.status,
|
||||
});
|
||||
}
|
||||
|
||||
if (response.contentType()) |ct| {
|
||||
if (header.contentType()) |ct| {
|
||||
self._response_mime = Mime.parse(ct) catch |e| {
|
||||
log.info(.http, "invalid content type", .{
|
||||
.content_Type = ct,
|
||||
@@ -438,18 +440,18 @@ fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
};
|
||||
}
|
||||
|
||||
var it = response.headerIterator();
|
||||
var it = transfer.responseHeaderIterator();
|
||||
while (it.next()) |hdr| {
|
||||
const joined = try std.fmt.allocPrint(self._arena, "{s}: {s}", .{ hdr.name, hdr.value });
|
||||
try self._response_headers.append(self._arena, joined);
|
||||
}
|
||||
|
||||
self._response_status = response.status().?;
|
||||
if (response.contentLength()) |cl| {
|
||||
self._response_status = header.status;
|
||||
if (transfer.getContentLength()) |cl| {
|
||||
self._response_len = cl;
|
||||
try self._response_data.ensureTotalCapacity(self._arena, cl);
|
||||
}
|
||||
self._response_url = try self._arena.dupeZ(u8, response.url());
|
||||
self._response_url = try self._arena.dupeZ(u8, std.mem.span(header.url));
|
||||
|
||||
const page = self._page;
|
||||
|
||||
@@ -464,8 +466,8 @@ fn httpHeaderDoneCallback(response: HttpClient.Response) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn httpDataCallback(response: HttpClient.Response, data: []const u8) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(response.ctx));
|
||||
fn httpDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
try self._response_data.appendSlice(self._arena, data);
|
||||
|
||||
const page = self._page;
|
||||
@@ -488,7 +490,7 @@ fn httpDoneCallback(ctx: *anyopaque) !void {
|
||||
|
||||
// Not that the request is done, the http/client will free the transfer
|
||||
// object. It isn't safe to keep it around.
|
||||
self._http_response = null;
|
||||
self._transfer = null;
|
||||
|
||||
const page = self._page;
|
||||
|
||||
@@ -511,23 +513,23 @@ fn httpErrorCallback(ctx: *anyopaque, err: anyerror) void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(ctx));
|
||||
// http client will close it after an error, it isn't safe to keep around
|
||||
self.handleError(err);
|
||||
if (self._http_response != null) {
|
||||
self._http_response = null;
|
||||
if (self._transfer != null) {
|
||||
self._transfer = null;
|
||||
}
|
||||
self.releaseSelfRef();
|
||||
}
|
||||
|
||||
fn httpShutdownCallback(ctx: *anyopaque) void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(ctx));
|
||||
self._http_response = null;
|
||||
self._transfer = null;
|
||||
self.releaseSelfRef();
|
||||
}
|
||||
|
||||
pub fn abort(self: *XMLHttpRequest) void {
|
||||
self.handleError(error.Abort);
|
||||
if (self._http_response) |resp| {
|
||||
self._http_response = null;
|
||||
resp.abort(error.Abort);
|
||||
if (self._transfer) |transfer| {
|
||||
self._transfer = null;
|
||||
transfer.abort(error.Abort);
|
||||
}
|
||||
self.releaseSelfRef();
|
||||
}
|
||||
|
||||
@@ -39,7 +39,6 @@ pub const Scope = enum {
|
||||
telemetry,
|
||||
unknown_prop,
|
||||
mcp,
|
||||
cache,
|
||||
};
|
||||
|
||||
const Opts = struct {
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../log.zig");
|
||||
const builtin = @import("builtin");
|
||||
const net = std.net;
|
||||
const posix = std.posix;
|
||||
@@ -31,10 +30,6 @@ const http = @import("http.zig");
|
||||
const RobotStore = @import("Robots.zig").RobotStore;
|
||||
const WebBotAuth = @import("WebBotAuth.zig");
|
||||
|
||||
const Cache = @import("cache/Cache.zig");
|
||||
const FsCache = @import("cache/FsCache.zig");
|
||||
|
||||
const App = @import("../App.zig");
|
||||
const Network = @This();
|
||||
|
||||
const Listener = struct {
|
||||
@@ -50,12 +45,10 @@ const MAX_TICK_CALLBACKS = 16;
|
||||
|
||||
allocator: Allocator,
|
||||
|
||||
app: *App,
|
||||
config: *const Config,
|
||||
ca_blob: ?http.Blob,
|
||||
robot_store: RobotStore,
|
||||
web_bot_auth: ?WebBotAuth,
|
||||
cache: ?Cache,
|
||||
|
||||
connections: []http.Connection,
|
||||
available: std.DoublyLinkedList = .{},
|
||||
@@ -207,7 +200,7 @@ fn globalDeinit() void {
|
||||
libcurl.curl_global_cleanup();
|
||||
}
|
||||
|
||||
pub fn init(allocator: Allocator, app: *App, config: *const Config) !Network {
|
||||
pub fn init(allocator: Allocator, config: *const Config) !Network {
|
||||
globalInit(allocator);
|
||||
errdefer globalDeinit();
|
||||
|
||||
@@ -240,22 +233,6 @@ pub fn init(allocator: Allocator, app: *App, config: *const Config) !Network {
|
||||
else
|
||||
null;
|
||||
|
||||
const cache = if (config.httpCacheDir()) |cache_dir_path|
|
||||
Cache{
|
||||
.kind = .{
|
||||
.fs = FsCache.init(cache_dir_path) catch |e| {
|
||||
log.err(.cache, "failed to init", .{
|
||||
.kind = "FsCache",
|
||||
.path = cache_dir_path,
|
||||
.err = e,
|
||||
});
|
||||
return e;
|
||||
},
|
||||
},
|
||||
}
|
||||
else
|
||||
null;
|
||||
|
||||
return .{
|
||||
.allocator = allocator,
|
||||
.config = config,
|
||||
@@ -267,10 +244,8 @@ pub fn init(allocator: Allocator, app: *App, config: *const Config) !Network {
|
||||
.available = available,
|
||||
.connections = connections,
|
||||
|
||||
.app = app,
|
||||
.robot_store = RobotStore.init(allocator),
|
||||
.web_bot_auth = web_bot_auth,
|
||||
.cache = cache,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -303,8 +278,6 @@ pub fn deinit(self: *Network) void {
|
||||
wba.deinit(self.allocator);
|
||||
}
|
||||
|
||||
if (self.cache) |*cache| cache.deinit();
|
||||
|
||||
globalDeinit();
|
||||
}
|
||||
|
||||
|
||||
213
src/network/cache/Cache.zig
vendored
213
src/network/cache/Cache.zig
vendored
@@ -1,213 +0,0 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../../log.zig");
|
||||
const Http = @import("../http.zig");
|
||||
const FsCache = @import("FsCache.zig");
|
||||
|
||||
/// A browser-wide cache for resources across the network.
|
||||
/// This mostly conforms to RFC9111 with regards to caching behavior.
|
||||
pub const Cache = @This();
|
||||
|
||||
kind: union(enum) {
|
||||
fs: FsCache,
|
||||
},
|
||||
|
||||
pub fn deinit(self: *Cache) void {
|
||||
return switch (self.kind) {
|
||||
inline else => |*c| c.deinit(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get(self: *Cache, arena: std.mem.Allocator, req: CacheRequest) ?CachedResponse {
|
||||
return switch (self.kind) {
|
||||
inline else => |*c| c.get(arena, req),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn put(self: *Cache, metadata: CachedMetadata, body: []const u8) !void {
|
||||
return switch (self.kind) {
|
||||
inline else => |*c| c.put(metadata, body),
|
||||
};
|
||||
}
|
||||
|
||||
pub const CacheControl = struct {
|
||||
max_age: u64,
|
||||
|
||||
pub fn parse(value: []const u8) ?CacheControl {
|
||||
var cc: CacheControl = .{ .max_age = undefined };
|
||||
|
||||
var max_age_set = false;
|
||||
var max_s_age_set = false;
|
||||
var is_public = false;
|
||||
|
||||
var iter = std.mem.splitScalar(u8, value, ',');
|
||||
while (iter.next()) |part| {
|
||||
const directive = std.mem.trim(u8, part, &std.ascii.whitespace);
|
||||
if (std.ascii.eqlIgnoreCase(directive, "no-store")) {
|
||||
return null;
|
||||
} else if (std.ascii.eqlIgnoreCase(directive, "no-cache")) {
|
||||
return null;
|
||||
} else if (std.ascii.eqlIgnoreCase(directive, "public")) {
|
||||
is_public = true;
|
||||
} else if (std.ascii.startsWithIgnoreCase(directive, "max-age=")) {
|
||||
if (!max_s_age_set) {
|
||||
if (std.fmt.parseInt(u64, directive[8..], 10) catch null) |max_age| {
|
||||
cc.max_age = max_age;
|
||||
max_age_set = true;
|
||||
}
|
||||
}
|
||||
} else if (std.ascii.startsWithIgnoreCase(directive, "s-maxage=")) {
|
||||
if (std.fmt.parseInt(u64, directive[9..], 10) catch null) |max_age| {
|
||||
cc.max_age = max_age;
|
||||
max_age_set = true;
|
||||
max_s_age_set = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!max_age_set) return null;
|
||||
if (!is_public) return null;
|
||||
if (cc.max_age == 0) return null;
|
||||
|
||||
return cc;
|
||||
}
|
||||
};
|
||||
|
||||
pub const CachedMetadata = struct {
|
||||
url: [:0]const u8,
|
||||
content_type: []const u8,
|
||||
|
||||
status: u16,
|
||||
stored_at: i64,
|
||||
age_at_store: u64,
|
||||
|
||||
cache_control: CacheControl,
|
||||
/// Response Headers
|
||||
headers: []const Http.Header,
|
||||
|
||||
/// These are Request Headers used by Vary.
|
||||
vary_headers: []const Http.Header,
|
||||
|
||||
pub fn format(self: CachedMetadata, writer: *std.Io.Writer) !void {
|
||||
try writer.print("url={s} | status={d} | content_type={s} | max_age={d} | vary=[", .{
|
||||
self.url,
|
||||
self.status,
|
||||
self.content_type,
|
||||
self.cache_control.max_age,
|
||||
});
|
||||
|
||||
// Logging all headers gets pretty verbose...
|
||||
// so we just log the Vary ones that matter for caching.
|
||||
|
||||
if (self.vary_headers.len > 0) {
|
||||
for (self.vary_headers, 0..) |hdr, i| {
|
||||
if (i > 0) try writer.print(", ", .{});
|
||||
try writer.print("{s}: {s}", .{ hdr.name, hdr.value });
|
||||
}
|
||||
}
|
||||
try writer.print("]", .{});
|
||||
}
|
||||
};
|
||||
|
||||
pub const CacheRequest = struct {
|
||||
url: []const u8,
|
||||
timestamp: i64,
|
||||
request_headers: []const Http.Header,
|
||||
};
|
||||
|
||||
pub const CachedData = union(enum) {
|
||||
buffer: []const u8,
|
||||
file: struct {
|
||||
file: std.fs.File,
|
||||
offset: usize,
|
||||
len: usize,
|
||||
},
|
||||
|
||||
pub fn format(self: CachedData, writer: *std.Io.Writer) !void {
|
||||
switch (self) {
|
||||
.buffer => |buf| try writer.print("buffer({d} bytes)", .{buf.len}),
|
||||
.file => |f| try writer.print("file(offset={d}, len={d} bytes)", .{ f.offset, f.len }),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const CachedResponse = struct {
|
||||
metadata: CachedMetadata,
|
||||
data: CachedData,
|
||||
|
||||
pub fn format(self: *const CachedResponse, writer: *std.Io.Writer) !void {
|
||||
try writer.print("metadata=(", .{});
|
||||
try self.metadata.format(writer);
|
||||
try writer.print("), data=", .{});
|
||||
try self.data.format(writer);
|
||||
}
|
||||
};
|
||||
|
||||
pub fn tryCache(
|
||||
arena: std.mem.Allocator,
|
||||
timestamp: i64,
|
||||
url: [:0]const u8,
|
||||
status: u16,
|
||||
content_type: ?[]const u8,
|
||||
cache_control: ?[]const u8,
|
||||
vary: ?[]const u8,
|
||||
age: ?[]const u8,
|
||||
has_set_cookie: bool,
|
||||
has_authorization: bool,
|
||||
) !?CachedMetadata {
|
||||
if (status != 200) {
|
||||
log.debug(.cache, "no store", .{ .url = url, .code = status, .reason = "status" });
|
||||
return null;
|
||||
}
|
||||
if (has_set_cookie) {
|
||||
log.debug(.cache, "no store", .{ .url = url, .reason = "has_cookies" });
|
||||
return null;
|
||||
}
|
||||
if (has_authorization) {
|
||||
log.debug(.cache, "no store", .{ .url = url, .reason = "has_authorization" });
|
||||
return null;
|
||||
}
|
||||
if (vary) |v| if (std.mem.eql(u8, v, "*")) {
|
||||
log.debug(.cache, "no store", .{ .url = url, .vary = v, .reason = "vary" });
|
||||
return null;
|
||||
};
|
||||
const cc = blk: {
|
||||
if (cache_control == null) {
|
||||
log.debug(.cache, "no store", .{ .url = url, .reason = "no cache control" });
|
||||
return null;
|
||||
}
|
||||
if (CacheControl.parse(cache_control.?)) |cc| {
|
||||
break :blk cc;
|
||||
}
|
||||
log.debug(.cache, "no store", .{ .url = url, .cache_control = cache_control.?, .reason = "cache control" });
|
||||
return null;
|
||||
};
|
||||
|
||||
return .{
|
||||
.url = try arena.dupeZ(u8, url),
|
||||
.content_type = if (content_type) |ct| try arena.dupe(u8, ct) else "application/octet-stream",
|
||||
.status = status,
|
||||
.stored_at = timestamp,
|
||||
.age_at_store = if (age) |a| std.fmt.parseInt(u64, a, 10) catch 0 else 0,
|
||||
.cache_control = cc,
|
||||
.headers = &.{},
|
||||
.vary_headers = &.{},
|
||||
};
|
||||
}
|
||||
612
src/network/cache/FsCache.zig
vendored
612
src/network/cache/FsCache.zig
vendored
@@ -1,612 +0,0 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../../log.zig");
|
||||
const Cache = @import("Cache.zig");
|
||||
const Http = @import("../http.zig");
|
||||
const CacheRequest = Cache.CacheRequest;
|
||||
const CachedMetadata = Cache.CachedMetadata;
|
||||
const CachedResponse = Cache.CachedResponse;
|
||||
|
||||
const CACHE_VERSION: usize = 1;
|
||||
const LOCK_STRIPES = 16;
|
||||
comptime {
|
||||
std.debug.assert(std.math.isPowerOfTwo(LOCK_STRIPES));
|
||||
}
|
||||
|
||||
pub const FsCache = @This();
|
||||
|
||||
dir: std.fs.Dir,
|
||||
locks: [LOCK_STRIPES]std.Thread.Mutex = .{std.Thread.Mutex{}} ** LOCK_STRIPES,
|
||||
|
||||
const CacheMetadataJson = struct {
|
||||
version: usize,
|
||||
metadata: CachedMetadata,
|
||||
};
|
||||
|
||||
fn getLockPtr(self: *FsCache, key: *const [HASHED_KEY_LEN]u8) *std.Thread.Mutex {
|
||||
const lock_idx = std.hash.Wyhash.hash(0, key[0..]) & (LOCK_STRIPES - 1);
|
||||
return &self.locks[lock_idx];
|
||||
}
|
||||
|
||||
const BODY_LEN_HEADER_LEN = 8;
|
||||
const HASHED_KEY_LEN = 64;
|
||||
const HASHED_PATH_LEN = HASHED_KEY_LEN + 6;
|
||||
const HASHED_TMP_PATH_LEN = HASHED_PATH_LEN + 4;
|
||||
|
||||
fn hashKey(key: []const u8) [HASHED_KEY_LEN]u8 {
|
||||
var digest: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined;
|
||||
std.crypto.hash.sha2.Sha256.hash(key, &digest, .{});
|
||||
var hex: [HASHED_KEY_LEN]u8 = undefined;
|
||||
_ = std.fmt.bufPrint(&hex, "{s}", .{std.fmt.bytesToHex(&digest, .lower)}) catch unreachable;
|
||||
return hex;
|
||||
}
|
||||
|
||||
fn cachePath(hashed_key: *const [HASHED_KEY_LEN]u8) [HASHED_PATH_LEN]u8 {
|
||||
var path: [HASHED_PATH_LEN]u8 = undefined;
|
||||
_ = std.fmt.bufPrint(&path, "{s}.cache", .{hashed_key}) catch unreachable;
|
||||
return path;
|
||||
}
|
||||
|
||||
fn cacheTmpPath(hashed_key: *const [HASHED_KEY_LEN]u8) [HASHED_TMP_PATH_LEN]u8 {
|
||||
var path: [HASHED_TMP_PATH_LEN]u8 = undefined;
|
||||
_ = std.fmt.bufPrint(&path, "{s}.cache.tmp", .{hashed_key}) catch unreachable;
|
||||
return path;
|
||||
}
|
||||
|
||||
pub fn init(path: []const u8) !FsCache {
|
||||
const cwd = std.fs.cwd();
|
||||
|
||||
cwd.makeDir(path) catch |err| switch (err) {
|
||||
error.PathAlreadyExists => {},
|
||||
else => return err,
|
||||
};
|
||||
|
||||
const dir = try cwd.openDir(path, .{ .iterate = true });
|
||||
return .{ .dir = dir };
|
||||
}
|
||||
|
||||
pub fn deinit(self: *FsCache) void {
|
||||
self.dir.close();
|
||||
}
|
||||
|
||||
pub fn get(self: *FsCache, arena: std.mem.Allocator, req: CacheRequest) ?Cache.CachedResponse {
|
||||
const hashed_key = hashKey(req.url);
|
||||
const cache_p = cachePath(&hashed_key);
|
||||
|
||||
const lock = self.getLockPtr(&hashed_key);
|
||||
lock.lock();
|
||||
defer lock.unlock();
|
||||
|
||||
const file = self.dir.openFile(&cache_p, .{ .mode = .read_only }) catch |e| {
|
||||
switch (e) {
|
||||
std.fs.File.OpenError.FileNotFound => {
|
||||
log.debug(.cache, "miss", .{ .url = req.url, .hash = &hashed_key, .reason = "missing" });
|
||||
},
|
||||
else => |err| {
|
||||
log.warn(.cache, "open file err", .{ .url = req.url, .err = err });
|
||||
},
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
var cleanup = false;
|
||||
defer if (cleanup) {
|
||||
file.close();
|
||||
self.dir.deleteFile(&cache_p) catch |e| {
|
||||
log.err(.cache, "clean fail", .{ .url = req.url, .file = &cache_p, .err = e });
|
||||
};
|
||||
};
|
||||
|
||||
var file_buf: [1024]u8 = undefined;
|
||||
var len_buf: [BODY_LEN_HEADER_LEN]u8 = undefined;
|
||||
|
||||
var file_reader = file.reader(&file_buf);
|
||||
const file_reader_iface = &file_reader.interface;
|
||||
|
||||
file_reader_iface.readSliceAll(&len_buf) catch |e| {
|
||||
log.warn(.cache, "read header", .{ .url = req.url, .err = e });
|
||||
cleanup = true;
|
||||
return null;
|
||||
};
|
||||
const body_len = std.mem.readInt(u64, &len_buf, .little);
|
||||
|
||||
// Now we read metadata.
|
||||
file_reader.seekTo(body_len + BODY_LEN_HEADER_LEN) catch |e| {
|
||||
log.warn(.cache, "seek metadata", .{ .url = req.url, .err = e });
|
||||
cleanup = true;
|
||||
return null;
|
||||
};
|
||||
|
||||
var json_reader = std.json.Reader.init(arena, file_reader_iface);
|
||||
const cache_file: CacheMetadataJson = std.json.parseFromTokenSourceLeaky(
|
||||
CacheMetadataJson,
|
||||
arena,
|
||||
&json_reader,
|
||||
.{ .allocate = .alloc_always },
|
||||
) catch |e| {
|
||||
// Warn because malformed metadata can be a deeper symptom.
|
||||
log.warn(.cache, "miss", .{ .url = req.url, .err = e, .reason = "malformed metadata" });
|
||||
cleanup = true;
|
||||
return null;
|
||||
};
|
||||
|
||||
if (cache_file.version != CACHE_VERSION) {
|
||||
log.debug(.cache, "miss", .{
|
||||
.url = req.url,
|
||||
.reason = "version mismatch",
|
||||
.expected = CACHE_VERSION,
|
||||
.got = cache_file.version,
|
||||
});
|
||||
cleanup = true;
|
||||
return null;
|
||||
}
|
||||
|
||||
const metadata = cache_file.metadata;
|
||||
|
||||
// Check entry expiration.
|
||||
const now = req.timestamp;
|
||||
const age = (now - metadata.stored_at) + @as(i64, @intCast(metadata.age_at_store));
|
||||
if (age < 0 or @as(u64, @intCast(age)) >= metadata.cache_control.max_age) {
|
||||
log.debug(.cache, "miss", .{ .url = req.url, .reason = "expired" });
|
||||
cleanup = true;
|
||||
return null;
|
||||
}
|
||||
|
||||
// If we have Vary headers, ensure they are present & matching.
|
||||
for (metadata.vary_headers) |vary_hdr| {
|
||||
const name = vary_hdr.name;
|
||||
const value = vary_hdr.value;
|
||||
|
||||
const incoming = for (req.request_headers) |h| {
|
||||
if (std.ascii.eqlIgnoreCase(h.name, name)) break h.value;
|
||||
} else "";
|
||||
|
||||
if (!std.ascii.eqlIgnoreCase(value, incoming)) {
|
||||
log.debug(.cache, "miss", .{
|
||||
.url = req.url,
|
||||
.reason = "vary mismatch",
|
||||
.header = name,
|
||||
.expected = value,
|
||||
.got = incoming,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// On the case of a hash collision.
|
||||
if (!std.ascii.eqlIgnoreCase(metadata.url, req.url)) {
|
||||
log.warn(.cache, "collision", .{ .url = req.url, .expected = metadata.url, .got = req.url });
|
||||
cleanup = true;
|
||||
return null;
|
||||
}
|
||||
|
||||
log.debug(.cache, "hit", .{ .url = req.url, .hash = &hashed_key });
|
||||
|
||||
return .{
|
||||
.metadata = metadata,
|
||||
.data = .{
|
||||
.file = .{
|
||||
.file = file,
|
||||
.offset = BODY_LEN_HEADER_LEN,
|
||||
.len = body_len,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn put(self: *FsCache, meta: CachedMetadata, body: []const u8) !void {
|
||||
const hashed_key = hashKey(meta.url);
|
||||
const cache_p = cachePath(&hashed_key);
|
||||
const cache_tmp_p = cacheTmpPath(&hashed_key);
|
||||
|
||||
const lock = self.getLockPtr(&hashed_key);
|
||||
lock.lock();
|
||||
defer lock.unlock();
|
||||
|
||||
const file = self.dir.createFile(&cache_tmp_p, .{ .truncate = true }) catch |e| {
|
||||
log.err(.cache, "create file", .{ .url = meta.url, .file = &cache_tmp_p, .err = e });
|
||||
return e;
|
||||
};
|
||||
defer file.close();
|
||||
|
||||
var writer_buf: [1024]u8 = undefined;
|
||||
var file_writer = file.writer(&writer_buf);
|
||||
var file_writer_iface = &file_writer.interface;
|
||||
|
||||
var len_buf: [8]u8 = undefined;
|
||||
std.mem.writeInt(u64, &len_buf, body.len, .little);
|
||||
|
||||
file_writer_iface.writeAll(&len_buf) catch |e| {
|
||||
log.err(.cache, "write body len", .{ .url = meta.url, .err = e });
|
||||
return e;
|
||||
};
|
||||
file_writer_iface.writeAll(body) catch |e| {
|
||||
log.err(.cache, "write body", .{ .url = meta.url, .err = e });
|
||||
return e;
|
||||
};
|
||||
std.json.Stringify.value(
|
||||
CacheMetadataJson{ .version = CACHE_VERSION, .metadata = meta },
|
||||
.{ .whitespace = .minified },
|
||||
file_writer_iface,
|
||||
) catch |e| {
|
||||
log.err(.cache, "write metadata", .{ .url = meta.url, .err = e });
|
||||
return e;
|
||||
};
|
||||
file_writer_iface.flush() catch |e| {
|
||||
log.err(.cache, "flush", .{ .url = meta.url, .err = e });
|
||||
return e;
|
||||
};
|
||||
self.dir.rename(&cache_tmp_p, &cache_p) catch |e| {
|
||||
log.err(.cache, "rename", .{ .url = meta.url, .from = &cache_tmp_p, .to = &cache_p, .err = e });
|
||||
return e;
|
||||
};
|
||||
|
||||
log.debug(.cache, "put", .{ .url = meta.url, .hash = &hashed_key, .body_len = body.len });
|
||||
}
|
||||
|
||||
const testing = std.testing;
|
||||
|
||||
/// Creates a fresh temporary directory and an FsCache rooted inside it.
/// On success the caller owns both handles and must call
/// `cache.deinit()` and `tmp.cleanup()` when done.
fn setupCache() !struct { tmp: testing.TmpDir, cache: Cache } {
    var tmp_dir = testing.tmpDir(.{});
    errdefer tmp_dir.cleanup();

    // FsCache.init wants an absolute path; resolve the tmp dir and free
    // the path once the cache has been constructed.
    const root = try tmp_dir.dir.realpathAlloc(testing.allocator, ".");
    defer testing.allocator.free(root);

    return .{
        .tmp = tmp_dir,
        .cache = .{ .kind = .{ .fs = try FsCache.init(root) } },
    };
}
|
||||
|
||||
test "FsCache: basic put and get" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const now = std.time.timestamp();
    const body = "hello world";

    // Store an entry that stays fresh for 600 seconds from `now`.
    const meta: CachedMetadata = .{
        .url = "https://example.com",
        .content_type = "text/html",
        .status = 200,
        .stored_at = now,
        .age_at_store = 0,
        .cache_control = .{ .max_age = 600 },
        .headers = &.{},
        .vary_headers = &.{},
    };
    try setup.cache.put(meta, body);

    // A lookup at the store timestamp must hit.
    const hit = setup.cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{},
    }) orelse return error.CacheMiss;

    const cached = hit.data.file;
    defer cached.file.close();

    // The body is stored at `offset` within the cache file; seek there
    // and read exactly `len` bytes back.
    var read_buf: [64]u8 = undefined;
    var reader = cached.file.reader(&read_buf);
    try reader.seekTo(cached.offset);

    const contents = try reader.interface.readAlloc(testing.allocator, cached.len);
    defer testing.allocator.free(contents);
    try testing.expectEqualStrings(body, contents);
}
|
||||
|
||||
test "FsCache: get expiration" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const stored_at = 5000;
    // The entry was already 900s old when stored with a 1000s max-age,
    // so only 100s of freshness remain past `stored_at`.
    const meta: CachedMetadata = .{
        .url = "https://example.com",
        .content_type = "text/html",
        .status = 200,
        .stored_at = stored_at,
        .age_at_store = 900,
        .cache_control = .{ .max_age = 1000 },
        .headers = &.{},
        .vary_headers = &.{},
    };
    try setup.cache.put(meta, "hello world");

    // 50s after storing: still within the freshness window -> hit.
    const fresh = setup.cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = stored_at + 50,
        .request_headers = &.{},
    }) orelse return error.CacheMiss;
    fresh.data.file.file.close();

    // 200s after storing: total age 1100 > max-age 1000 -> miss.
    try testing.expectEqual(null, setup.cache.get(
        arena.allocator(),
        .{
            .url = "https://example.com",
            .timestamp = stored_at + 200,
            .request_headers = &.{},
        },
    ));

    // Even a lookup back at the store timestamp now misses.
    // NOTE(review): this assumes the stale read above evicted the entry
    // from disk — behavior inherited from the original expectation.
    try testing.expectEqual(null, setup.cache.get(
        arena.allocator(),
        .{
            .url = "https://example.com",
            .timestamp = stored_at,
            .request_headers = &.{},
        },
    ));
}
|
||||
|
||||
test "FsCache: put override" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    // First write: store and read back the original body.
    {
        const meta: CachedMetadata = .{
            .url = "https://example.com",
            .content_type = "text/html",
            .status = 200,
            .stored_at = 5000,
            .age_at_store = 900,
            .cache_control = .{ .max_age = 1000 },
            .headers = &.{},
            .vary_headers = &.{},
        };
        try setup.cache.put(meta, "hello world");

        const hit = setup.cache.get(arena.allocator(), .{
            .url = "https://example.com",
            .timestamp = 5000,
            .request_headers = &.{},
        }) orelse return error.CacheMiss;
        const cached = hit.data.file;
        defer cached.file.close();

        var read_buf: [64]u8 = undefined;
        var reader = cached.file.reader(&read_buf);
        try reader.seekTo(cached.offset);

        const contents = try reader.interface.readAlloc(testing.allocator, cached.len);
        defer testing.allocator.free(contents);

        try testing.expectEqualStrings("hello world", contents);
    }

    // Second write to the same URL: must replace the first entry, and a
    // subsequent lookup must return the new body.
    {
        const meta: CachedMetadata = .{
            .url = "https://example.com",
            .content_type = "text/html",
            .status = 200,
            .stored_at = 10000,
            .age_at_store = 0,
            .cache_control = .{ .max_age = 2000 },
            .headers = &.{},
            .vary_headers = &.{},
        };
        try setup.cache.put(meta, "goodbye world");

        const hit = setup.cache.get(arena.allocator(), .{
            .url = "https://example.com",
            .timestamp = 10000,
            .request_headers = &.{},
        }) orelse return error.CacheMiss;
        const cached = hit.data.file;
        defer cached.file.close();

        var read_buf: [64]u8 = undefined;
        var reader = cached.file.reader(&read_buf);
        try reader.seekTo(cached.offset);

        const contents = try reader.interface.readAlloc(testing.allocator, cached.len);
        defer testing.allocator.free(contents);

        try testing.expectEqualStrings("goodbye world", contents);
    }
}
|
||||
|
||||
test "FsCache: garbage file" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    // Plant an unparseable blob directly at the path the cache would
    // use for this URL.
    const hashed_key = hashKey("https://example.com");
    const cache_p = cachePath(&hashed_key);
    {
        const file = try setup.cache.kind.fs.dir.createFile(&cache_p, .{});
        // defer (instead of a plain close after the write) so the fd is
        // released even when writeAll errors; the previous version
        // leaked the handle on that path.
        defer file.close();
        try file.writeAll("this is not a valid cache file !@#$%");
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    // A corrupt cache file must surface as a plain miss, not an error.
    try testing.expectEqual(
        null,
        setup.cache.get(arena.allocator(), .{
            .url = "https://example.com",
            .timestamp = 5000,
            .request_headers = &.{},
        }),
    );
}
|
||||
|
||||
test "FsCache: vary hit and miss" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const now = std.time.timestamp();
    // The stored response varies on Accept-Encoding: gzip.
    const meta: CachedMetadata = .{
        .url = "https://example.com",
        .content_type = "text/html",
        .status = 200,
        .stored_at = now,
        .age_at_store = 0,
        .cache_control = .{ .max_age = 600 },
        .headers = &.{},
        .vary_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
        },
    };
    try cache.put(meta, "hello world");

    // Request with the matching vary header -> hit.
    const first = cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
        },
    }) orelse return error.CacheMiss;
    first.data.file.file.close();

    // Same header with a different value -> miss.
    try testing.expectEqual(null, cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{
            .{ .name = "Accept-Encoding", .value = "br" },
        },
    }));

    // Header missing entirely -> miss.
    try testing.expectEqual(null, cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{},
    }));

    // Vary mismatches must not evict the entry: the matching request
    // still hits afterwards.
    const second = cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
        },
    }) orelse return error.CacheMiss;
    second.data.file.file.close();
}
|
||||
|
||||
test "FsCache: vary multiple headers" {
    var setup = try setupCache();
    defer {
        setup.cache.deinit();
        setup.tmp.cleanup();
    }

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const now = std.time.timestamp();
    // The stored response varies on two request headers at once.
    const meta: CachedMetadata = .{
        .url = "https://example.com",
        .content_type = "text/html",
        .status = 200,
        .stored_at = now,
        .age_at_store = 0,
        .cache_control = .{ .max_age = 600 },
        .headers = &.{},
        .vary_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
            .{ .name = "Accept-Language", .value = "en" },
        },
    };
    try cache.put(meta, "hello world");

    // Both vary headers match -> hit.
    const hit = cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
            .{ .name = "Accept-Language", .value = "en" },
        },
    }) orelse return error.CacheMiss;
    hit.data.file.file.close();

    // One of the two varied headers differs -> miss.
    try testing.expectEqual(null, cache.get(arena.allocator(), .{
        .url = "https://example.com",
        .timestamp = now,
        .request_headers = &.{
            .{ .name = "Accept-Encoding", .value = "gzip" },
            .{ .name = "Accept-Language", .value = "fr" },
        },
    }));
}
|
||||
@@ -79,7 +79,7 @@ pub const Headers = struct {
|
||||
self.headers = updated_headers;
|
||||
}
|
||||
|
||||
pub fn parseHeader(header_str: []const u8) ?Header {
|
||||
fn parseHeader(header_str: []const u8) ?Header {
|
||||
const colon_pos = std.mem.indexOfScalar(u8, header_str, ':') orelse return null;
|
||||
|
||||
const name = std.mem.trim(u8, header_str[0..colon_pos], " \t");
|
||||
@@ -88,9 +88,22 @@ pub const Headers = struct {
|
||||
return .{ .name = name, .value = value };
|
||||
}
|
||||
|
||||
pub fn iterator(self: Headers) HeaderIterator {
|
||||
return .{ .curl_slist = .{ .header = self.headers } };
|
||||
pub fn iterator(self: *Headers) Iterator {
|
||||
return .{
|
||||
.header = self.headers,
|
||||
};
|
||||
}
|
||||
|
||||
const Iterator = struct {
|
||||
header: [*c]libcurl.CurlSList,
|
||||
|
||||
pub fn next(self: *Iterator) ?Header {
|
||||
const h = self.header orelse return null;
|
||||
|
||||
self.header = h.*.next;
|
||||
return parseHeader(std.mem.span(@as([*:0]const u8, @ptrCast(h.*.data))));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
// In normal cases, the header iterator comes from the curl linked list.
|
||||
@@ -99,7 +112,6 @@ pub const Headers = struct {
|
||||
// This union, is an iterator that exposes the same API for either case.
|
||||
pub const HeaderIterator = union(enum) {
|
||||
curl: CurlHeaderIterator,
|
||||
curl_slist: CurlSListIterator,
|
||||
list: ListHeaderIterator,
|
||||
|
||||
pub fn next(self: *HeaderIterator) ?Header {
|
||||
@@ -108,19 +120,6 @@ pub const HeaderIterator = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn collect(self: *HeaderIterator, allocator: std.mem.Allocator) !std.ArrayList(Header) {
|
||||
var list: std.ArrayList(Header) = .empty;
|
||||
|
||||
while (self.next()) |hdr| {
|
||||
try list.append(allocator, .{
|
||||
.name = try allocator.dupe(u8, hdr.name),
|
||||
.value = try allocator.dupe(u8, hdr.value),
|
||||
});
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
const CurlHeaderIterator = struct {
|
||||
conn: *const Connection,
|
||||
prev: ?*libcurl.CurlHeader = null,
|
||||
@@ -137,16 +136,6 @@ pub const HeaderIterator = union(enum) {
|
||||
}
|
||||
};
|
||||
|
||||
const CurlSListIterator = struct {
|
||||
header: [*c]libcurl.CurlSList,
|
||||
|
||||
pub fn next(self: *CurlSListIterator) ?Header {
|
||||
const h = self.header orelse return null;
|
||||
self.header = h.*.next;
|
||||
return Headers.parseHeader(std.mem.span(@as([*:0]const u8, @ptrCast(h.*.data))));
|
||||
}
|
||||
};
|
||||
|
||||
const ListHeaderIterator = struct {
|
||||
index: usize = 0,
|
||||
list: []const Header,
|
||||
|
||||
Reference in New Issue
Block a user