mirror of
https://github.com/lightpanda-io/browser.git
synced 2026-03-24 05:33:16 +00:00
Merge branch 'main' into semantic-tree
This commit is contained in:
2
.github/actions/install/action.yml
vendored
2
.github/actions/install/action.yml
vendored
@@ -13,7 +13,7 @@ inputs:
|
||||
zig-v8:
|
||||
description: 'zig v8 version to install'
|
||||
required: false
|
||||
default: 'v0.3.1'
|
||||
default: 'v0.3.2'
|
||||
v8:
|
||||
description: 'v8 version to install'
|
||||
required: false
|
||||
|
||||
@@ -3,7 +3,7 @@ FROM debian:stable-slim
|
||||
ARG MINISIG=0.12
|
||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||
ARG V8=14.0.365.4
|
||||
ARG ZIG_V8=v0.3.1
|
||||
ARG ZIG_V8=v0.3.2
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN apt-get update -yq && \
|
||||
|
||||
23
build.zig
23
build.zig
@@ -64,7 +64,7 @@ pub fn build(b: *Build) !void {
|
||||
b.default_step.dependOn(fmt_step);
|
||||
|
||||
try linkV8(b, mod, enable_asan, enable_tsan, prebuilt_v8_path);
|
||||
try linkCurl(b, mod);
|
||||
try linkCurl(b, mod, enable_tsan);
|
||||
try linkHtml5Ever(b, mod);
|
||||
|
||||
break :blk mod;
|
||||
@@ -200,19 +200,19 @@ fn linkHtml5Ever(b: *Build, mod: *Build.Module) !void {
|
||||
mod.addObjectFile(obj);
|
||||
}
|
||||
|
||||
fn linkCurl(b: *Build, mod: *Build.Module) !void {
|
||||
fn linkCurl(b: *Build, mod: *Build.Module, is_tsan: bool) !void {
|
||||
const target = mod.resolved_target.?;
|
||||
|
||||
const curl = buildCurl(b, target, mod.optimize.?);
|
||||
const curl = buildCurl(b, target, mod.optimize.?, is_tsan);
|
||||
mod.linkLibrary(curl);
|
||||
|
||||
const zlib = buildZlib(b, target, mod.optimize.?);
|
||||
const zlib = buildZlib(b, target, mod.optimize.?, is_tsan);
|
||||
curl.root_module.linkLibrary(zlib);
|
||||
|
||||
const brotli = buildBrotli(b, target, mod.optimize.?);
|
||||
const brotli = buildBrotli(b, target, mod.optimize.?, is_tsan);
|
||||
for (brotli) |lib| curl.root_module.linkLibrary(lib);
|
||||
|
||||
const nghttp2 = buildNghttp2(b, target, mod.optimize.?);
|
||||
const nghttp2 = buildNghttp2(b, target, mod.optimize.?, is_tsan);
|
||||
curl.root_module.linkLibrary(nghttp2);
|
||||
|
||||
const boringssl = buildBoringSsl(b, target, mod.optimize.?);
|
||||
@@ -229,13 +229,14 @@ fn linkCurl(b: *Build, mod: *Build.Module) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Build.Step.Compile {
|
||||
fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) *Build.Step.Compile {
|
||||
const dep = b.dependency("zlib", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
|
||||
const lib = b.addLibrary(.{ .name = "z", .root_module = mod });
|
||||
@@ -260,13 +261,14 @@ fn buildZlib(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.Opti
|
||||
return lib;
|
||||
}
|
||||
|
||||
fn buildBrotli(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) [3]*Build.Step.Compile {
|
||||
fn buildBrotli(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) [3]*Build.Step.Compile {
|
||||
const dep = b.dependency("brotli", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("c/include"));
|
||||
|
||||
@@ -322,13 +324,14 @@ fn buildBoringSsl(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin
|
||||
return .{ ssl, crypto };
|
||||
}
|
||||
|
||||
fn buildNghttp2(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Build.Step.Compile {
|
||||
fn buildNghttp2(b: *Build, target: Build.ResolvedTarget, optimize: std.builtin.OptimizeMode, is_tsan: bool) *Build.Step.Compile {
|
||||
const dep = b.dependency("nghttp2", .{});
|
||||
|
||||
const mod = b.createModule(.{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("lib/includes"));
|
||||
|
||||
@@ -373,6 +376,7 @@ fn buildCurl(
|
||||
b: *Build,
|
||||
target: Build.ResolvedTarget,
|
||||
optimize: std.builtin.OptimizeMode,
|
||||
is_tsan: bool,
|
||||
) *Build.Step.Compile {
|
||||
const dep = b.dependency("curl", .{});
|
||||
|
||||
@@ -380,6 +384,7 @@ fn buildCurl(
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.link_libc = true,
|
||||
.sanitize_thread = is_tsan,
|
||||
});
|
||||
mod.addIncludePath(dep.path("lib"));
|
||||
mod.addIncludePath(dep.path("include"));
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
.minimum_zig_version = "0.15.2",
|
||||
.dependencies = .{
|
||||
.v8 = .{
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.1.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH64J7BAC81mkf6G9RbEJxS-W3TIRl5iFnShwbqCqy",
|
||||
.url = "https://github.com/lightpanda-io/zig-v8-fork/archive/refs/tags/v0.3.2.tar.gz",
|
||||
.hash = "v8-0.0.0-xddH6wx-BABNgL7YIDgbnFgKZuXZ68yZNngNSrV6OjrY",
|
||||
},
|
||||
//.v8 = .{ .path = "../zig-v8-fork" },
|
||||
// .v8 = .{ .path = "../zig-v8-fork" },
|
||||
.brotli = .{
|
||||
// v1.2.0
|
||||
.url = "https://github.com/google/brotli/archive/028fb5a23661f123017c060daa546b55cf4bde29.tar.gz",
|
||||
|
||||
25
src/App.zig
25
src/App.zig
@@ -25,23 +25,20 @@ const Config = @import("Config.zig");
|
||||
const Snapshot = @import("browser/js/Snapshot.zig");
|
||||
const Platform = @import("browser/js/Platform.zig");
|
||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
||||
const RobotStore = @import("browser/Robots.zig").RobotStore;
|
||||
|
||||
pub const Http = @import("http/Http.zig");
|
||||
const Network = @import("network/Runtime.zig");
|
||||
pub const ArenaPool = @import("ArenaPool.zig");
|
||||
|
||||
const App = @This();
|
||||
|
||||
http: Http,
|
||||
network: Network,
|
||||
config: *const Config,
|
||||
platform: Platform,
|
||||
snapshot: Snapshot,
|
||||
telemetry: Telemetry,
|
||||
allocator: Allocator,
|
||||
arena_pool: ArenaPool,
|
||||
robots: RobotStore,
|
||||
app_dir_path: ?[]const u8,
|
||||
shutdown: bool = false,
|
||||
|
||||
pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
const app = try allocator.create(App);
|
||||
@@ -50,8 +47,7 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
app.* = .{
|
||||
.config = config,
|
||||
.allocator = allocator,
|
||||
.robots = RobotStore.init(allocator),
|
||||
.http = undefined,
|
||||
.network = undefined,
|
||||
.platform = undefined,
|
||||
.snapshot = undefined,
|
||||
.app_dir_path = undefined,
|
||||
@@ -59,8 +55,8 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
.arena_pool = undefined,
|
||||
};
|
||||
|
||||
app.http = try Http.init(allocator, &app.robots, config);
|
||||
errdefer app.http.deinit();
|
||||
app.network = try Network.init(allocator, config);
|
||||
errdefer app.network.deinit();
|
||||
|
||||
app.platform = try Platform.init();
|
||||
errdefer app.platform.deinit();
|
||||
@@ -79,19 +75,18 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
|
||||
return app;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *App) void {
|
||||
if (@atomicRmw(bool, &self.shutdown, .Xchg, true, .monotonic)) {
|
||||
return;
|
||||
}
|
||||
pub fn shutdown(self: *const App) bool {
|
||||
return self.network.shutdown.load(.acquire);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *App) void {
|
||||
const allocator = self.allocator;
|
||||
if (self.app_dir_path) |app_dir_path| {
|
||||
allocator.free(app_dir_path);
|
||||
self.app_dir_path = null;
|
||||
}
|
||||
self.telemetry.deinit();
|
||||
self.robots.deinit();
|
||||
self.http.deinit();
|
||||
self.network.deinit();
|
||||
self.snapshot.deinit();
|
||||
self.platform.deinit();
|
||||
self.arena_pool.deinit();
|
||||
|
||||
@@ -31,6 +31,7 @@ pub const RunMode = enum {
|
||||
mcp,
|
||||
};
|
||||
|
||||
pub const MAX_LISTENERS = 16;
|
||||
pub const CDP_MAX_HTTP_REQUEST_SIZE = 4096;
|
||||
|
||||
// max message size
|
||||
@@ -153,6 +154,13 @@ pub fn userAgentSuffix(self: *const Config) ?[]const u8 {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn cdpTimeout(self: *const Config) usize {
|
||||
return switch (self.mode) {
|
||||
.serve => |opts| if (opts.timeout > 604_800) 604_800_000 else @as(usize, opts.timeout) * 1000,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn maxConnections(self: *const Config) u16 {
|
||||
return switch (self.mode) {
|
||||
.serve => |opts| opts.cdp_max_connections,
|
||||
|
||||
@@ -21,7 +21,7 @@ const lp = @import("lightpanda");
|
||||
|
||||
const log = @import("log.zig");
|
||||
const Page = @import("browser/Page.zig");
|
||||
const Transfer = @import("http/Client.zig").Transfer;
|
||||
const Transfer = @import("browser/HttpClient.zig").Transfer;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
|
||||
114
src/Server.zig
114
src/Server.zig
@@ -18,8 +18,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const net = std.net;
|
||||
const posix = std.posix;
|
||||
|
||||
@@ -30,16 +28,13 @@ const log = @import("log.zig");
|
||||
const App = @import("App.zig");
|
||||
const Config = @import("Config.zig");
|
||||
const CDP = @import("cdp/cdp.zig").CDP;
|
||||
const Net = @import("Net.zig");
|
||||
const Http = @import("http/Http.zig");
|
||||
const HttpClient = @import("http/Client.zig");
|
||||
const Net = @import("network/websocket.zig");
|
||||
const HttpClient = @import("browser/HttpClient.zig");
|
||||
|
||||
const Server = @This();
|
||||
|
||||
app: *App,
|
||||
shutdown: std.atomic.Value(bool) = .init(false),
|
||||
allocator: Allocator,
|
||||
listener: ?posix.socket_t,
|
||||
json_version_response: []const u8,
|
||||
|
||||
// Thread management
|
||||
@@ -48,103 +43,52 @@ clients: std.ArrayList(*Client) = .{},
|
||||
client_mutex: std.Thread.Mutex = .{},
|
||||
clients_pool: std.heap.MemoryPool(Client),
|
||||
|
||||
pub fn init(app: *App, address: net.Address) !Server {
|
||||
pub fn init(app: *App, address: net.Address) !*Server {
|
||||
const allocator = app.allocator;
|
||||
const json_version_response = try buildJSONVersionResponse(allocator, address);
|
||||
errdefer allocator.free(json_version_response);
|
||||
|
||||
return .{
|
||||
const self = try allocator.create(Server);
|
||||
errdefer allocator.destroy(self);
|
||||
|
||||
self.* = .{
|
||||
.app = app,
|
||||
.listener = null,
|
||||
.allocator = allocator,
|
||||
.json_version_response = json_version_response,
|
||||
.clients_pool = std.heap.MemoryPool(Client).init(app.allocator),
|
||||
.clients_pool = std.heap.MemoryPool(Client).init(allocator),
|
||||
};
|
||||
|
||||
try self.app.network.bind(address, self, onAccept);
|
||||
log.info(.app, "server running", .{ .address = address });
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
/// Interrupts the server so that main can complete normally and call all defer handlers.
|
||||
pub fn stop(self: *Server) void {
|
||||
if (self.shutdown.swap(true, .release)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Shutdown all active clients
|
||||
pub fn deinit(self: *Server) void {
|
||||
// Stop all active clients
|
||||
{
|
||||
self.client_mutex.lock();
|
||||
defer self.client_mutex.unlock();
|
||||
|
||||
for (self.clients.items) |client| {
|
||||
client.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// Linux and BSD/macOS handle canceling a socket blocked on accept differently.
|
||||
// For Linux, we use std.shutdown, which will cause accept to return error.SocketNotListening (EINVAL).
|
||||
// For BSD, shutdown will return an error. Instead we call posix.close, which will result with error.ConnectionAborted (BADF).
|
||||
if (self.listener) |listener| switch (builtin.target.os.tag) {
|
||||
.linux => posix.shutdown(listener, .recv) catch |err| {
|
||||
log.warn(.app, "listener shutdown", .{ .err = err });
|
||||
},
|
||||
.macos, .freebsd, .netbsd, .openbsd => {
|
||||
self.listener = null;
|
||||
posix.close(listener);
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Server) void {
|
||||
if (!self.shutdown.load(.acquire)) {
|
||||
self.stop();
|
||||
}
|
||||
|
||||
self.joinThreads();
|
||||
if (self.listener) |listener| {
|
||||
posix.close(listener);
|
||||
self.listener = null;
|
||||
}
|
||||
self.clients.deinit(self.allocator);
|
||||
self.clients_pool.deinit();
|
||||
self.allocator.free(self.json_version_response);
|
||||
self.allocator.destroy(self);
|
||||
}
|
||||
|
||||
pub fn run(self: *Server, address: net.Address, timeout_ms: u32) !void {
|
||||
const flags = posix.SOCK.STREAM | posix.SOCK.CLOEXEC | posix.SOCK.NONBLOCK;
|
||||
const listener = try posix.socket(address.any.family, flags, posix.IPPROTO.TCP);
|
||||
self.listener = listener;
|
||||
|
||||
try posix.setsockopt(listener, posix.SOL.SOCKET, posix.SO.REUSEADDR, &std.mem.toBytes(@as(c_int, 1)));
|
||||
if (@hasDecl(posix.TCP, "NODELAY")) {
|
||||
try posix.setsockopt(listener, posix.IPPROTO.TCP, posix.TCP.NODELAY, &std.mem.toBytes(@as(c_int, 1)));
|
||||
}
|
||||
|
||||
try posix.bind(listener, &address.any, address.getOsSockLen());
|
||||
try posix.listen(listener, self.app.config.maxPendingConnections());
|
||||
|
||||
log.info(.app, "server running", .{ .address = address });
|
||||
while (!self.shutdown.load(.acquire)) {
|
||||
const socket = posix.accept(listener, null, null, posix.SOCK.NONBLOCK) catch |err| {
|
||||
switch (err) {
|
||||
error.SocketNotListening, error.ConnectionAborted => {
|
||||
log.info(.app, "server stopped", .{});
|
||||
break;
|
||||
},
|
||||
error.WouldBlock => {
|
||||
std.Thread.sleep(10 * std.time.ns_per_ms);
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
log.err(.app, "CDP accept", .{ .err = err });
|
||||
std.Thread.sleep(std.time.ns_per_s);
|
||||
continue;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
self.spawnWorker(socket, timeout_ms) catch |err| {
|
||||
log.err(.app, "CDP spawn", .{ .err = err });
|
||||
posix.close(socket);
|
||||
};
|
||||
}
|
||||
fn onAccept(ctx: *anyopaque, socket: posix.socket_t) void {
|
||||
const self: *Server = @ptrCast(@alignCast(ctx));
|
||||
const timeout_ms: u32 = @intCast(self.app.config.cdpTimeout());
|
||||
self.spawnWorker(socket, timeout_ms) catch |err| {
|
||||
log.err(.app, "CDP spawn", .{ .err = err });
|
||||
posix.close(socket);
|
||||
};
|
||||
}
|
||||
|
||||
fn handleConnection(self: *Server, socket: posix.socket_t, timeout_ms: u32) void {
|
||||
@@ -173,10 +117,10 @@ fn handleConnection(self: *Server, socket: posix.socket_t, timeout_ms: u32) void
|
||||
self.registerClient(client);
|
||||
defer self.unregisterClient(client);
|
||||
|
||||
// Check shutdown after registering to avoid missing stop() signal.
|
||||
// If stop() already iterated over clients, this client won't receive stop()
|
||||
// Check shutdown after registering to avoid missing the stop signal.
|
||||
// If deinit() already iterated over clients, this client won't receive stop()
|
||||
// and would block joinThreads() indefinitely.
|
||||
if (self.shutdown.load(.acquire)) {
|
||||
if (self.app.shutdown()) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -213,7 +157,7 @@ fn unregisterClient(self: *Server, client: *Client) void {
|
||||
}
|
||||
|
||||
fn spawnWorker(self: *Server, socket: posix.socket_t, timeout_ms: u32) !void {
|
||||
if (self.shutdown.load(.acquire)) {
|
||||
if (self.app.shutdown()) {
|
||||
return error.ShuttingDown;
|
||||
}
|
||||
|
||||
@@ -283,7 +227,7 @@ pub const Client = struct {
|
||||
log.info(.app, "client connected", .{ .ip = client_address });
|
||||
}
|
||||
|
||||
const http = try app.http.createClient(allocator);
|
||||
const http = try HttpClient.init(allocator, &app.network);
|
||||
errdefer http.deinit();
|
||||
|
||||
return .{
|
||||
|
||||
@@ -24,7 +24,7 @@ const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
const js = @import("js/js.zig");
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../App.zig");
|
||||
const HttpClient = @import("../http/Client.zig");
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
|
||||
const ArenaPool = App.ArenaPool;
|
||||
|
||||
|
||||
@@ -205,7 +205,7 @@ pub fn dispatch(self: *EventManager, target: *EventTarget, event: *Event) Dispat
|
||||
|
||||
pub fn dispatchOpts(self: *EventManager, target: *EventTarget, event: *Event, comptime opts: DispatchOpts) DispatchError!void {
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, self.page);
|
||||
defer event.deinit(false, self.page._session);
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.event, "eventManager.dispatch", .{ .type = event._type_string.str(), .bubbles = event._bubbles });
|
||||
@@ -234,7 +234,7 @@ pub fn dispatchDirect(self: *EventManager, target: *EventTarget, event: *Event,
|
||||
const page = self.page;
|
||||
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, page);
|
||||
defer event.deinit(false, page._session);
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.event, "dispatchDirect", .{ .type = event._type_string, .context = opts.context });
|
||||
|
||||
@@ -48,13 +48,11 @@ const Factory = @This();
|
||||
_arena: Allocator,
|
||||
_slab: SlabAllocator,
|
||||
|
||||
pub fn init(arena: Allocator) !*Factory {
|
||||
const self = try arena.create(Factory);
|
||||
self.* = .{
|
||||
pub fn init(arena: Allocator) Factory {
|
||||
return .{
|
||||
._arena = arena,
|
||||
._slab = SlabAllocator.init(arena, 128),
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
// this is a root object
|
||||
@@ -249,16 +247,15 @@ fn eventInit(arena: Allocator, typ: String, value: anytype) !Event {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn blob(self: *Factory, child: anytype) !*@TypeOf(child) {
|
||||
const allocator = self._slab.allocator();
|
||||
|
||||
pub fn blob(_: *const Factory, arena: Allocator, child: anytype) !*@TypeOf(child) {
|
||||
// Special case: Blob has slice and mime fields, so we need manual setup
|
||||
const chain = try PrototypeChain(
|
||||
&.{ Blob, @TypeOf(child) },
|
||||
).allocate(allocator);
|
||||
).allocate(arena);
|
||||
|
||||
const blob_ptr = chain.get(0);
|
||||
blob_ptr.* = .{
|
||||
._arena = arena,
|
||||
._type = unionInit(Blob.Type, chain.get(1)),
|
||||
._slice = "",
|
||||
._mime = "",
|
||||
@@ -273,14 +270,16 @@ pub fn abstractRange(self: *Factory, child: anytype, page: *Page) !*@TypeOf(chil
|
||||
const chain = try PrototypeChain(&.{ AbstractRange, @TypeOf(child) }).allocate(allocator);
|
||||
|
||||
const doc = page.document.asNode();
|
||||
chain.set(0, AbstractRange{
|
||||
const abstract_range = chain.get(0);
|
||||
abstract_range.* = AbstractRange{
|
||||
._type = unionInit(AbstractRange.Type, chain.get(1)),
|
||||
._end_offset = 0,
|
||||
._start_offset = 0,
|
||||
._end_container = doc,
|
||||
._start_container = doc,
|
||||
});
|
||||
};
|
||||
chain.setLeaf(1, child);
|
||||
page._live_ranges.append(&abstract_range._range_link);
|
||||
return chain.get(1);
|
||||
}
|
||||
|
||||
|
||||
@@ -17,28 +17,29 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const builtin = @import("builtin");
|
||||
const posix = std.posix;
|
||||
|
||||
const Net = @import("../Net.zig");
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("../log.zig");
|
||||
const Net = @import("../network/http.zig");
|
||||
const Network = @import("../network/Runtime.zig");
|
||||
const Config = @import("../Config.zig");
|
||||
const URL = @import("../browser/URL.zig");
|
||||
const Notification = @import("../Notification.zig");
|
||||
const CookieJar = @import("../browser/webapi/storage/Cookie.zig").Jar;
|
||||
const Robots = @import("../browser/Robots.zig");
|
||||
const Robots = @import("../network/Robots.zig");
|
||||
const RobotStore = Robots.RobotStore;
|
||||
const posix = std.posix;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
const Method = Net.Method;
|
||||
const ResponseHead = Net.ResponseHead;
|
||||
const HeaderIterator = Net.HeaderIterator;
|
||||
pub const Method = Net.Method;
|
||||
pub const Headers = Net.Headers;
|
||||
pub const ResponseHead = Net.ResponseHead;
|
||||
pub const HeaderIterator = Net.HeaderIterator;
|
||||
|
||||
// This is loosely tied to a browser Page. Loading all the <scripts>, doing
|
||||
// XHR requests, and loading imports all happens through here. Sine the app
|
||||
@@ -77,8 +78,7 @@ queue: TransferQueue,
|
||||
// The main app allocator
|
||||
allocator: Allocator,
|
||||
|
||||
// Reference to the App-owned Robot Store.
|
||||
robot_store: *RobotStore,
|
||||
network: *Network,
|
||||
// Queue of requests that depend on a robots.txt.
|
||||
// Allows us to fetch the robots.txt just once.
|
||||
pending_robots_queue: std.StringHashMapUnmanaged(std.ArrayList(Request)) = .empty,
|
||||
@@ -97,8 +97,6 @@ http_proxy: ?[:0]const u8 = null,
|
||||
// CDP.
|
||||
use_proxy: bool,
|
||||
|
||||
config: *const Config,
|
||||
|
||||
cdp_client: ?CDPClient = null,
|
||||
|
||||
// libcurl can monitor arbitrary sockets, this lets us use libcurl to poll
|
||||
@@ -121,14 +119,14 @@ pub const CDPClient = struct {
|
||||
|
||||
const TransferQueue = std.DoublyLinkedList;
|
||||
|
||||
pub fn init(allocator: Allocator, ca_blob: ?Net.Blob, robot_store: *RobotStore, config: *const Config) !*Client {
|
||||
pub fn init(allocator: Allocator, network: *Network) !*Client {
|
||||
var transfer_pool = std.heap.MemoryPool(Transfer).init(allocator);
|
||||
errdefer transfer_pool.deinit();
|
||||
|
||||
const client = try allocator.create(Client);
|
||||
errdefer allocator.destroy(client);
|
||||
|
||||
var handles = try Net.Handles.init(allocator, ca_blob, config);
|
||||
var handles = try Net.Handles.init(allocator, network.ca_blob, network.config);
|
||||
errdefer handles.deinit(allocator);
|
||||
|
||||
// Set transfer callbacks on each connection.
|
||||
@@ -136,7 +134,7 @@ pub fn init(allocator: Allocator, ca_blob: ?Net.Blob, robot_store: *RobotStore,
|
||||
try conn.setCallbacks(Transfer.headerCallback, Transfer.dataCallback);
|
||||
}
|
||||
|
||||
const http_proxy = config.httpProxy();
|
||||
const http_proxy = network.config.httpProxy();
|
||||
|
||||
client.* = .{
|
||||
.queue = .{},
|
||||
@@ -144,10 +142,9 @@ pub fn init(allocator: Allocator, ca_blob: ?Net.Blob, robot_store: *RobotStore,
|
||||
.intercepted = 0,
|
||||
.handles = handles,
|
||||
.allocator = allocator,
|
||||
.robot_store = robot_store,
|
||||
.network = network,
|
||||
.http_proxy = http_proxy,
|
||||
.use_proxy = http_proxy != null,
|
||||
.config = config,
|
||||
.transfer_pool = transfer_pool,
|
||||
};
|
||||
|
||||
@@ -170,7 +167,7 @@ pub fn deinit(self: *Client) void {
|
||||
}
|
||||
|
||||
pub fn newHeaders(self: *const Client) !Net.Headers {
|
||||
return Net.Headers.init(self.config.http_headers.user_agent_header);
|
||||
return Net.Headers.init(self.network.config.http_headers.user_agent_header);
|
||||
}
|
||||
|
||||
pub fn abort(self: *Client) void {
|
||||
@@ -255,12 +252,12 @@ pub fn tick(self: *Client, timeout_ms: u32) !PerformStatus {
|
||||
}
|
||||
|
||||
pub fn request(self: *Client, req: Request) !void {
|
||||
if (self.config.obeyRobots()) {
|
||||
if (self.network.config.obeyRobots()) {
|
||||
const robots_url = try URL.getRobotsUrl(self.allocator, req.url);
|
||||
errdefer self.allocator.free(robots_url);
|
||||
|
||||
// If we have this robots cached, we can take a fast path.
|
||||
if (self.robot_store.get(robots_url)) |robot_entry| {
|
||||
if (self.network.robot_store.get(robots_url)) |robot_entry| {
|
||||
defer self.allocator.free(robots_url);
|
||||
|
||||
switch (robot_entry) {
|
||||
@@ -401,18 +398,18 @@ fn robotsDoneCallback(ctx_ptr: *anyopaque) !void {
|
||||
switch (ctx.status) {
|
||||
200 => {
|
||||
if (ctx.buffer.items.len > 0) {
|
||||
const robots: ?Robots = ctx.client.robot_store.robotsFromBytes(
|
||||
ctx.client.config.http_headers.user_agent,
|
||||
const robots: ?Robots = ctx.client.network.robot_store.robotsFromBytes(
|
||||
ctx.client.network.config.http_headers.user_agent,
|
||||
ctx.buffer.items,
|
||||
) catch blk: {
|
||||
log.warn(.browser, "failed to parse robots", .{ .robots_url = ctx.robots_url });
|
||||
// If we fail to parse, we just insert it as absent and ignore.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
break :blk null;
|
||||
};
|
||||
|
||||
if (robots) |r| {
|
||||
try ctx.client.robot_store.put(ctx.robots_url, r);
|
||||
try ctx.client.network.robot_store.put(ctx.robots_url, r);
|
||||
const path = URL.getPathname(ctx.req.url);
|
||||
allowed = r.isAllowed(path);
|
||||
}
|
||||
@@ -421,12 +418,12 @@ fn robotsDoneCallback(ctx_ptr: *anyopaque) !void {
|
||||
404 => {
|
||||
log.debug(.http, "robots not found", .{ .url = ctx.robots_url });
|
||||
// If we get a 404, we just insert it as absent.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
},
|
||||
else => {
|
||||
log.debug(.http, "unexpected status on robots", .{ .url = ctx.robots_url, .status = ctx.status });
|
||||
// If we get an unexpected status, we just insert as absent.
|
||||
try ctx.client.robot_store.putAbsent(ctx.robots_url);
|
||||
try ctx.client.network.robot_store.putAbsent(ctx.robots_url);
|
||||
},
|
||||
}
|
||||
|
||||
@@ -609,7 +606,7 @@ fn makeTransfer(self: *Client, req: Request) !*Transfer {
|
||||
.req = req,
|
||||
.ctx = req.ctx,
|
||||
.client = self,
|
||||
.max_response_size = self.config.httpMaxResponseSize(),
|
||||
.max_response_size = self.network.config.httpMaxResponseSize(),
|
||||
};
|
||||
return transfer;
|
||||
}
|
||||
@@ -706,7 +703,7 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
|
||||
}
|
||||
|
||||
var header_list = req.headers;
|
||||
try conn.secretHeaders(&header_list, &self.config.http_headers); // Add headers that must be hidden from intercepts
|
||||
try conn.secretHeaders(&header_list, &self.network.config.http_headers); // Add headers that must be hidden from intercepts
|
||||
try conn.setHeaders(&header_list);
|
||||
|
||||
// Add cookies.
|
||||
@@ -54,6 +54,7 @@ const Performance = @import("webapi/Performance.zig");
|
||||
const Screen = @import("webapi/Screen.zig");
|
||||
const VisualViewport = @import("webapi/VisualViewport.zig");
|
||||
const PerformanceObserver = @import("webapi/PerformanceObserver.zig");
|
||||
const AbstractRange = @import("webapi/AbstractRange.zig");
|
||||
const MutationObserver = @import("webapi/MutationObserver.zig");
|
||||
const IntersectionObserver = @import("webapi/IntersectionObserver.zig");
|
||||
const CustomElementDefinition = @import("webapi/CustomElementDefinition.zig");
|
||||
@@ -62,8 +63,7 @@ const PageTransitionEvent = @import("webapi/event/PageTransitionEvent.zig");
|
||||
const NavigationKind = @import("webapi/navigation/root.zig").NavigationKind;
|
||||
const KeyboardEvent = @import("webapi/event/KeyboardEvent.zig");
|
||||
|
||||
const Http = App.Http;
|
||||
const Net = @import("../Net.zig");
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
const ArenaPool = App.ArenaPool;
|
||||
|
||||
const timestamp = @import("../datetime.zig").timestamp;
|
||||
@@ -143,6 +143,9 @@ _to_load: std.ArrayList(*Element.Html) = .{},
|
||||
|
||||
_script_manager: ScriptManager,
|
||||
|
||||
// List of active live ranges (for mutation updates per DOM spec)
|
||||
_live_ranges: std.DoublyLinkedList = .{},
|
||||
|
||||
// List of active MutationObservers
|
||||
_mutation_observers: std.DoublyLinkedList = .{},
|
||||
_mutation_delivery_scheduled: bool = false,
|
||||
@@ -191,6 +194,8 @@ _queued_navigation: ?*QueuedNavigation = null,
|
||||
// The URL of the current page
|
||||
url: [:0]const u8 = "about:blank",
|
||||
|
||||
origin: ?[]const u8 = null,
|
||||
|
||||
// The base url specifies the base URL used to resolve the relative urls.
|
||||
// It is set by a <base> tag.
|
||||
// If null the url must be used.
|
||||
@@ -213,14 +218,6 @@ arena: Allocator,
|
||||
// from JS. Best arena to use, when possible.
|
||||
call_arena: Allocator,
|
||||
|
||||
arena_pool: *ArenaPool,
|
||||
// In Debug, we use this to see if anything fails to release an arena back to
|
||||
// the pool.
|
||||
_arena_pool_leak_track: (if (IS_DEBUG) std.AutoHashMapUnmanaged(usize, struct {
|
||||
owner: []const u8,
|
||||
count: usize,
|
||||
}) else void) = if (IS_DEBUG) .empty else {},
|
||||
|
||||
parent: ?*Page,
|
||||
window: *Window,
|
||||
document: *Document,
|
||||
@@ -247,17 +244,11 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.page, "page.init", .{});
|
||||
}
|
||||
const browser = session.browser;
|
||||
const arena_pool = browser.arena_pool;
|
||||
|
||||
const page_arena = if (parent) |p| p.arena else try arena_pool.acquire();
|
||||
errdefer if (parent == null) arena_pool.release(page_arena);
|
||||
|
||||
var factory = if (parent) |p| p._factory else try Factory.init(page_arena);
|
||||
|
||||
const call_arena = try arena_pool.acquire();
|
||||
errdefer arena_pool.release(call_arena);
|
||||
const call_arena = try session.getArena(.{ .debug = "call_arena" });
|
||||
errdefer session.releaseArena(call_arena);
|
||||
|
||||
const factory = &session.factory;
|
||||
const document = (try factory.document(Node.Document.HTMLDocument{
|
||||
._proto = undefined,
|
||||
})).asDocument();
|
||||
@@ -265,10 +256,9 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
self.* = .{
|
||||
.js = undefined,
|
||||
.parent = parent,
|
||||
.arena = page_arena,
|
||||
.arena = session.page_arena,
|
||||
.document = document,
|
||||
.window = undefined,
|
||||
.arena_pool = arena_pool,
|
||||
.call_arena = call_arena,
|
||||
._frame_id = frame_id,
|
||||
._session = session,
|
||||
@@ -276,7 +266,7 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
._pending_loads = 1, // always 1 for the ScriptManager
|
||||
._type = if (parent == null) .root else .frame,
|
||||
._script_manager = undefined,
|
||||
._event_manager = EventManager.init(page_arena, self),
|
||||
._event_manager = EventManager.init(session.page_arena, self),
|
||||
};
|
||||
|
||||
var screen: *Screen = undefined;
|
||||
@@ -304,6 +294,7 @@ pub fn init(self: *Page, frame_id: u32, session: *Session, parent: ?*Page) !void
|
||||
._visual_viewport = visual_viewport,
|
||||
});
|
||||
|
||||
const browser = session.browser;
|
||||
self._script_manager = ScriptManager.init(browser.allocator, browser.http_client, self);
|
||||
errdefer self._script_manager.deinit();
|
||||
|
||||
@@ -339,11 +330,12 @@ pub fn deinit(self: *Page, abort_http: bool) void {
|
||||
// stats.print(&stream) catch unreachable;
|
||||
}
|
||||
|
||||
const session = self._session;
|
||||
|
||||
if (self._queued_navigation) |qn| {
|
||||
self.arena_pool.release(qn.arena);
|
||||
session.releaseArena(qn.arena);
|
||||
}
|
||||
|
||||
const session = self._session;
|
||||
session.browser.env.destroyContext(self.js);
|
||||
|
||||
self._script_manager.shutdown = true;
|
||||
@@ -359,23 +351,7 @@ pub fn deinit(self: *Page, abort_http: bool) void {
|
||||
|
||||
self._script_manager.deinit();
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
var it = self._arena_pool_leak_track.valueIterator();
|
||||
while (it.next()) |value_ptr| {
|
||||
if (value_ptr.count > 0) {
|
||||
log.err(.bug, "ArenaPool Leak", .{ .owner = value_ptr.owner, .type = self._type, .url = self.url });
|
||||
if (comptime builtin.is_test) {
|
||||
@panic("ArenaPool Leak");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.arena_pool.release(self.call_arena);
|
||||
|
||||
if (self.parent == null) {
|
||||
self.arena_pool.release(self.arena);
|
||||
}
|
||||
session.releaseArena(self.call_arena);
|
||||
}
|
||||
|
||||
pub fn base(self: *const Page) [:0]const u8 {
|
||||
@@ -389,14 +365,10 @@ pub fn getTitle(self: *Page) !?[]const u8 {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn getOrigin(self: *Page, allocator: Allocator) !?[]const u8 {
|
||||
return try URL.getOrigin(allocator, self.url);
|
||||
}
|
||||
|
||||
// Add comon headers for a request:
|
||||
// * cookies
|
||||
// * referer
|
||||
pub fn headersForRequest(self: *Page, temp: Allocator, url: [:0]const u8, headers: *Http.Headers) !void {
|
||||
pub fn headersForRequest(self: *Page, temp: Allocator, url: [:0]const u8, headers: *HttpClient.Headers) !void {
|
||||
try self.requestCookie(.{}).headersForRequest(temp, url, headers);
|
||||
|
||||
// Build the referer
|
||||
@@ -419,38 +391,16 @@ pub fn headersForRequest(self: *Page, temp: Allocator, url: [:0]const u8, header
|
||||
}
|
||||
}
|
||||
|
||||
const GetArenaOpts = struct {
|
||||
debug: []const u8,
|
||||
};
|
||||
pub fn getArena(self: *Page, comptime opts: GetArenaOpts) !Allocator {
|
||||
const allocator = try self.arena_pool.acquire();
|
||||
if (comptime IS_DEBUG) {
|
||||
const gop = try self._arena_pool_leak_track.getOrPut(self.arena, @intFromPtr(allocator.ptr));
|
||||
if (gop.found_existing) {
|
||||
std.debug.assert(gop.value_ptr.count == 0);
|
||||
}
|
||||
gop.value_ptr.* = .{ .owner = opts.debug, .count = 1 };
|
||||
}
|
||||
return allocator;
|
||||
pub fn getArena(self: *Page, comptime opts: Session.GetArenaOpts) !Allocator {
|
||||
return self._session.getArena(opts);
|
||||
}
|
||||
|
||||
pub fn releaseArena(self: *Page, allocator: Allocator) void {
|
||||
if (comptime IS_DEBUG) {
|
||||
const found = self._arena_pool_leak_track.getPtr(@intFromPtr(allocator.ptr)).?;
|
||||
if (found.count != 1) {
|
||||
log.err(.bug, "ArenaPool Double Free", .{ .owner = found.owner, .count = found.count, .type = self._type, .url = self.url });
|
||||
if (comptime builtin.is_test) {
|
||||
@panic("ArenaPool Double Free");
|
||||
}
|
||||
return;
|
||||
}
|
||||
found.count = 0;
|
||||
}
|
||||
return self.arena_pool.release(allocator);
|
||||
return self._session.releaseArena(allocator);
|
||||
}
|
||||
|
||||
pub fn isSameOrigin(self: *const Page, url: [:0]const u8) !bool {
|
||||
const current_origin = (try URL.getOrigin(self.call_arena, self.url)) orelse return false;
|
||||
const current_origin = self.origin orelse return false;
|
||||
return std.mem.startsWith(u8, url, current_origin);
|
||||
}
|
||||
|
||||
@@ -473,6 +423,14 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
|
||||
// page and dispatch the events.
|
||||
if (std.mem.eql(u8, "about:blank", request_url)) {
|
||||
self.url = "about:blank";
|
||||
|
||||
if (self.parent) |parent| {
|
||||
self.origin = parent.origin;
|
||||
} else {
|
||||
self.origin = null;
|
||||
}
|
||||
try self.js.setOrigin(self.origin);
|
||||
|
||||
// Assume we parsed the document.
|
||||
// It's important to force a reset during the following navigation.
|
||||
self._parse_state = .complete;
|
||||
@@ -519,6 +477,7 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
|
||||
var http_client = session.browser.http_client;
|
||||
|
||||
self.url = try self.arena.dupeZ(u8, request_url);
|
||||
self.origin = try URL.getOrigin(self.arena, self.url);
|
||||
|
||||
self._req_id = req_id;
|
||||
self._navigated_options = .{
|
||||
@@ -579,8 +538,8 @@ pub fn scheduleNavigation(self: *Page, request_url: []const u8, opts: NavigateOp
|
||||
if (self.canScheduleNavigation(std.meta.activeTag(nt)) == false) {
|
||||
return;
|
||||
}
|
||||
const arena = try self.arena_pool.acquire();
|
||||
errdefer self.arena_pool.release(arena);
|
||||
const arena = try self._session.getArena(.{ .debug = "scheduleNavigation" });
|
||||
errdefer self._session.releaseArena(arena);
|
||||
return self.scheduleNavigationWithArena(arena, request_url, opts, nt);
|
||||
}
|
||||
|
||||
@@ -619,9 +578,8 @@ fn scheduleNavigationWithArena(originator: *Page, arena: Allocator, request_url:
|
||||
if (target.parent == null) {
|
||||
try session.navigation.updateEntries(target.url, opts.kind, target, true);
|
||||
}
|
||||
// doin't defer this, the caller, the caller is responsible for freeing
|
||||
// it on error
|
||||
target.arena_pool.release(arena);
|
||||
// don't defer this, the caller is responsible for freeing it on error
|
||||
session.releaseArena(arena);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -653,7 +611,7 @@ fn scheduleNavigationWithArena(originator: *Page, arena: Allocator, request_url:
|
||||
};
|
||||
|
||||
if (target._queued_navigation) |existing| {
|
||||
target.arena_pool.release(existing.arena);
|
||||
session.releaseArena(existing.arena);
|
||||
}
|
||||
|
||||
target._queued_navigation = qn;
|
||||
@@ -823,12 +781,18 @@ fn notifyParentLoadComplete(self: *Page) void {
|
||||
parent.iframeCompletedLoading(self.iframe.?);
|
||||
}
|
||||
|
||||
fn pageHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
fn pageHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
var self: *Page = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
// would be different than self.url in the case of a redirect
|
||||
const header = &transfer.response_header.?;
|
||||
self.url = try self.arena.dupeZ(u8, std.mem.span(header.url));
|
||||
|
||||
const response_url = std.mem.span(header.url);
|
||||
if (std.mem.eql(u8, response_url, self.url) == false) {
|
||||
// would be different than self.url in the case of a redirect
|
||||
self.url = try self.arena.dupeZ(u8, response_url);
|
||||
self.origin = try URL.getOrigin(self.arena, self.url);
|
||||
}
|
||||
try self.js.setOrigin(self.origin);
|
||||
|
||||
self.window._location = try Location.init(self.url, self);
|
||||
self.document._location = self.window._location;
|
||||
@@ -845,7 +809,7 @@ fn pageHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn pageDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
fn pageDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
var self: *Page = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
if (self._parse_state == .pre) {
|
||||
@@ -2434,6 +2398,12 @@ pub fn removeNode(self: *Page, parent: *Node, child: *Node, opts: RemoveNodeOpts
|
||||
const previous_sibling = child.previousSibling();
|
||||
const next_sibling = child.nextSibling();
|
||||
|
||||
// Capture child's index before removal for live range updates (DOM spec remove steps 4-7)
|
||||
const child_index_for_ranges: ?u32 = if (self._live_ranges.first != null)
|
||||
parent.getChildIndex(child)
|
||||
else
|
||||
null;
|
||||
|
||||
const children = parent._children.?;
|
||||
switch (children.*) {
|
||||
.one => |n| {
|
||||
@@ -2462,6 +2432,11 @@ pub fn removeNode(self: *Page, parent: *Node, child: *Node, opts: RemoveNodeOpts
|
||||
child._parent = null;
|
||||
child._child_link = .{};
|
||||
|
||||
// Update live ranges for removal (DOM spec remove steps 4-7)
|
||||
if (child_index_for_ranges) |idx| {
|
||||
self.updateRangesForNodeRemoval(parent, child, idx);
|
||||
}
|
||||
|
||||
// Handle slot assignment removal before mutation observers
|
||||
if (child.is(Element)) |el| {
|
||||
// Check if the parent was a shadow host
|
||||
@@ -2609,6 +2584,21 @@ pub fn _insertNodeRelative(self: *Page, comptime from_parser: bool, parent: *Nod
|
||||
}
|
||||
child._parent = parent;
|
||||
|
||||
// Update live ranges for insertion (DOM spec insert step 6).
|
||||
// For .before/.after the child was inserted at a specific position;
|
||||
// ranges on parent with offsets past that position must be incremented.
|
||||
// For .append no range update is needed (spec: "if child is non-null").
|
||||
if (self._live_ranges.first != null) {
|
||||
switch (relative) {
|
||||
.append => {},
|
||||
.before, .after => {
|
||||
if (parent.getChildIndex(child)) |idx| {
|
||||
self.updateRangesForNodeInsertion(parent, idx);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Tri-state behavior for mutations:
|
||||
// 1. from_parser=true, parse_mode=document -> no mutations (initial document parse)
|
||||
// 2. from_parser=true, parse_mode=fragment -> mutations (innerHTML additions)
|
||||
@@ -2867,6 +2857,54 @@ pub fn childListChange(
|
||||
}
|
||||
}
|
||||
|
||||
// --- Live range update methods (DOM spec §4.2.3, §4.2.4, §4.7, §4.8) ---
|
||||
|
||||
/// Update all live ranges after a replaceData mutation on a CharacterData node.
|
||||
/// Per DOM spec: insertData = replaceData(offset, 0, data),
|
||||
/// deleteData = replaceData(offset, count, "").
|
||||
/// All parameters are in UTF-16 code unit offsets.
|
||||
pub fn updateRangesForCharacterDataReplace(self: *Page, target: *Node, offset: u32, count: u32, data_len: u32) void {
|
||||
var it: ?*std.DoublyLinkedList.Node = self._live_ranges.first;
|
||||
while (it) |link| : (it = link.next) {
|
||||
const ar: *AbstractRange = @fieldParentPtr("_range_link", link);
|
||||
ar.updateForCharacterDataReplace(target, offset, count, data_len);
|
||||
}
|
||||
}
|
||||
|
||||
/// Update all live ranges after a splitText operation.
|
||||
/// Steps 7b-7e of the DOM spec splitText algorithm.
|
||||
/// Steps 7d-7e complement (not overlap) updateRangesForNodeInsertion:
|
||||
/// the insert update handles offsets > child_index, while 7d/7e handle
|
||||
/// offsets == node_index+1 (these are equal values but with > vs == checks).
|
||||
pub fn updateRangesForSplitText(self: *Page, target: *Node, new_node: *Node, offset: u32, parent: *Node, node_index: u32) void {
|
||||
var it: ?*std.DoublyLinkedList.Node = self._live_ranges.first;
|
||||
while (it) |link| : (it = link.next) {
|
||||
const ar: *AbstractRange = @fieldParentPtr("_range_link", link);
|
||||
ar.updateForSplitText(target, new_node, offset, parent, node_index);
|
||||
}
|
||||
}
|
||||
|
||||
/// Update all live ranges after a node insertion.
|
||||
/// Per DOM spec insert algorithm step 6: only applies when inserting before a
|
||||
/// non-null reference node.
|
||||
pub fn updateRangesForNodeInsertion(self: *Page, parent: *Node, child_index: u32) void {
|
||||
var it: ?*std.DoublyLinkedList.Node = self._live_ranges.first;
|
||||
while (it) |link| : (it = link.next) {
|
||||
const ar: *AbstractRange = @fieldParentPtr("_range_link", link);
|
||||
ar.updateForNodeInsertion(parent, child_index);
|
||||
}
|
||||
}
|
||||
|
||||
/// Update all live ranges after a node removal.
|
||||
/// Per DOM spec remove algorithm steps 4-7.
|
||||
pub fn updateRangesForNodeRemoval(self: *Page, parent: *Node, child: *Node, child_index: u32) void {
|
||||
var it: ?*std.DoublyLinkedList.Node = self._live_ranges.first;
|
||||
while (it) |link| : (it = link.next) {
|
||||
const ar: *AbstractRange = @fieldParentPtr("_range_link", link);
|
||||
ar.updateForNodeRemoval(parent, child, child_index);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: optimize and cleanup, this is called a lot (e.g., innerHTML = '')
|
||||
pub fn parseHtmlAsChildren(self: *Page, node: *Node, html: []const u8) !void {
|
||||
const previous_parse_mode = self._parse_mode;
|
||||
@@ -3047,7 +3085,7 @@ pub const NavigateReason = enum {
|
||||
pub const NavigateOpts = struct {
|
||||
cdp_id: ?i64 = null,
|
||||
reason: NavigateReason = .address_bar,
|
||||
method: Http.Method = .GET,
|
||||
method: HttpClient.Method = .GET,
|
||||
body: ?[]const u8 = null,
|
||||
header: ?[:0]const u8 = null,
|
||||
force: bool = false,
|
||||
@@ -3057,7 +3095,7 @@ pub const NavigateOpts = struct {
|
||||
pub const NavigatedOpts = struct {
|
||||
cdp_id: ?i64 = null,
|
||||
reason: NavigateReason = .address_bar,
|
||||
method: Http.Method = .GET,
|
||||
method: HttpClient.Method = .GET,
|
||||
};
|
||||
|
||||
const NavigationType = enum {
|
||||
@@ -3164,7 +3202,7 @@ pub fn handleClick(self: *Page, target: *Node) !void {
|
||||
pub fn triggerKeyboard(self: *Page, keyboard_event: *KeyboardEvent) !void {
|
||||
const event = keyboard_event.asEvent();
|
||||
const element = self.window._document._active_element orelse {
|
||||
keyboard_event.deinit(false, self);
|
||||
keyboard_event.deinit(false, self._session);
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -3240,7 +3278,7 @@ pub fn submitForm(self: *Page, submitter_: ?*Element, form_: ?*Element.Html.Form
|
||||
|
||||
// so submit_event is still valid when we check _prevent_default
|
||||
submit_event.acquireRef();
|
||||
defer submit_event.deinit(false, self);
|
||||
defer submit_event.deinit(false, self._session);
|
||||
|
||||
try self._event_manager.dispatch(form_element.asEventTarget(), submit_event);
|
||||
// If the submit event was prevented, don't submit the form
|
||||
@@ -3254,8 +3292,8 @@ pub fn submitForm(self: *Page, submitter_: ?*Element, form_: ?*Element.Html.Form
|
||||
// I don't think this is technically correct, but FormData handles it ok
|
||||
const form_data = try FormData.init(form, submitter_, self);
|
||||
|
||||
const arena = try self.arena_pool.acquire();
|
||||
errdefer self.arena_pool.release(arena);
|
||||
const arena = try self._session.getArena(.{ .debug = "submitForm" });
|
||||
errdefer self._session.releaseArena(arena);
|
||||
|
||||
const encoding = form_element.getAttributeSafe(comptime .wrap("enctype"));
|
||||
|
||||
@@ -3302,7 +3340,7 @@ const RequestCookieOpts = struct {
|
||||
is_http: bool = true,
|
||||
is_navigation: bool = false,
|
||||
};
|
||||
pub fn requestCookie(self: *const Page, opts: RequestCookieOpts) Http.Client.RequestCookie {
|
||||
pub fn requestCookie(self: *const Page, opts: RequestCookieOpts) HttpClient.RequestCookie {
|
||||
return .{
|
||||
.jar = &self._session.cookie_jar,
|
||||
.origin = self.url,
|
||||
|
||||
@@ -21,7 +21,8 @@ const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const Http = @import("../http/Http.zig");
|
||||
const HttpClient = @import("HttpClient.zig");
|
||||
const net_http = @import("../network/http.zig");
|
||||
const String = @import("../string.zig").String;
|
||||
|
||||
const js = @import("js/js.zig");
|
||||
@@ -60,7 +61,7 @@ ready_scripts: std.DoublyLinkedList,
|
||||
|
||||
shutdown: bool = false,
|
||||
|
||||
client: *Http.Client,
|
||||
client: *HttpClient,
|
||||
allocator: Allocator,
|
||||
buffer_pool: BufferPool,
|
||||
|
||||
@@ -88,7 +89,7 @@ importmap: std.StringHashMapUnmanaged([:0]const u8),
|
||||
// event).
|
||||
page_notified_of_completion: bool,
|
||||
|
||||
pub fn init(allocator: Allocator, http_client: *Http.Client, page: *Page) ScriptManager {
|
||||
pub fn init(allocator: Allocator, http_client: *HttpClient, page: *Page) ScriptManager {
|
||||
return .{
|
||||
.page = page,
|
||||
.async_scripts = .{},
|
||||
@@ -141,7 +142,7 @@ fn clearList(list: *std.DoublyLinkedList) void {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !Http.Headers {
|
||||
pub fn getHeaders(self: *ScriptManager, url: [:0]const u8) !net_http.Headers {
|
||||
var headers = try self.client.newHeaders();
|
||||
try self.page.headersForRequest(self.page.arena, url, &headers);
|
||||
return headers;
|
||||
@@ -675,11 +676,11 @@ pub const Script = struct {
|
||||
self.manager.script_pool.destroy(self);
|
||||
}
|
||||
|
||||
fn startCallback(transfer: *Http.Transfer) !void {
|
||||
fn startCallback(transfer: *HttpClient.Transfer) !void {
|
||||
log.debug(.http, "script fetch start", .{ .req = transfer });
|
||||
}
|
||||
|
||||
fn headerCallback(transfer: *Http.Transfer) !bool {
|
||||
fn headerCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
const header = &transfer.response_header.?;
|
||||
self.status = header.status;
|
||||
@@ -746,14 +747,14 @@ pub const Script = struct {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
fn dataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *Script = @ptrCast(@alignCast(transfer.ctx));
|
||||
self._dataCallback(transfer, data) catch |err| {
|
||||
log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
fn _dataCallback(self: *Script, _: *Http.Transfer, data: []const u8) !void {
|
||||
fn _dataCallback(self: *Script, _: *HttpClient.Transfer, data: []const u8) !void {
|
||||
try self.source.remote.appendSlice(self.manager.allocator, data);
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ const lp = @import("lightpanda");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../App.zig");
|
||||
|
||||
const js = @import("js/js.zig");
|
||||
const storage = @import("webapi/storage/storage.zig");
|
||||
@@ -29,20 +30,53 @@ const History = @import("webapi/History.zig");
|
||||
|
||||
const Page = @import("Page.zig");
|
||||
const Browser = @import("Browser.zig");
|
||||
const Factory = @import("Factory.zig");
|
||||
const Notification = @import("../Notification.zig");
|
||||
const QueuedNavigation = Page.QueuedNavigation;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaPool = App.ArenaPool;
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
// Session is like a browser's tab.
|
||||
// It owns the js env and the loader for all the pages of the session.
|
||||
// You can create successively multiple pages for a session, but you must
|
||||
// deinit a page before running another one.
|
||||
// deinit a page before running another one. It manages two distinct lifetimes.
|
||||
//
|
||||
// The first is the lifetime of the Session itself, where pages are created and
|
||||
// removed, but share the same cookie jar and navigation history (etc...)
|
||||
//
|
||||
// The second is as a container the data needed by the full page hierarchy, i.e. \
|
||||
// the root page and all of its frames (and all of their frames.)
|
||||
const Session = @This();
|
||||
|
||||
// These are the fields that remain intact for the duration of the Session
|
||||
browser: *Browser,
|
||||
arena: Allocator,
|
||||
history: History,
|
||||
navigation: Navigation,
|
||||
storage_shed: storage.Shed,
|
||||
notification: *Notification,
|
||||
cookie_jar: storage.Cookie.Jar,
|
||||
|
||||
// These are the fields that get reset whenever the Session's page (the root) is reset.
|
||||
factory: Factory,
|
||||
|
||||
page_arena: Allocator,
|
||||
|
||||
// Origin map for same-origin context sharing. Scoped to the root page lifetime.
|
||||
origins: std.StringHashMapUnmanaged(*js.Origin) = .empty,
|
||||
|
||||
// Shared resources for all pages in this session.
|
||||
// These live for the duration of the page tree (root + frames).
|
||||
arena_pool: *ArenaPool,
|
||||
|
||||
// In Debug, we use this to see if anything fails to release an arena back to
|
||||
// the pool.
|
||||
_arena_pool_leak_track: if (IS_DEBUG) std.AutoHashMapUnmanaged(usize, struct {
|
||||
owner: []const u8,
|
||||
count: usize,
|
||||
}) else void = if (IS_DEBUG) .empty else {},
|
||||
|
||||
page: ?Page,
|
||||
|
||||
queued_navigation: std.ArrayList(*Page),
|
||||
// Temporary buffer for about:blank navigations during processing.
|
||||
@@ -50,27 +84,24 @@ queued_navigation: std.ArrayList(*Page),
|
||||
// about:blank navigations (which may add to queued_navigation).
|
||||
queued_queued_navigation: std.ArrayList(*Page),
|
||||
|
||||
// Used to create our Inspector and in the BrowserContext.
|
||||
arena: Allocator,
|
||||
|
||||
cookie_jar: storage.Cookie.Jar,
|
||||
storage_shed: storage.Shed,
|
||||
|
||||
history: History,
|
||||
navigation: Navigation,
|
||||
|
||||
page: ?Page,
|
||||
|
||||
frame_id_gen: u32,
|
||||
|
||||
pub fn init(self: *Session, browser: *Browser, notification: *Notification) !void {
|
||||
const allocator = browser.app.allocator;
|
||||
const arena = try browser.arena_pool.acquire();
|
||||
errdefer browser.arena_pool.release(arena);
|
||||
const arena_pool = browser.arena_pool;
|
||||
|
||||
const arena = try arena_pool.acquire();
|
||||
errdefer arena_pool.release(arena);
|
||||
|
||||
const page_arena = try arena_pool.acquire();
|
||||
errdefer arena_pool.release(page_arena);
|
||||
|
||||
self.* = .{
|
||||
.page = null,
|
||||
.arena = arena,
|
||||
.arena_pool = arena_pool,
|
||||
.page_arena = page_arena,
|
||||
.factory = Factory.init(page_arena),
|
||||
.history = .{},
|
||||
.frame_id_gen = 0,
|
||||
// The prototype (EventTarget) for Navigation is created when a Page is created.
|
||||
@@ -90,9 +121,9 @@ pub fn deinit(self: *Session) void {
|
||||
}
|
||||
self.cookie_jar.deinit();
|
||||
|
||||
const browser = self.browser;
|
||||
self.storage_shed.deinit(browser.app.allocator);
|
||||
browser.arena_pool.release(self.arena);
|
||||
self.storage_shed.deinit(self.browser.app.allocator);
|
||||
self.arena_pool.release(self.page_arena);
|
||||
self.arena_pool.release(self.arena);
|
||||
}
|
||||
|
||||
// NOTE: the caller is not the owner of the returned value,
|
||||
@@ -126,29 +157,133 @@ pub fn removePage(self: *Session) void {
|
||||
self.page = null;
|
||||
|
||||
self.navigation.onRemovePage();
|
||||
self.resetPageResources();
|
||||
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.browser, "remove page", .{});
|
||||
}
|
||||
}
|
||||
|
||||
pub const GetArenaOpts = struct {
|
||||
debug: []const u8,
|
||||
};
|
||||
|
||||
pub fn getArena(self: *Session, opts: GetArenaOpts) !Allocator {
|
||||
const allocator = try self.arena_pool.acquire();
|
||||
if (comptime IS_DEBUG) {
|
||||
// Use session's arena (not page_arena) since page_arena gets reset between pages
|
||||
const gop = try self._arena_pool_leak_track.getOrPut(self.arena, @intFromPtr(allocator.ptr));
|
||||
if (gop.found_existing and gop.value_ptr.count != 0) {
|
||||
log.err(.bug, "ArenaPool Double Use", .{ .owner = gop.value_ptr.*.owner });
|
||||
@panic("ArenaPool Double Use");
|
||||
}
|
||||
gop.value_ptr.* = .{ .owner = opts.debug, .count = 1 };
|
||||
}
|
||||
return allocator;
|
||||
}
|
||||
|
||||
pub fn releaseArena(self: *Session, allocator: Allocator) void {
|
||||
if (comptime IS_DEBUG) {
|
||||
const found = self._arena_pool_leak_track.getPtr(@intFromPtr(allocator.ptr)).?;
|
||||
if (found.count != 1) {
|
||||
log.err(.bug, "ArenaPool Double Free", .{ .owner = found.owner, .count = found.count });
|
||||
if (comptime builtin.is_test) {
|
||||
@panic("ArenaPool Double Free");
|
||||
}
|
||||
return;
|
||||
}
|
||||
found.count = 0;
|
||||
}
|
||||
return self.arena_pool.release(allocator);
|
||||
}
|
||||
|
||||
pub fn getOrCreateOrigin(self: *Session, key_: ?[]const u8) !*js.Origin {
|
||||
const key = key_ orelse {
|
||||
var opaque_origin: [36]u8 = undefined;
|
||||
@import("../id.zig").uuidv4(&opaque_origin);
|
||||
// Origin.init will dupe opaque_origin. It's fine that this doesn't
|
||||
// get added to self.origins. In fact, it further isolates it. When the
|
||||
// context is freed, it'll call session.releaseOrigin which will free it.
|
||||
return js.Origin.init(self.browser.app, self.browser.env.isolate, &opaque_origin);
|
||||
};
|
||||
|
||||
const gop = try self.origins.getOrPut(self.arena, key);
|
||||
if (gop.found_existing) {
|
||||
const origin = gop.value_ptr.*;
|
||||
origin.rc += 1;
|
||||
return origin;
|
||||
}
|
||||
|
||||
errdefer _ = self.origins.remove(key);
|
||||
|
||||
const origin = try js.Origin.init(self.browser.app, self.browser.env.isolate, key);
|
||||
gop.key_ptr.* = origin.key;
|
||||
gop.value_ptr.* = origin;
|
||||
return origin;
|
||||
}
|
||||
|
||||
pub fn releaseOrigin(self: *Session, origin: *js.Origin) void {
|
||||
const rc = origin.rc;
|
||||
if (rc == 1) {
|
||||
_ = self.origins.remove(origin.key);
|
||||
origin.deinit(self.browser.app);
|
||||
} else {
|
||||
origin.rc = rc - 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// Reset page_arena and factory for a clean slate.
|
||||
/// Called when root page is removed.
|
||||
fn resetPageResources(self: *Session) void {
|
||||
// Check for arena leaks before releasing
|
||||
if (comptime IS_DEBUG) {
|
||||
var it = self._arena_pool_leak_track.valueIterator();
|
||||
while (it.next()) |value_ptr| {
|
||||
if (value_ptr.count > 0) {
|
||||
log.err(.bug, "ArenaPool Leak", .{ .owner = value_ptr.owner });
|
||||
}
|
||||
}
|
||||
self._arena_pool_leak_track.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
// All origins should have been released when contexts were destroyed
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(self.origins.count() == 0);
|
||||
}
|
||||
// Defensive cleanup in case origins leaked
|
||||
{
|
||||
const app = self.browser.app;
|
||||
var it = self.origins.valueIterator();
|
||||
while (it.next()) |value| {
|
||||
value.*.deinit(app);
|
||||
}
|
||||
self.origins.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
// Release old page_arena and acquire fresh one
|
||||
self.frame_id_gen = 0;
|
||||
self.arena_pool.reset(self.page_arena, 64 * 1024);
|
||||
self.factory = Factory.init(self.page_arena);
|
||||
}
|
||||
|
||||
pub fn replacePage(self: *Session) !*Page {
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.browser, "replace page", .{});
|
||||
}
|
||||
|
||||
lp.assert(self.page != null, "Session.replacePage null page", .{});
|
||||
lp.assert(self.page.?.parent == null, "Session.replacePage with parent", .{});
|
||||
|
||||
var current = self.page.?;
|
||||
const frame_id = current._frame_id;
|
||||
const parent = current.parent;
|
||||
current.deinit(false);
|
||||
current.deinit(true);
|
||||
|
||||
self.resetPageResources();
|
||||
self.browser.env.memoryPressureNotification(.moderate);
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const page = &self.page.?;
|
||||
try Page.init(page, frame_id, self, parent);
|
||||
try Page.init(page, frame_id, self, null);
|
||||
return page;
|
||||
}
|
||||
|
||||
@@ -428,12 +563,11 @@ fn processQueuedNavigation(self: *Session) !void {
|
||||
fn processFrameNavigation(self: *Session, page: *Page, qn: *QueuedNavigation) !void {
|
||||
lp.assert(page.parent != null, "root queued navigation", .{});
|
||||
|
||||
const browser = self.browser;
|
||||
const iframe = page.iframe.?;
|
||||
const parent = page.parent.?;
|
||||
|
||||
page._queued_navigation = null;
|
||||
defer browser.arena_pool.release(qn.arena);
|
||||
defer self.releaseArena(qn.arena);
|
||||
|
||||
errdefer iframe._window = null;
|
||||
|
||||
@@ -465,9 +599,21 @@ fn processRootQueuedNavigation(self: *Session) !void {
|
||||
// create a copy before the page is cleared
|
||||
const qn = current_page._queued_navigation.?;
|
||||
current_page._queued_navigation = null;
|
||||
defer self.browser.arena_pool.release(qn.arena);
|
||||
|
||||
defer self.arena_pool.release(qn.arena);
|
||||
|
||||
// HACK
|
||||
// Mark as released in tracking BEFORE removePage clears the map.
|
||||
// We can't call releaseArena() because that would also return the arena
|
||||
// to the pool, making the memory invalid before we use qn.url/qn.opts.
|
||||
if (comptime IS_DEBUG) {
|
||||
if (self._arena_pool_leak_track.getPtr(@intFromPtr(qn.arena.ptr))) |found| {
|
||||
found.count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
self.removePage();
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const new_page = &self.page.?;
|
||||
try Page.init(new_page, frame_id, self, null);
|
||||
|
||||
@@ -23,9 +23,11 @@ const log = @import("../../log.zig");
|
||||
const js = @import("js.zig");
|
||||
const Env = @import("Env.zig");
|
||||
const bridge = @import("bridge.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
const Scheduler = @import("Scheduler.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const ScriptManager = @import("../ScriptManager.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
@@ -41,6 +43,7 @@ const Context = @This();
|
||||
id: usize,
|
||||
env: *Env,
|
||||
page: *Page,
|
||||
session: *Session,
|
||||
isolate: js.Isolate,
|
||||
|
||||
// Per-context microtask queue for isolation between contexts
|
||||
@@ -74,39 +77,11 @@ call_depth: usize = 0,
|
||||
// context.localScope
|
||||
local: ?*const js.Local = null,
|
||||
|
||||
// Serves two purposes. Like `global_objects`, this is used to free
|
||||
// every Global(Object) we've created during the lifetime of the context.
|
||||
// More importantly, it serves as an identity map - for a given Zig
|
||||
// instance, we map it to the same Global(Object).
|
||||
// The key is the @intFromPtr of the Zig value
|
||||
identity_map: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
origin: *Origin,
|
||||
|
||||
// Any type that is stored in the identity_map which has a finalizer declared
|
||||
// will have its finalizer stored here. This is only used when shutting down
|
||||
// if v8 hasn't called the finalizer directly itself.
|
||||
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
|
||||
finalizer_callback_pool: std.heap.MemoryPool(FinalizerCallback),
|
||||
|
||||
// Some web APIs have to manage opaque values. Ideally, they use an
|
||||
// js.Object, but the js.Object has no lifetime guarantee beyond the
|
||||
// current call. They can call .persist() on their js.Object to get
|
||||
// a `Global(Object)`. We need to track these to free them.
|
||||
// This used to be a map and acted like identity_map; the key was
|
||||
// the @intFromPtr(js_obj.handle). But v8 can re-use address. Without
|
||||
// a reliable way to know if an object has already been persisted,
|
||||
// we now simply persist every time persist() is called.
|
||||
global_values: std.ArrayList(v8.Global) = .empty,
|
||||
global_objects: std.ArrayList(v8.Global) = .empty,
|
||||
// Unlike other v8 types, like functions or objects, modules are not shared
|
||||
// across origins.
|
||||
global_modules: std.ArrayList(v8.Global) = .empty,
|
||||
global_promises: std.ArrayList(v8.Global) = .empty,
|
||||
global_functions: std.ArrayList(v8.Global) = .empty,
|
||||
global_promise_resolvers: std.ArrayList(v8.Global) = .empty,
|
||||
|
||||
// Temp variants stored in HashMaps for O(1) early cleanup.
|
||||
// Key is global.data_ptr.
|
||||
global_values_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
global_promises_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
global_functions_temp: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Our module cache: normalized module specifier => module.
|
||||
module_cache: std.StringHashMapUnmanaged(ModuleEntry) = .empty,
|
||||
@@ -174,64 +149,11 @@ pub fn deinit(self: *Context) void {
|
||||
// this can release objects
|
||||
self.scheduler.deinit();
|
||||
|
||||
{
|
||||
var it = self.identity_map.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
{
|
||||
var it = self.finalizer_callbacks.valueIterator();
|
||||
while (it.next()) |finalizer| {
|
||||
finalizer.*.deinit();
|
||||
}
|
||||
self.finalizer_callback_pool.deinit();
|
||||
}
|
||||
|
||||
for (self.global_values.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_objects.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_modules.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_functions.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_promises.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
for (self.global_promise_resolvers.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_values_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_promises_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.global_functions_temp.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
self.session.releaseOrigin(self.origin);
|
||||
|
||||
v8.v8__Global__Reset(&self.handle);
|
||||
env.isolate.notifyContextDisposed();
|
||||
@@ -241,8 +163,40 @@ pub fn deinit(self: *Context) void {
|
||||
v8.v8__MicrotaskQueue__DELETE(self.microtask_queue);
|
||||
}
|
||||
|
||||
pub fn setOrigin(self: *Context, key: ?[]const u8) !void {
|
||||
const env = self.env;
|
||||
const isolate = env.isolate;
|
||||
|
||||
const origin = try self.session.getOrCreateOrigin(key);
|
||||
errdefer self.session.releaseOrigin(origin);
|
||||
|
||||
try self.origin.transferTo(origin);
|
||||
self.origin.deinit(env.app);
|
||||
|
||||
self.origin = origin;
|
||||
|
||||
{
|
||||
var ls: js.Local.Scope = undefined;
|
||||
self.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
// Set the V8::Context SecurityToken, which is a big part of what allows
|
||||
// one context to access another.
|
||||
const token_local = v8.v8__Global__Get(&origin.security_token, isolate.handle);
|
||||
v8.v8__Context__SetSecurityToken(ls.local.handle, token_local);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trackGlobal(self: *Context, global: v8.Global) !void {
|
||||
return self.origin.trackGlobal(global);
|
||||
}
|
||||
|
||||
pub fn trackTemp(self: *Context, global: v8.Global) !void {
|
||||
return self.origin.trackTemp(global);
|
||||
}
|
||||
|
||||
pub fn weakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -253,7 +207,7 @@ pub fn weakRef(self: *Context, obj: anytype) void {
|
||||
}
|
||||
|
||||
pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -265,7 +219,7 @@ pub fn safeWeakRef(self: *Context, obj: anytype) void {
|
||||
}
|
||||
|
||||
pub fn strongRef(self: *Context, obj: anytype) void {
|
||||
const fc = self.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
const fc = self.origin.finalizer_callbacks.get(@intFromPtr(obj)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
@@ -275,45 +229,6 @@ pub fn strongRef(self: *Context, obj: anytype) void {
|
||||
v8.v8__Global__ClearWeak(&fc.global);
|
||||
}
|
||||
|
||||
pub fn release(self: *Context, item: anytype) void {
|
||||
if (@TypeOf(item) == *anyopaque) {
|
||||
// Existing *anyopaque path for identity_map. Called internally from
|
||||
// finalizers
|
||||
var global = self.identity_map.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__Reset(&global.value);
|
||||
|
||||
// The item has been fianalized, remove it for the finalizer callback so that
|
||||
// we don't try to call it again on shutdown.
|
||||
const fc = self.finalizer_callbacks.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
// should not be possible
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
self.finalizer_callback_pool.destroy(fc.value);
|
||||
return;
|
||||
}
|
||||
|
||||
var map = switch (@TypeOf(item)) {
|
||||
js.Value.Temp => &self.global_values_temp,
|
||||
js.Promise.Temp => &self.global_promises_temp,
|
||||
js.Function.Temp => &self.global_functions_temp,
|
||||
else => |T| @compileError("Context.release cannot be called with a " ++ @typeName(T)),
|
||||
};
|
||||
|
||||
if (map.fetchRemove(item.handle.data_ptr)) |kv| {
|
||||
var global = kv.value;
|
||||
v8.v8__Global__Reset(&global);
|
||||
}
|
||||
}
|
||||
|
||||
// Any operation on the context have to be made from a local.
|
||||
pub fn localScope(self: *Context, ls: *js.Local.Scope) void {
|
||||
const isolate = self.isolate;
|
||||
@@ -336,28 +251,18 @@ pub fn toLocal(self: *Context, global: anytype) js.Local.ToLocalReturnType(@Type
|
||||
return l.toLocal(global);
|
||||
}
|
||||
|
||||
// This isn't expected to be called often. It's for converting attributes into
|
||||
// function calls, e.g. <body onload="doSomething"> will turn that "doSomething"
|
||||
// string into a js.Function which looks like: function(e) { doSomething(e) }
|
||||
// There might be more efficient ways to do this, but doing it this way means
|
||||
// our code only has to worry about js.Funtion, not some union of a js.Function
|
||||
// or a string.
|
||||
pub fn stringToPersistedFunction(self: *Context, str: []const u8) !js.Function.Global {
|
||||
pub fn stringToPersistedFunction(
|
||||
self: *Context,
|
||||
function_body: []const u8,
|
||||
comptime parameter_names: []const []const u8,
|
||||
extensions: []const v8.Object,
|
||||
) !js.Function.Global {
|
||||
var ls: js.Local.Scope = undefined;
|
||||
self.localScope(&ls);
|
||||
defer ls.deinit();
|
||||
|
||||
var extra: []const u8 = "";
|
||||
const normalized = std.mem.trim(u8, str, &std.ascii.whitespace);
|
||||
if (normalized.len > 0 and normalized[normalized.len - 1] != ')') {
|
||||
extra = "(e)";
|
||||
}
|
||||
const full = try std.fmt.allocPrintSentinel(self.call_arena, "(function(e) {{ {s}{s} }})", .{ normalized, extra }, 0);
|
||||
const js_val = try ls.local.compileAndRun(full, null);
|
||||
if (!js_val.isFunction()) {
|
||||
return error.StringFunctionError;
|
||||
}
|
||||
return try (js.Function{ .local = &ls.local, .handle = @ptrCast(js_val.handle) }).persist();
|
||||
const js_function = try ls.local.compileFunction(function_body, parameter_names, extensions);
|
||||
return js_function.persist();
|
||||
}
|
||||
|
||||
pub fn module(self: *Context, comptime want_result: bool, local: *const js.Local, src: []const u8, url: []const u8, cacheable: bool) !(if (want_result) ModuleEntry else void) {
|
||||
@@ -1039,34 +944,6 @@ pub fn queueMicrotaskFunc(self: *Context, cb: js.Function) void {
|
||||
v8.v8__MicrotaskQueue__EnqueueMicrotaskFunc(self.microtask_queue, self.isolate.handle, cb.handle);
|
||||
}
|
||||
|
||||
pub fn createFinalizerCallback(self: *Context, global: v8.Global, ptr: *anyopaque, finalizerFn: *const fn (ptr: *anyopaque, page: *Page) void) !*FinalizerCallback {
|
||||
const fc = try self.finalizer_callback_pool.create();
|
||||
fc.* = .{
|
||||
.ctx = self,
|
||||
.ptr = ptr,
|
||||
.global = global,
|
||||
.finalizerFn = finalizerFn,
|
||||
};
|
||||
return fc;
|
||||
}
|
||||
|
||||
// == Misc ==
|
||||
// A type that has a finalizer can have its finalizer called one of two ways.
|
||||
// The first is from V8 via the WeakCallback we give to weakRef. But that isn't
|
||||
// guaranteed to fire, so we track this in ctx._finalizers and call them on
|
||||
// context shutdown.
|
||||
pub const FinalizerCallback = struct {
|
||||
ctx: *Context,
|
||||
ptr: *anyopaque,
|
||||
global: v8.Global,
|
||||
finalizerFn: *const fn (ptr: *anyopaque, page: *Page) void,
|
||||
|
||||
pub fn deinit(self: *FinalizerCallback) void {
|
||||
self.finalizerFn(self.ptr, self.ctx.page);
|
||||
self.ctx.finalizer_callback_pool.destroy(self);
|
||||
}
|
||||
};
|
||||
|
||||
// == Profiler ==
|
||||
pub fn startCpuProfiler(self: *Context) void {
|
||||
if (comptime !IS_DEBUG) {
|
||||
|
||||
@@ -26,6 +26,7 @@ const App = @import("../../App.zig");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const bridge = @import("bridge.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
const Context = @import("Context.zig");
|
||||
const Isolate = @import("Isolate.zig");
|
||||
const Platform = @import("Platform.zig");
|
||||
@@ -57,6 +58,8 @@ const Env = @This();
|
||||
|
||||
app: *App,
|
||||
|
||||
allocator: Allocator,
|
||||
|
||||
platform: *const Platform,
|
||||
|
||||
// the global isolate
|
||||
@@ -70,6 +73,11 @@ isolate_params: *v8.CreateParams,
|
||||
|
||||
context_id: usize,
|
||||
|
||||
// Maps origin -> shared Origin contains, for v8 values shared across
|
||||
// same-origin Contexts. There's a mismatch here between our JS model and our
|
||||
// Browser model. Origins only live as long as the root page of a session exists.
|
||||
// It would be wrong/dangerous to re-use an Origin across root page navigations.
|
||||
|
||||
// Global handles that need to be freed on deinit
|
||||
eternal_function_templates: []v8.Eternal,
|
||||
|
||||
@@ -206,6 +214,7 @@ pub fn init(app: *App, opts: InitOpts) !Env {
|
||||
return .{
|
||||
.app = app,
|
||||
.context_id = 0,
|
||||
.allocator = allocator,
|
||||
.contexts = undefined,
|
||||
.context_count = 0,
|
||||
.isolate = isolate,
|
||||
@@ -228,7 +237,9 @@ pub fn deinit(self: *Env) void {
|
||||
ctx.deinit();
|
||||
}
|
||||
|
||||
const allocator = self.app.allocator;
|
||||
const app = self.app;
|
||||
const allocator = app.allocator;
|
||||
|
||||
if (self.inspector) |i| {
|
||||
i.deinit(allocator);
|
||||
}
|
||||
@@ -272,6 +283,7 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
|
||||
// get the global object for the context, this maps to our Window
|
||||
const global_obj = v8.v8__Context__Global(v8_context).?;
|
||||
|
||||
{
|
||||
// Store our TAO inside the internal field of the global object. This
|
||||
// maps the v8::Object -> Zig instance. Almost all objects have this, and
|
||||
@@ -287,6 +299,7 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
};
|
||||
v8.v8__Object__SetAlignedPointerInInternalField(global_obj, 0, tao);
|
||||
}
|
||||
|
||||
// our window wrapped in a v8::Global
|
||||
var global_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, global_obj, &global_global);
|
||||
@@ -294,10 +307,15 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
const context_id = self.context_id;
|
||||
self.context_id = context_id + 1;
|
||||
|
||||
const origin = try page._session.getOrCreateOrigin(null);
|
||||
errdefer page._session.releaseOrigin(origin);
|
||||
|
||||
const context = try context_arena.create(Context);
|
||||
context.* = .{
|
||||
.env = self,
|
||||
.page = page,
|
||||
.session = page._session,
|
||||
.origin = origin,
|
||||
.id = context_id,
|
||||
.isolate = isolate,
|
||||
.arena = context_arena,
|
||||
@@ -307,9 +325,8 @@ pub fn createContext(self: *Env, page: *Page) !*Context {
|
||||
.microtask_queue = microtask_queue,
|
||||
.script_manager = &page._script_manager,
|
||||
.scheduler = .init(context_arena),
|
||||
.finalizer_callback_pool = std.heap.MemoryPool(Context.FinalizerCallback).init(self.app.allocator),
|
||||
};
|
||||
try context.identity_map.putNoClobber(context_arena, @intFromPtr(page.window), global_global);
|
||||
try context.origin.identity_map.putNoClobber(context_arena, @intFromPtr(page.window), global_global);
|
||||
|
||||
// Store a pointer to our context inside the v8 context so that, given
|
||||
// a v8 context, we can get our context out
|
||||
|
||||
@@ -209,11 +209,11 @@ fn _persist(self: *const Function, comptime is_global: bool) !(if (is_global) Gl
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_functions.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_functions_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub fn tempWithThis(self: *const Function, value: anytype) !Temp {
|
||||
@@ -226,15 +226,18 @@ pub fn persistWithThis(self: *const Function, value: anytype) !Global {
|
||||
return with_this.persist();
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -252,5 +255,9 @@ fn G(comptime discriminator: u8) type {
|
||||
pub fn isEqual(self: *const Self, other: Function) bool {
|
||||
return v8.v8__Global__IsEqual(&self.handle, other.handle);
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -130,6 +130,12 @@ pub fn contextCreated(
|
||||
|
||||
pub fn contextDestroyed(self: *Inspector, context: *const v8.Context) void {
|
||||
v8.v8_inspector__Inspector__ContextDestroyed(self.handle, context);
|
||||
|
||||
if (self.default_context) |*dc| {
|
||||
if (v8.v8__Global__IsEqual(dc, context)) {
|
||||
self.default_context = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resetContextGroup(self: *const Inspector) void {
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
const std = @import("std");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const log = @import("../../log.zig");
|
||||
const string = @import("../../string.zig");
|
||||
|
||||
@@ -115,6 +116,49 @@ pub fn exec(self: *const Local, src: []const u8, name: ?[]const u8) !js.Value {
|
||||
return self.compileAndRun(src, name);
|
||||
}
|
||||
|
||||
/// Compiles a function body as function.
|
||||
///
|
||||
/// https://v8.github.io/api/head/classv8_1_1ScriptCompiler.html#a3a15bb5a7dfc3f998e6ac789e6b4646a
|
||||
pub fn compileFunction(
|
||||
self: *const Local,
|
||||
function_body: []const u8,
|
||||
/// We tend to know how many params we'll pass; can remove the comptime if necessary.
|
||||
comptime parameter_names: []const []const u8,
|
||||
extensions: []const v8.Object,
|
||||
) !js.Function {
|
||||
// TODO: Make configurable.
|
||||
const script_name = self.isolate.initStringHandle("anonymous");
|
||||
const script_source = self.isolate.initStringHandle(function_body);
|
||||
|
||||
var parameter_list: [parameter_names.len]*const v8.String = undefined;
|
||||
inline for (0..parameter_names.len) |i| {
|
||||
parameter_list[i] = self.isolate.initStringHandle(parameter_names[i]);
|
||||
}
|
||||
|
||||
// Create `ScriptOrigin`.
|
||||
var origin: v8.ScriptOrigin = undefined;
|
||||
v8.v8__ScriptOrigin__CONSTRUCT(&origin, script_name);
|
||||
|
||||
// Create `ScriptCompilerSource`.
|
||||
var script_compiler_source: v8.ScriptCompilerSource = undefined;
|
||||
v8.v8__ScriptCompiler__Source__CONSTRUCT2(script_source, &origin, null, &script_compiler_source);
|
||||
defer v8.v8__ScriptCompiler__Source__DESTRUCT(&script_compiler_source);
|
||||
|
||||
// Compile the function.
|
||||
const result = v8.v8__ScriptCompiler__CompileFunction(
|
||||
self.handle,
|
||||
&script_compiler_source,
|
||||
parameter_list.len,
|
||||
¶meter_list,
|
||||
extensions.len,
|
||||
@ptrCast(&extensions),
|
||||
v8.kNoCompileOptions,
|
||||
v8.kNoCacheNoReason,
|
||||
) orelse return error.CompilationError;
|
||||
|
||||
return .{ .local = self, .handle = result };
|
||||
}
|
||||
|
||||
pub fn compileAndRun(self: *const Local, src: []const u8, name: ?[]const u8) !js.Value {
|
||||
const script_name = self.isolate.initStringHandle(name orelse "anonymous");
|
||||
const script_source = self.isolate.initStringHandle(src);
|
||||
@@ -171,7 +215,7 @@ pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object,
|
||||
.pointer => |ptr| {
|
||||
const resolved = resolveValue(value);
|
||||
|
||||
const gop = try ctx.identity_map.getOrPut(arena, @intFromPtr(resolved.ptr));
|
||||
const gop = try ctx.origin.identity_map.getOrPut(arena, @intFromPtr(resolved.ptr));
|
||||
if (gop.found_existing) {
|
||||
// we've seen this instance before, return the same object
|
||||
return (js.Object.Global{ .handle = gop.value_ptr.* }).local(self);
|
||||
@@ -225,16 +269,17 @@ pub fn mapZigInstanceToJs(self: *const Local, js_obj_handle: ?*const v8.Object,
|
||||
// can't figure out how to make that work, since it depends on
|
||||
// the [runtime] `value`.
|
||||
// We need the resolved finalizer, which we have in resolved.
|
||||
//
|
||||
// The above if statement would be more clear as:
|
||||
// if (resolved.finalizer_from_v8) |finalizer| {
|
||||
// But that's a runtime check.
|
||||
// Instead, we check if the base has finalizer. The assumption
|
||||
// here is that if a resolve type has a finalizer, then the base
|
||||
// should have a finalizer too.
|
||||
const fc = try ctx.createFinalizerCallback(gop.value_ptr.*, resolved.ptr, resolved.finalizer_from_zig.?);
|
||||
const fc = try ctx.origin.createFinalizerCallback(ctx.session, gop.value_ptr.*, resolved.ptr, resolved.finalizer_from_zig.?);
|
||||
{
|
||||
errdefer fc.deinit();
|
||||
try ctx.finalizer_callbacks.put(ctx.arena, @intFromPtr(resolved.ptr), fc);
|
||||
try ctx.origin.finalizer_callbacks.put(ctx.origin.arena, @intFromPtr(resolved.ptr), fc);
|
||||
}
|
||||
|
||||
conditionallyReference(value);
|
||||
@@ -1083,7 +1128,7 @@ const Resolved = struct {
|
||||
class_id: u16,
|
||||
prototype_chain: []const @import("TaggedOpaque.zig").PrototypeChainEntry,
|
||||
finalizer_from_v8: ?*const fn (handle: ?*const v8.WeakCallbackInfo) callconv(.c) void = null,
|
||||
finalizer_from_zig: ?*const fn (ptr: *anyopaque, page: *Page) void = null,
|
||||
finalizer_from_zig: ?*const fn (ptr: *anyopaque, session: *Session) void = null,
|
||||
};
|
||||
pub fn resolveValue(value: anytype) Resolved {
|
||||
const T = bridge.Struct(@TypeOf(value));
|
||||
|
||||
@@ -97,7 +97,7 @@ pub fn persist(self: Object) !Global {
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
|
||||
try ctx.global_objects.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
240
src/browser/js/Origin.zig
Normal file
240
src/browser/js/Origin.zig
Normal file
@@ -0,0 +1,240 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// Origin represents the shared Zig<->JS bridge state for all contexts within
|
||||
// the same origin. Multiple contexts (frames) from the same origin share a
|
||||
// single Origin, ensuring that JS objects maintain their identity across frames.
|
||||
|
||||
const std = @import("std");
|
||||
const js = @import("js.zig");
|
||||
|
||||
const App = @import("../../App.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
const Origin = @This();
|
||||
|
||||
rc: usize = 1,
|
||||
arena: Allocator,
|
||||
|
||||
// The key, e.g. lightpanda.io:443
|
||||
key: []const u8,
|
||||
|
||||
// Security token - all contexts in this realm must use the same v8::Value instance
|
||||
// as their security token for V8 to allow cross-context access
|
||||
security_token: v8.Global,
|
||||
|
||||
// Serves two purposes. Like `global_objects`, this is used to free
|
||||
// every Global(Object) we've created during the lifetime of the realm.
|
||||
// More importantly, it serves as an identity map - for a given Zig
|
||||
// instance, we map it to the same Global(Object).
|
||||
// The key is the @intFromPtr of the Zig value
|
||||
identity_map: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Some web APIs have to manage opaque values. Ideally, they use an
|
||||
// js.Object, but the js.Object has no lifetime guarantee beyond the
|
||||
// current call. They can call .persist() on their js.Object to get
|
||||
// a `Global(Object)`. We need to track these to free them.
|
||||
// This used to be a map and acted like identity_map; the key was
|
||||
// the @intFromPtr(js_obj.handle). But v8 can re-use address. Without
|
||||
// a reliable way to know if an object has already been persisted,
|
||||
// we now simply persist every time persist() is called.
|
||||
globals: std.ArrayList(v8.Global) = .empty,
|
||||
|
||||
// Temp variants stored in HashMaps for O(1) early cleanup.
|
||||
// Key is global.data_ptr.
|
||||
temps: std.AutoHashMapUnmanaged(usize, v8.Global) = .empty,
|
||||
|
||||
// Any type that is stored in the identity_map which has a finalizer declared
|
||||
// will have its finalizer stored here. This is only used when shutting down
|
||||
// if v8 hasn't called the finalizer directly itself.
|
||||
finalizer_callbacks: std.AutoHashMapUnmanaged(usize, *FinalizerCallback) = .empty,
|
||||
|
||||
pub fn init(app: *App, isolate: js.Isolate, key: []const u8) !*Origin {
|
||||
const arena = try app.arena_pool.acquire();
|
||||
errdefer app.arena_pool.release(arena);
|
||||
|
||||
var hs: js.HandleScope = undefined;
|
||||
hs.init(isolate);
|
||||
defer hs.deinit();
|
||||
|
||||
const owned_key = try arena.dupe(u8, key);
|
||||
const token_local = isolate.initStringHandle(owned_key);
|
||||
var token_global: v8.Global = undefined;
|
||||
v8.v8__Global__New(isolate.handle, token_local, &token_global);
|
||||
|
||||
const self = try arena.create(Origin);
|
||||
self.* = .{
|
||||
.rc = 1,
|
||||
.arena = arena,
|
||||
.key = owned_key,
|
||||
.globals = .empty,
|
||||
.temps = .empty,
|
||||
.security_token = token_global,
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Origin, app: *App) void {
|
||||
// Call finalizers before releasing anything
|
||||
{
|
||||
var it = self.finalizer_callbacks.valueIterator();
|
||||
while (it.next()) |finalizer| {
|
||||
finalizer.*.deinit();
|
||||
}
|
||||
}
|
||||
|
||||
v8.v8__Global__Reset(&self.security_token);
|
||||
|
||||
{
|
||||
var it = self.identity_map.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
for (self.globals.items) |*global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
|
||||
{
|
||||
var it = self.temps.valueIterator();
|
||||
while (it.next()) |global| {
|
||||
v8.v8__Global__Reset(global);
|
||||
}
|
||||
}
|
||||
|
||||
app.arena_pool.release(self.arena);
|
||||
}
|
||||
|
||||
pub fn trackGlobal(self: *Origin, global: v8.Global) !void {
|
||||
return self.globals.append(self.arena, global);
|
||||
}
|
||||
|
||||
pub fn trackTemp(self: *Origin, global: v8.Global) !void {
|
||||
return self.temps.put(self.arena, global.data_ptr, global);
|
||||
}
|
||||
|
||||
pub fn releaseTemp(self: *Origin, global: v8.Global) void {
|
||||
if (self.temps.fetchRemove(global.data_ptr)) |kv| {
|
||||
var g = kv.value;
|
||||
v8.v8__Global__Reset(&g);
|
||||
}
|
||||
}
|
||||
|
||||
/// Release an item from the identity_map (called after finalizer runs from V8)
|
||||
pub fn release(self: *Origin, item: *anyopaque) void {
|
||||
var global = self.identity_map.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
v8.v8__Global__Reset(&global.value);
|
||||
|
||||
// The item has been finalized, remove it from the finalizer callback so that
|
||||
// we don't try to call it again on shutdown.
|
||||
const kv = self.finalizer_callbacks.fetchRemove(@intFromPtr(item)) orelse {
|
||||
if (comptime IS_DEBUG) {
|
||||
std.debug.assert(false);
|
||||
}
|
||||
return;
|
||||
};
|
||||
const fc = kv.value;
|
||||
fc.session.releaseArena(fc.arena);
|
||||
}
|
||||
|
||||
pub fn createFinalizerCallback(
|
||||
self: *Origin,
|
||||
session: *Session,
|
||||
global: v8.Global,
|
||||
ptr: *anyopaque,
|
||||
zig_finalizer: *const fn (ptr: *anyopaque, session: *Session) void,
|
||||
) !*FinalizerCallback {
|
||||
const arena = try session.getArena(.{ .debug = "FinalizerCallback" });
|
||||
errdefer session.releaseArena(arena);
|
||||
const fc = try arena.create(FinalizerCallback);
|
||||
fc.* = .{
|
||||
.arena = arena,
|
||||
.origin = self,
|
||||
.session = session,
|
||||
.ptr = ptr,
|
||||
.global = global,
|
||||
.zig_finalizer = zig_finalizer,
|
||||
};
|
||||
return fc;
|
||||
}
|
||||
|
||||
pub fn transferTo(self: *Origin, dest: *Origin) !void {
|
||||
const arena = dest.arena;
|
||||
|
||||
try dest.globals.ensureUnusedCapacity(arena, self.globals.items.len);
|
||||
for (self.globals.items) |obj| {
|
||||
dest.globals.appendAssumeCapacity(obj);
|
||||
}
|
||||
self.globals.clearRetainingCapacity();
|
||||
|
||||
{
|
||||
try dest.temps.ensureUnusedCapacity(arena, self.temps.count());
|
||||
var it = self.temps.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try dest.temps.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.temps.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try dest.finalizer_callbacks.ensureUnusedCapacity(arena, self.finalizer_callbacks.count());
|
||||
var it = self.finalizer_callbacks.iterator();
|
||||
while (it.next()) |kv| {
|
||||
kv.value_ptr.*.origin = dest;
|
||||
try dest.finalizer_callbacks.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.finalizer_callbacks.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
{
|
||||
try dest.identity_map.ensureUnusedCapacity(arena, self.identity_map.count());
|
||||
var it = self.identity_map.iterator();
|
||||
while (it.next()) |kv| {
|
||||
try dest.identity_map.put(arena, kv.key_ptr.*, kv.value_ptr.*);
|
||||
}
|
||||
self.identity_map.clearRetainingCapacity();
|
||||
}
|
||||
}
|
||||
|
||||
// A type that has a finalizer can have its finalizer called one of two ways.
|
||||
// The first is from V8 via the WeakCallback we give to weakRef. But that isn't
|
||||
// guaranteed to fire, so we track this in finalizer_callbacks and call them on
|
||||
// origin shutdown.
|
||||
pub const FinalizerCallback = struct {
|
||||
arena: Allocator,
|
||||
origin: *Origin,
|
||||
session: *Session,
|
||||
ptr: *anyopaque,
|
||||
global: v8.Global,
|
||||
zig_finalizer: *const fn (ptr: *anyopaque, session: *Session) void,
|
||||
|
||||
pub fn deinit(self: *FinalizerCallback) void {
|
||||
self.zig_finalizer(self.ptr, self.session);
|
||||
self.session.releaseArena(self.arena);
|
||||
}
|
||||
};
|
||||
@@ -62,22 +62,25 @@ fn _persist(self: *const Promise, comptime is_global: bool) !(if (is_global) Glo
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_promises.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_promises_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -91,5 +94,9 @@ fn G(comptime discriminator: u8) type {
|
||||
.handle = @ptrCast(v8.v8__Global__Get(&self.handle, l.isolate.handle)),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -79,7 +79,7 @@ pub fn persist(self: PromiseResolver) !Global {
|
||||
var ctx = self.local.ctx;
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
try ctx.global_promise_resolvers.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
|
||||
@@ -259,11 +259,11 @@ fn _persist(self: *const Value, comptime is_global: bool) !(if (is_global) Globa
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
if (comptime is_global) {
|
||||
try ctx.global_values.append(ctx.arena, global);
|
||||
} else {
|
||||
try ctx.global_values_temp.put(ctx.arena, global.data_ptr, global);
|
||||
try ctx.trackGlobal(global);
|
||||
return .{ .handle = global, .origin = {} };
|
||||
}
|
||||
return .{ .handle = global };
|
||||
try ctx.trackTemp(global);
|
||||
return .{ .handle = global, .origin = ctx.origin };
|
||||
}
|
||||
|
||||
pub fn toZig(self: Value, comptime T: type) !T {
|
||||
@@ -310,15 +310,18 @@ pub fn format(self: Value, writer: *std.Io.Writer) !void {
|
||||
return js_str.format(writer);
|
||||
}
|
||||
|
||||
pub const Temp = G(0);
|
||||
pub const Global = G(1);
|
||||
pub const Temp = G(.temp);
|
||||
pub const Global = G(.global);
|
||||
|
||||
fn G(comptime discriminator: u8) type {
|
||||
const GlobalType = enum(u8) {
|
||||
temp,
|
||||
global,
|
||||
};
|
||||
|
||||
fn G(comptime global_type: GlobalType) type {
|
||||
return struct {
|
||||
handle: v8.Global,
|
||||
|
||||
// makes the types different (G(0) != G(1)), without taking up space
|
||||
comptime _: u8 = discriminator,
|
||||
origin: if (global_type == .temp) *js.Origin else void,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
@@ -336,5 +339,9 @@ fn G(comptime discriminator: u8) type {
|
||||
pub fn isEqual(self: *const Self, other: Value) bool {
|
||||
return v8.v8__Global__IsEqual(&self.handle, other.handle);
|
||||
}
|
||||
|
||||
pub fn release(self: *const Self) void {
|
||||
self.origin.releaseTemp(self.handle);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -21,11 +21,13 @@ const js = @import("js.zig");
|
||||
const lp = @import("lightpanda");
|
||||
const log = @import("../../log.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const v8 = js.v8;
|
||||
|
||||
const Caller = @import("Caller.zig");
|
||||
const Context = @import("Context.zig");
|
||||
const Origin = @import("Origin.zig");
|
||||
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
@@ -104,24 +106,24 @@ pub fn Builder(comptime T: type) type {
|
||||
return entries;
|
||||
}
|
||||
|
||||
pub fn finalizer(comptime func: *const fn (self: *T, shutdown: bool, page: *Page) void) Finalizer {
|
||||
pub fn finalizer(comptime func: *const fn (self: *T, shutdown: bool, session: *Session) void) Finalizer {
|
||||
return .{
|
||||
.from_zig = struct {
|
||||
fn wrap(ptr: *anyopaque, page: *Page) void {
|
||||
func(@ptrCast(@alignCast(ptr)), true, page);
|
||||
fn wrap(ptr: *anyopaque, session: *Session) void {
|
||||
func(@ptrCast(@alignCast(ptr)), true, session);
|
||||
}
|
||||
}.wrap,
|
||||
|
||||
.from_v8 = struct {
|
||||
fn wrap(handle: ?*const v8.WeakCallbackInfo) callconv(.c) void {
|
||||
const ptr = v8.v8__WeakCallbackInfo__GetParameter(handle.?).?;
|
||||
const fc: *Context.FinalizerCallback = @ptrCast(@alignCast(ptr));
|
||||
const fc: *Origin.FinalizerCallback = @ptrCast(@alignCast(ptr));
|
||||
|
||||
const ctx = fc.ctx;
|
||||
const origin = fc.origin;
|
||||
const value_ptr = fc.ptr;
|
||||
if (ctx.finalizer_callbacks.contains(@intFromPtr(value_ptr))) {
|
||||
func(@ptrCast(@alignCast(value_ptr)), false, ctx.page);
|
||||
ctx.release(value_ptr);
|
||||
if (origin.finalizer_callbacks.contains(@intFromPtr(value_ptr))) {
|
||||
func(@ptrCast(@alignCast(value_ptr)), false, fc.session);
|
||||
origin.release(value_ptr);
|
||||
} else {
|
||||
// A bit weird, but v8 _requires_ that we release it
|
||||
// If we don't. We'll 100% crash.
|
||||
@@ -413,12 +415,12 @@ pub const Property = struct {
|
||||
};
|
||||
|
||||
const Finalizer = struct {
|
||||
// The finalizer wrapper when called fro Zig. This is only called on
|
||||
// Context.deinit
|
||||
from_zig: *const fn (ctx: *anyopaque, page: *Page) void,
|
||||
// The finalizer wrapper when called from Zig. This is only called on
|
||||
// Origin.deinit
|
||||
from_zig: *const fn (ctx: *anyopaque, session: *Session) void,
|
||||
|
||||
// The finalizer wrapper when called from V8. This may never be called
|
||||
// (hence why we fallback to calling in Context.denit). If it is called,
|
||||
// (hence why we fallback to calling in Origin.deinit). If it is called,
|
||||
// it is only ever called after we SetWeak on the Global.
|
||||
from_v8: *const fn (?*const v8.WeakCallbackInfo) callconv(.c) void,
|
||||
};
|
||||
|
||||
@@ -24,6 +24,7 @@ const string = @import("../../string.zig");
|
||||
pub const Env = @import("Env.zig");
|
||||
pub const bridge = @import("bridge.zig");
|
||||
pub const Caller = @import("Caller.zig");
|
||||
pub const Origin = @import("Origin.zig");
|
||||
pub const Context = @import("Context.zig");
|
||||
pub const Local = @import("Local.zig");
|
||||
pub const Inspector = @import("Inspector.zig");
|
||||
@@ -161,7 +162,7 @@ pub fn ArrayBufferRef(comptime kind: ArrayType) type {
|
||||
var ctx = self.local.ctx;
|
||||
var global: v8.Global = undefined;
|
||||
v8.v8__Global__New(ctx.isolate.handle, self.handle, &global);
|
||||
try ctx.global_values.append(ctx.arena, global);
|
||||
try ctx.trackGlobal(global);
|
||||
|
||||
return .{ .handle = global };
|
||||
}
|
||||
|
||||
@@ -293,6 +293,28 @@
|
||||
div.style.top = '0';
|
||||
testing.expectEqual('0px', div.style.top);
|
||||
|
||||
// Scroll properties
|
||||
div.style.scrollMarginTop = '0';
|
||||
testing.expectEqual('0px', div.style.scrollMarginTop);
|
||||
|
||||
div.style.scrollPaddingBottom = '0';
|
||||
testing.expectEqual('0px', div.style.scrollPaddingBottom);
|
||||
|
||||
// Multi-column
|
||||
div.style.columnWidth = '0';
|
||||
testing.expectEqual('0px', div.style.columnWidth);
|
||||
|
||||
div.style.columnRuleWidth = '0';
|
||||
testing.expectEqual('0px', div.style.columnRuleWidth);
|
||||
|
||||
// Outline shorthand
|
||||
div.style.outline = '0';
|
||||
testing.expectEqual('0px', div.style.outline);
|
||||
|
||||
// Shapes
|
||||
div.style.shapeMargin = '0';
|
||||
testing.expectEqual('0px', div.style.shapeMargin);
|
||||
|
||||
// Non-length properties should not be affected
|
||||
div.style.opacity = '0';
|
||||
testing.expectEqual('0', div.style.opacity);
|
||||
@@ -313,6 +335,12 @@
|
||||
div.style.alignContent = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.alignContent);
|
||||
|
||||
div.style.alignSelf = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.alignSelf);
|
||||
|
||||
div.style.justifySelf = 'first baseline';
|
||||
testing.expectEqual('baseline', div.style.justifySelf);
|
||||
|
||||
// "last baseline" should remain unchanged
|
||||
div.style.alignItems = 'last baseline';
|
||||
testing.expectEqual('last baseline', div.style.alignItems);
|
||||
@@ -339,6 +367,16 @@
|
||||
|
||||
div.style.gap = '10px 20px';
|
||||
testing.expectEqual('10px 20px', div.style.gap);
|
||||
|
||||
// New shorthands
|
||||
div.style.overflow = 'hidden hidden';
|
||||
testing.expectEqual('hidden', div.style.overflow);
|
||||
|
||||
div.style.scrollSnapAlign = 'start start';
|
||||
testing.expectEqual('start', div.style.scrollSnapAlign);
|
||||
|
||||
div.style.overscrollBehavior = 'auto auto';
|
||||
testing.expectEqual('auto', div.style.overscrollBehavior);
|
||||
}
|
||||
</script>
|
||||
|
||||
|
||||
@@ -23,6 +23,22 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="action">
|
||||
{
|
||||
const form = document.createElement('form')
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/form.html', form.action)
|
||||
|
||||
form.action = 'hello';
|
||||
testing.expectEqual(testing.BASE_URL + 'element/html/hello', form.action)
|
||||
|
||||
form.action = '/hello';
|
||||
testing.expectEqual(testing.ORIGIN + 'hello', form.action)
|
||||
|
||||
form.action = 'https://lightpanda.io/hello';
|
||||
testing.expectEqual('https://lightpanda.io/hello', form.action)
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- Test fixtures for form.method -->
|
||||
<form id="form_get" method="get"></form>
|
||||
<form id="form_post" method="post"></form>
|
||||
|
||||
@@ -64,11 +64,12 @@
|
||||
// child frame's top.parent is itself (root has no parent)
|
||||
testing.expectEqual(window, window[0].top.parent);
|
||||
|
||||
// Todo: Context security tokens
|
||||
// testing.expectEqual(true, window.sub1_loaded);
|
||||
// testing.expectEqual(true, window.sub2_loaded);
|
||||
// testing.expectEqual(1, window.sub1_count);
|
||||
// testing.expectEqual(2, window.sub2_count);
|
||||
// Cross-frame property access
|
||||
testing.expectEqual(true, window.sub1_loaded);
|
||||
testing.expectEqual(true, window.sub2_loaded);
|
||||
testing.expectEqual(1, window.sub1_count);
|
||||
// depends on how far the initial load got before it was cancelled.
|
||||
testing.expectEqual(true, window.sub2_count == 1 || window.sub2_count == 2);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
315
src/browser/tests/range_mutations.html
Normal file
315
src/browser/tests/range_mutations.html
Normal file
@@ -0,0 +1,315 @@
|
||||
<!DOCTYPE html>
|
||||
<script src="testing.js"></script>
|
||||
|
||||
<script id=insertData_adjusts_range_offsets>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
// range covers "cde"
|
||||
|
||||
// Insert "XX" at offset 1 (before range start)
|
||||
text.insertData(1, 'XX');
|
||||
// "aXXbcdef" — range should shift right by 2
|
||||
testing.expectEqual(4, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_at_range_start>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert at exactly the start offset — should not shift start
|
||||
text.insertData(2, 'YY');
|
||||
// "abYYcdef" — start stays at 2, end shifts by 2
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_inside_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert inside the range
|
||||
text.insertData(3, 'Z');
|
||||
// "abcZdef" — start unchanged, end shifts by 1
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(6, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertData_after_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Insert after range end — no change
|
||||
text.insertData(5, 'ZZ');
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(5, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_before_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 3);
|
||||
range.setEnd(text, 5);
|
||||
// range covers "de"
|
||||
|
||||
// Delete "ab" (offset 0, count 2) — before range
|
||||
text.deleteData(0, 2);
|
||||
// "cdef" — range shifts left by 2
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_overlapping_range_start>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Delete from offset 1, count 2 — overlaps range start
|
||||
text.deleteData(1, 2);
|
||||
// "adef" — start clamped to offset(1), end adjusted
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=deleteData_inside_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 1);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Delete inside range: offset 2, count 2
|
||||
text.deleteData(2, 2);
|
||||
// "abef" — start unchanged, end shifts by -2
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=replaceData_adjusts_range>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Replace "cd" (offset 2, count 2) with "XXXX" (4 chars)
|
||||
text.replaceData(2, 2, 'XXXX');
|
||||
// "abXXXXef" — start clamped to 2, end adjusted by (4-2)=+2
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(7, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=splitText_moves_range_to_new_node>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 4);
|
||||
range.setEnd(text, 6);
|
||||
// range covers "ef"
|
||||
|
||||
const newText = text.splitText(3);
|
||||
// text = "abc", newText = "def"
|
||||
// Range was at (text, 4)-(text, 6), with offset > 3:
|
||||
// start moves to (newText, 4-3=1), end moves to (newText, 6-3=3)
|
||||
testing.expectEqual(newText, range.startContainer);
|
||||
testing.expectEqual(1, range.startOffset);
|
||||
testing.expectEqual(newText, range.endContainer);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=splitText_range_at_split_point>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 0);
|
||||
range.setEnd(text, 3);
|
||||
// range covers "abc"
|
||||
|
||||
const newText = text.splitText(3);
|
||||
// text = "abc", newText = "def"
|
||||
// Range end is at exactly the split offset — should stay on original node
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(text, range.endContainer);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=appendChild_does_not_affect_range>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 0);
|
||||
range.setEnd(div, 2);
|
||||
|
||||
// Appending should not affect range offsets (spec: no update for append)
|
||||
const p3 = document.createElement('p');
|
||||
div.appendChild(p3);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(2, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=insertBefore_shifts_range_offsets>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 1);
|
||||
range.setEnd(div, 2);
|
||||
|
||||
// Insert before p1 (index 0) — range offsets > 0 should increment
|
||||
const span = document.createElement('span');
|
||||
div.insertBefore(span, p1);
|
||||
testing.expectEqual(2, range.startOffset);
|
||||
testing.expectEqual(3, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=removeChild_shifts_range_offsets>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p1 = document.createElement('p');
|
||||
const p2 = document.createElement('p');
|
||||
const p3 = document.createElement('p');
|
||||
div.appendChild(p1);
|
||||
div.appendChild(p2);
|
||||
div.appendChild(p3);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(div, 1);
|
||||
range.setEnd(div, 3);
|
||||
|
||||
// Remove p1 (index 0) — offsets > 0 should decrement
|
||||
div.removeChild(p1);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(2, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=removeChild_moves_range_from_descendant>
|
||||
{
|
||||
const div = document.createElement('div');
|
||||
const p = document.createElement('p');
|
||||
const text = document.createTextNode('hello');
|
||||
p.appendChild(text);
|
||||
div.appendChild(p);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 4);
|
||||
|
||||
// Remove p (which contains text) — range should move to (div, index_of_p)
|
||||
div.removeChild(p);
|
||||
testing.expectEqual(div, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(div, range.endContainer);
|
||||
testing.expectEqual(0, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=multiple_ranges_updated>
|
||||
{
|
||||
const text = document.createTextNode('abcdefgh');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range1 = document.createRange();
|
||||
range1.setStart(text, 1);
|
||||
range1.setEnd(text, 3);
|
||||
|
||||
const range2 = document.createRange();
|
||||
range2.setStart(text, 5);
|
||||
range2.setEnd(text, 7);
|
||||
|
||||
// Insert at offset 0 — both ranges should shift
|
||||
text.insertData(0, 'XX');
|
||||
testing.expectEqual(3, range1.startOffset);
|
||||
testing.expectEqual(5, range1.endOffset);
|
||||
testing.expectEqual(7, range2.startOffset);
|
||||
testing.expectEqual(9, range2.endOffset);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id=data_setter_updates_ranges>
|
||||
{
|
||||
const text = document.createTextNode('abcdef');
|
||||
const div = document.createElement('div');
|
||||
div.appendChild(text);
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(text, 2);
|
||||
range.setEnd(text, 5);
|
||||
|
||||
// Setting data replaces all content — range collapses to offset 0
|
||||
text.data = 'new content';
|
||||
testing.expectEqual(text, range.startContainer);
|
||||
testing.expectEqual(0, range.startOffset);
|
||||
testing.expectEqual(text, range.endContainer);
|
||||
testing.expectEqual(0, range.endOffset);
|
||||
}
|
||||
</script>
|
||||
@@ -118,7 +118,7 @@
|
||||
BASE_URL: 'http://127.0.0.1:9582/src/browser/tests/',
|
||||
};
|
||||
|
||||
if (!IS_TEST_RUNNER) {
|
||||
if (window.navigator.userAgent.startsWith("Lightpanda/") == false) {
|
||||
// The page is running in a different browser. Probably a developer making sure
|
||||
// a test is correct. There are a few tweaks we need to do to make this a
|
||||
// seemless, namely around adapting paths/urls.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!DOCTYPE html>
|
||||
<body onload=func1></body>
|
||||
<body onload="func1(event)"></body>
|
||||
<script src="../testing.js"></script>
|
||||
|
||||
<script id=bodyOnLoad1>
|
||||
@@ -14,4 +14,3 @@
|
||||
testing.expectEqual(1, called);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
@@ -33,6 +33,9 @@ _start_offset: u32,
|
||||
_end_container: *Node,
|
||||
_start_container: *Node,
|
||||
|
||||
// Intrusive linked list node for tracking live ranges on the Page.
|
||||
_range_link: std.DoublyLinkedList.Node = .{},
|
||||
|
||||
pub const Type = union(enum) {
|
||||
range: *Range,
|
||||
// TODO: static_range: *StaticRange,
|
||||
@@ -215,6 +218,91 @@ fn isInclusiveAncestorOf(potential_ancestor: *Node, node: *Node) bool {
|
||||
return isAncestorOf(potential_ancestor, node);
|
||||
}
|
||||
|
||||
/// Update this range's boundaries after a replaceData mutation on target.
|
||||
/// All parameters are in UTF-16 code unit offsets.
|
||||
pub fn updateForCharacterDataReplace(self: *AbstractRange, target: *Node, offset: u32, count: u32, data_len: u32) void {
|
||||
if (self._start_container == target) {
|
||||
if (self._start_offset > offset and self._start_offset <= offset + count) {
|
||||
self._start_offset = offset;
|
||||
} else if (self._start_offset > offset + count) {
|
||||
// Use i64 intermediate to avoid u32 underflow when count > data_len
|
||||
self._start_offset = @intCast(@as(i64, self._start_offset) + @as(i64, data_len) - @as(i64, count));
|
||||
}
|
||||
}
|
||||
|
||||
if (self._end_container == target) {
|
||||
if (self._end_offset > offset and self._end_offset <= offset + count) {
|
||||
self._end_offset = offset;
|
||||
} else if (self._end_offset > offset + count) {
|
||||
self._end_offset = @intCast(@as(i64, self._end_offset) + @as(i64, data_len) - @as(i64, count));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update this range's boundaries after a splitText operation.
|
||||
/// Steps 7b-7e of the DOM spec splitText algorithm.
|
||||
pub fn updateForSplitText(self: *AbstractRange, target: *Node, new_node: *Node, offset: u32, parent: *Node, node_index: u32) void {
|
||||
// Step 7b: ranges on the original node with start > offset move to new node
|
||||
if (self._start_container == target and self._start_offset > offset) {
|
||||
self._start_container = new_node;
|
||||
self._start_offset = self._start_offset - offset;
|
||||
}
|
||||
// Step 7c: ranges on the original node with end > offset move to new node
|
||||
if (self._end_container == target and self._end_offset > offset) {
|
||||
self._end_container = new_node;
|
||||
self._end_offset = self._end_offset - offset;
|
||||
}
|
||||
// Step 7d: ranges on parent with start == node_index + 1 increment
|
||||
if (self._start_container == parent and self._start_offset == node_index + 1) {
|
||||
self._start_offset += 1;
|
||||
}
|
||||
// Step 7e: ranges on parent with end == node_index + 1 increment
|
||||
if (self._end_container == parent and self._end_offset == node_index + 1) {
|
||||
self._end_offset += 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// Update this range's boundaries after a node insertion.
|
||||
pub fn updateForNodeInsertion(self: *AbstractRange, parent: *Node, child_index: u32) void {
|
||||
if (self._start_container == parent and self._start_offset > child_index) {
|
||||
self._start_offset += 1;
|
||||
}
|
||||
if (self._end_container == parent and self._end_offset > child_index) {
|
||||
self._end_offset += 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// Update this range's boundaries after a node removal.
|
||||
pub fn updateForNodeRemoval(self: *AbstractRange, parent: *Node, child: *Node, child_index: u32) void {
|
||||
// Steps 4-5: ranges whose start/end is an inclusive descendant of child
|
||||
// get moved to (parent, child_index).
|
||||
if (isInclusiveDescendantOf(self._start_container, child)) {
|
||||
self._start_container = parent;
|
||||
self._start_offset = child_index;
|
||||
}
|
||||
if (isInclusiveDescendantOf(self._end_container, child)) {
|
||||
self._end_container = parent;
|
||||
self._end_offset = child_index;
|
||||
}
|
||||
|
||||
// Steps 6-7: ranges on parent at offsets > child_index get decremented.
|
||||
if (self._start_container == parent and self._start_offset > child_index) {
|
||||
self._start_offset -= 1;
|
||||
}
|
||||
if (self._end_container == parent and self._end_offset > child_index) {
|
||||
self._end_offset -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
fn isInclusiveDescendantOf(node: *Node, potential_ancestor: *Node) bool {
|
||||
var current: ?*Node = node;
|
||||
while (current) |n| {
|
||||
if (n == potential_ancestor) return true;
|
||||
current = n.parentNode();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
pub const bridge = js.Bridge(AbstractRange);
|
||||
|
||||
|
||||
@@ -21,8 +21,12 @@ const Writer = std.Io.Writer;
|
||||
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Mime = @import("../Mime.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// https://w3c.github.io/FileAPI/#blob-section
|
||||
/// https://developer.mozilla.org/en-US/docs/Web/API/Blob
|
||||
const Blob = @This();
|
||||
@@ -31,6 +35,8 @@ pub const _prototype_root = true;
|
||||
|
||||
_type: Type,
|
||||
|
||||
_arena: Allocator,
|
||||
|
||||
/// Immutable slice of blob.
|
||||
/// Note that another blob may hold a pointer/slice to this,
|
||||
/// so its better to leave the deallocation of it to arena allocator.
|
||||
@@ -69,6 +75,9 @@ pub fn initWithMimeValidation(
|
||||
validate_mime: bool,
|
||||
page: *Page,
|
||||
) !*Blob {
|
||||
const arena = try page.getArena(.{ .debug = "Blob" });
|
||||
errdefer page.releaseArena(arena);
|
||||
|
||||
const options: InitOptions = maybe_options orelse .{};
|
||||
|
||||
const mime: []const u8 = blk: {
|
||||
@@ -77,7 +86,7 @@ pub fn initWithMimeValidation(
|
||||
break :blk "";
|
||||
}
|
||||
|
||||
const buf = try page.arena.dupe(u8, t);
|
||||
const buf = try arena.dupe(u8, t);
|
||||
|
||||
if (validate_mime) {
|
||||
// Full MIME parsing per MIME sniff spec (for Content-Type headers)
|
||||
@@ -99,7 +108,7 @@ pub fn initWithMimeValidation(
|
||||
|
||||
const data = blk: {
|
||||
if (maybe_blob_parts) |blob_parts| {
|
||||
var w: Writer.Allocating = .init(page.arena);
|
||||
var w: Writer.Allocating = .init(arena);
|
||||
const use_native_endings = std.mem.eql(u8, options.endings, "native");
|
||||
try writeBlobParts(&w.writer, blob_parts, use_native_endings);
|
||||
|
||||
@@ -109,11 +118,19 @@ pub fn initWithMimeValidation(
|
||||
break :blk "";
|
||||
};
|
||||
|
||||
return page._factory.create(Blob{
|
||||
const self = try arena.create(Blob);
|
||||
self.* = .{
|
||||
._arena = arena,
|
||||
._type = .generic,
|
||||
._slice = data,
|
||||
._mime = mime,
|
||||
});
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Blob, shutdown: bool, session: *Session) void {
|
||||
_ = shutdown;
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
const largest_vector = @max(std.simd.suggestVectorLength(u8) orelse 1, 8);
|
||||
@@ -264,57 +281,31 @@ pub fn bytes(self: *const Blob, page: *Page) !js.Promise {
|
||||
/// from a subset of the blob on which it's called.
|
||||
pub fn slice(
|
||||
self: *const Blob,
|
||||
maybe_start: ?i32,
|
||||
maybe_end: ?i32,
|
||||
maybe_content_type: ?[]const u8,
|
||||
start_: ?i32,
|
||||
end_: ?i32,
|
||||
content_type_: ?[]const u8,
|
||||
page: *Page,
|
||||
) !*Blob {
|
||||
const mime: []const u8 = blk: {
|
||||
if (maybe_content_type) |content_type| {
|
||||
if (content_type.len == 0) {
|
||||
break :blk "";
|
||||
}
|
||||
const data = self._slice;
|
||||
|
||||
break :blk try page.dupeString(content_type);
|
||||
const start = blk: {
|
||||
const requested_start = start_ orelse break :blk 0;
|
||||
if (requested_start < 0) {
|
||||
break :blk data.len -| @abs(requested_start);
|
||||
}
|
||||
|
||||
break :blk "";
|
||||
break :blk @min(data.len, @as(u31, @intCast(requested_start)));
|
||||
};
|
||||
|
||||
const data = self._slice;
|
||||
if (maybe_start) |_start| {
|
||||
const start = blk: {
|
||||
if (_start < 0) {
|
||||
break :blk data.len -| @abs(_start);
|
||||
}
|
||||
const end: usize = blk: {
|
||||
const requested_end = end_ orelse break :blk data.len;
|
||||
if (requested_end < 0) {
|
||||
break :blk @max(start, data.len -| @abs(requested_end));
|
||||
}
|
||||
|
||||
break :blk @min(data.len, @as(u31, @intCast(_start)));
|
||||
};
|
||||
break :blk @min(data.len, @max(start, @as(u31, @intCast(requested_end))));
|
||||
};
|
||||
|
||||
const end: usize = blk: {
|
||||
if (maybe_end) |_end| {
|
||||
if (_end < 0) {
|
||||
break :blk @max(start, data.len -| @abs(_end));
|
||||
}
|
||||
|
||||
break :blk @min(data.len, @max(start, @as(u31, @intCast(_end))));
|
||||
}
|
||||
|
||||
break :blk data.len;
|
||||
};
|
||||
|
||||
return page._factory.create(Blob{
|
||||
._type = .generic,
|
||||
._slice = data[start..end],
|
||||
._mime = mime,
|
||||
});
|
||||
}
|
||||
|
||||
return page._factory.create(Blob{
|
||||
._type = .generic,
|
||||
._slice = data,
|
||||
._mime = mime,
|
||||
});
|
||||
return Blob.init(&.{data[start..end]}, .{ .type = content_type_ orelse "" }, page);
|
||||
}
|
||||
|
||||
/// Returns the size of the Blob in bytes.
|
||||
@@ -334,6 +325,8 @@ pub const JsApi = struct {
|
||||
pub const name = "Blob";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(Blob.deinit);
|
||||
};
|
||||
|
||||
pub const constructor = bridge.constructor(Blob.init, .{});
|
||||
|
||||
@@ -37,7 +37,7 @@ _data: String = .empty,
|
||||
/// Count UTF-16 code units in a UTF-8 string.
|
||||
/// 4-byte UTF-8 sequences (codepoints >= U+10000) produce 2 UTF-16 code units (surrogate pair),
|
||||
/// everything else produces 1.
|
||||
fn utf16Len(data: []const u8) usize {
|
||||
pub fn utf16Len(data: []const u8) usize {
|
||||
var count: usize = 0;
|
||||
var i: usize = 0;
|
||||
while (i < data.len) {
|
||||
@@ -232,14 +232,13 @@ pub fn setData(self: *CData, value: ?[]const u8, page: *Page) !void {
|
||||
}
|
||||
|
||||
/// JS bridge wrapper for `data` setter.
|
||||
/// Handles [LegacyNullToEmptyString]: null → setData(null) → "".
|
||||
/// Passes everything else (including undefined) through V8 toString,
|
||||
/// so `undefined` becomes the string "undefined" per spec.
|
||||
/// Per spec, setting .data runs replaceData(0, this.length, value),
|
||||
/// which includes live range updates.
|
||||
/// Handles [LegacyNullToEmptyString]: null → "" per spec.
|
||||
pub fn _setData(self: *CData, value: js.Value, page: *Page) !void {
|
||||
if (value.isNull()) {
|
||||
return self.setData(null, page);
|
||||
}
|
||||
return self.setData(try value.toZig([]const u8), page);
|
||||
const new_value: []const u8 = if (value.isNull()) "" else try value.toZig([]const u8);
|
||||
const length = self.getLength();
|
||||
try self.replaceData(0, length, new_value, page);
|
||||
}
|
||||
|
||||
pub fn format(self: *const CData, writer: *std.io.Writer) !void {
|
||||
@@ -272,15 +271,20 @@ pub fn isEqualNode(self: *const CData, other: *const CData) bool {
|
||||
}
|
||||
|
||||
pub fn appendData(self: *CData, data: []const u8, page: *Page) !void {
|
||||
const old_value = self._data;
|
||||
self._data = try String.concat(page.arena, &.{ self._data.str(), data });
|
||||
page.characterDataChange(self.asNode(), old_value);
|
||||
// Per DOM spec, appendData(data) is replaceData(length, 0, data).
|
||||
const length = self.getLength();
|
||||
try self.replaceData(length, 0, data, page);
|
||||
}
|
||||
|
||||
pub fn deleteData(self: *CData, offset: usize, count: usize, page: *Page) !void {
|
||||
const end_utf16 = std.math.add(usize, offset, count) catch std.math.maxInt(usize);
|
||||
const range = try utf16RangeToUtf8(self._data.str(), offset, end_utf16);
|
||||
|
||||
// Update live ranges per DOM spec replaceData steps (deleteData = replaceData with data="")
|
||||
const length = self.getLength();
|
||||
const effective_count: u32 = @intCast(@min(count, length - offset));
|
||||
page.updateRangesForCharacterDataReplace(self.asNode(), @intCast(offset), effective_count, 0);
|
||||
|
||||
const old_data = self._data;
|
||||
const old_value = old_data.str();
|
||||
if (range.start == 0) {
|
||||
@@ -299,6 +303,10 @@ pub fn deleteData(self: *CData, offset: usize, count: usize, page: *Page) !void
|
||||
|
||||
pub fn insertData(self: *CData, offset: usize, data: []const u8, page: *Page) !void {
|
||||
const byte_offset = try utf16OffsetToUtf8(self._data.str(), offset);
|
||||
|
||||
// Update live ranges per DOM spec replaceData steps (insertData = replaceData with count=0)
|
||||
page.updateRangesForCharacterDataReplace(self.asNode(), @intCast(offset), 0, @intCast(utf16Len(data)));
|
||||
|
||||
const old_value = self._data;
|
||||
const existing = old_value.str();
|
||||
self._data = try String.concat(page.arena, &.{
|
||||
@@ -312,6 +320,12 @@ pub fn insertData(self: *CData, offset: usize, data: []const u8, page: *Page) !v
|
||||
pub fn replaceData(self: *CData, offset: usize, count: usize, data: []const u8, page: *Page) !void {
|
||||
const end_utf16 = std.math.add(usize, offset, count) catch std.math.maxInt(usize);
|
||||
const range = try utf16RangeToUtf8(self._data.str(), offset, end_utf16);
|
||||
|
||||
// Update live ranges per DOM spec replaceData steps
|
||||
const length = self.getLength();
|
||||
const effective_count: u32 = @intCast(@min(count, length - offset));
|
||||
page.updateRangesForCharacterDataReplace(self.asNode(), @intCast(offset), effective_count, @intCast(utf16Len(data)));
|
||||
|
||||
const old_value = self._data;
|
||||
const existing = old_value.str();
|
||||
self._data = try String.concat(page.arena, &.{
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const js = @import("../js/js.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const EventTarget = @import("EventTarget.zig");
|
||||
const Node = @import("Node.zig");
|
||||
const String = @import("../../string.zig").String;
|
||||
@@ -139,9 +140,9 @@ pub fn acquireRef(self: *Event) void {
|
||||
self._rc += 1;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Event, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *Event, shutdown: bool, session: *Session) void {
|
||||
if (shutdown) {
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -151,7 +152,7 @@ pub fn deinit(self: *Event, shutdown: bool, page: *Page) void {
|
||||
}
|
||||
|
||||
if (rc == 1) {
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
} else {
|
||||
self._rc = rc - 1;
|
||||
}
|
||||
|
||||
@@ -59,7 +59,7 @@ pub fn dispatchEvent(self: *EventTarget, event: *Event, page: *Page) !bool {
|
||||
event._is_trusted = false;
|
||||
|
||||
event.acquireRef();
|
||||
defer event.deinit(false, page);
|
||||
defer event.deinit(false, page._session);
|
||||
try page._event_manager.dispatch(self, event);
|
||||
return !event._cancelable or !event._prevent_default;
|
||||
}
|
||||
|
||||
@@ -18,9 +18,11 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Blob = @import("Blob.zig");
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
|
||||
const Blob = @import("Blob.zig");
|
||||
|
||||
const File = @This();
|
||||
|
||||
@@ -29,7 +31,13 @@ _proto: *Blob,
|
||||
|
||||
// TODO: Implement File API.
|
||||
pub fn init(page: *Page) !*File {
|
||||
return page._factory.blob(File{ ._proto = undefined });
|
||||
const arena = try page.getArena(.{ .debug = "File" });
|
||||
errdefer page.releaseArena(arena);
|
||||
return page._factory.blob(arena, File{ ._proto = undefined });
|
||||
}
|
||||
|
||||
pub fn deinit(self: *File, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub const JsApi = struct {
|
||||
@@ -39,6 +47,8 @@ pub const JsApi = struct {
|
||||
pub const name = "File";
|
||||
pub const prototype_chain = bridge.prototypeChain();
|
||||
pub var class_id: bridge.ClassId = undefined;
|
||||
pub const weak = true;
|
||||
pub const finalizer = bridge.finalizer(File.deinit);
|
||||
};
|
||||
|
||||
pub const constructor = bridge.constructor(File.init, .{});
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const js = @import("../js/js.zig");
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const EventTarget = @import("EventTarget.zig");
|
||||
const ProgressEvent = @import("event/ProgressEvent.zig");
|
||||
const Blob = @import("Blob.zig");
|
||||
@@ -69,17 +70,15 @@ pub fn init(page: *Page) !*FileReader {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn deinit(self: *FileReader, _: bool, page: *Page) void {
|
||||
const js_ctx = page.js;
|
||||
pub fn deinit(self: *FileReader, _: bool, session: *Session) void {
|
||||
if (self._on_abort) |func| func.release();
|
||||
if (self._on_error) |func| func.release();
|
||||
if (self._on_load) |func| func.release();
|
||||
if (self._on_load_end) |func| func.release();
|
||||
if (self._on_load_start) |func| func.release();
|
||||
if (self._on_progress) |func| func.release();
|
||||
|
||||
if (self._on_abort) |func| js_ctx.release(func);
|
||||
if (self._on_error) |func| js_ctx.release(func);
|
||||
if (self._on_load) |func| js_ctx.release(func);
|
||||
if (self._on_load_end) |func| js_ctx.release(func);
|
||||
if (self._on_load_start) |func| js_ctx.release(func);
|
||||
if (self._on_progress) |func| js_ctx.release(func);
|
||||
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
fn asEventTarget(self: *FileReader) *EventTarget {
|
||||
|
||||
@@ -24,6 +24,7 @@ const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const Element = @import("Element.zig");
|
||||
const DOMRect = @import("DOMRect.zig");
|
||||
|
||||
@@ -91,13 +92,13 @@ pub fn init(callback: js.Function.Temp, options: ?ObserverInit, page: *Page) !*I
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *IntersectionObserver, shutdown: bool, page: *Page) void {
|
||||
page.js.release(self._callback);
|
||||
pub fn deinit(self: *IntersectionObserver, shutdown: bool, session: *Session) void {
|
||||
self._callback.release();
|
||||
if ((comptime IS_DEBUG) and !shutdown) {
|
||||
std.debug.assert(self._observing.items.len == 0);
|
||||
}
|
||||
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn observe(self: *IntersectionObserver, target: *Element, page: *Page) !void {
|
||||
@@ -137,7 +138,7 @@ pub fn unobserve(self: *IntersectionObserver, target: *Element, page: *Page) voi
|
||||
while (j < self._pending_entries.items.len) {
|
||||
if (self._pending_entries.items[j]._target == target) {
|
||||
const entry = self._pending_entries.swapRemove(j);
|
||||
entry.deinit(false, page);
|
||||
entry.deinit(false, page._session);
|
||||
} else {
|
||||
j += 1;
|
||||
}
|
||||
@@ -157,7 +158,7 @@ pub fn disconnect(self: *IntersectionObserver, page: *Page) void {
|
||||
self._previous_states.clearRetainingCapacity();
|
||||
|
||||
for (self._pending_entries.items) |entry| {
|
||||
entry.deinit(false, page);
|
||||
entry.deinit(false, page._session);
|
||||
}
|
||||
self._pending_entries.clearRetainingCapacity();
|
||||
page.js.safeWeakRef(self);
|
||||
@@ -302,8 +303,8 @@ pub const IntersectionObserverEntry = struct {
|
||||
_intersection_ratio: f64,
|
||||
_is_intersecting: bool,
|
||||
|
||||
pub fn deinit(self: *const IntersectionObserverEntry, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *IntersectionObserverEntry, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn getTarget(self: *const IntersectionObserverEntry) *Element {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../string.zig").String;
|
||||
|
||||
const js = @import("../js/js.zig");
|
||||
const Page = @import("../Page.zig");
|
||||
const Session = @import("../Session.zig");
|
||||
const Node = @import("Node.zig");
|
||||
const Element = @import("Element.zig");
|
||||
const log = @import("../../log.zig");
|
||||
@@ -84,13 +85,13 @@ pub fn init(callback: js.Function.Temp, page: *Page) !*MutationObserver {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *MutationObserver, shutdown: bool, page: *Page) void {
|
||||
page.js.release(self._callback);
|
||||
pub fn deinit(self: *MutationObserver, shutdown: bool, session: *Session) void {
|
||||
self._callback.release();
|
||||
if ((comptime IS_DEBUG) and !shutdown) {
|
||||
std.debug.assert(self._observing.items.len == 0);
|
||||
}
|
||||
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn observe(self: *MutationObserver, target: *Node, options: ObserveOptions, page: *Page) !void {
|
||||
@@ -171,7 +172,7 @@ pub fn disconnect(self: *MutationObserver, page: *Page) void {
|
||||
page.unregisterMutationObserver(self);
|
||||
self._observing.clearRetainingCapacity();
|
||||
for (self._pending_records.items) |record| {
|
||||
record.deinit(false, page);
|
||||
record.deinit(false, page._session);
|
||||
}
|
||||
self._pending_records.clearRetainingCapacity();
|
||||
page.js.safeWeakRef(self);
|
||||
@@ -363,8 +364,8 @@ pub const MutationRecord = struct {
|
||||
characterData,
|
||||
};
|
||||
|
||||
pub fn deinit(self: *const MutationRecord, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *MutationRecord, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn getType(self: *const MutationRecord) []const u8 {
|
||||
|
||||
@@ -293,7 +293,8 @@ pub fn setTextContent(self: *Node, data: []const u8, page: *Page) !void {
|
||||
}
|
||||
return el.replaceChildren(&.{.{ .text = data }}, page);
|
||||
},
|
||||
.cdata => |c| c._data = try page.dupeSSO(data),
|
||||
// Per spec, setting textContent on CharacterData runs replaceData(0, length, value)
|
||||
.cdata => |c| try c.replaceData(0, c.getLength(), data, page),
|
||||
.document => {},
|
||||
.document_type => {},
|
||||
.document_fragment => |frag| {
|
||||
@@ -612,7 +613,11 @@ pub fn getNodeValue(self: *const Node) ?String {
|
||||
|
||||
pub fn setNodeValue(self: *const Node, value: ?String, page: *Page) !void {
|
||||
switch (self._type) {
|
||||
.cdata => |c| try c.setData(if (value) |v| v.str() else null, page),
|
||||
// Per spec, setting nodeValue on CharacterData runs replaceData(0, length, value)
|
||||
.cdata => |c| {
|
||||
const new_value: []const u8 = if (value) |v| v.str() else "";
|
||||
try c.replaceData(0, c.getLength(), new_value, page);
|
||||
},
|
||||
.attribute => |attr| try attr.setValue(value, page),
|
||||
.element => {},
|
||||
.document => {},
|
||||
|
||||
@@ -322,6 +322,11 @@ pub fn insertNode(self: *Range, node: *Node, page: *Page) !void {
|
||||
const container = self._proto._start_container;
|
||||
const offset = self._proto._start_offset;
|
||||
|
||||
// Per spec: if range is collapsed, end offset should extend to include
|
||||
// the inserted node. Capture before insertion since live range updates
|
||||
// in the insert path will adjust non-collapsed ranges automatically.
|
||||
const was_collapsed = self._proto.getCollapsed();
|
||||
|
||||
if (container.is(Node.CData)) |_| {
|
||||
// If container is a text node, we need to split it
|
||||
const parent = container.parentNode() orelse return error.InvalidNodeType;
|
||||
@@ -351,9 +356,10 @@ pub fn insertNode(self: *Range, node: *Node, page: *Page) !void {
|
||||
_ = try container.insertBefore(node, ref_child, page);
|
||||
}
|
||||
|
||||
// Update range to be after the inserted node
|
||||
if (self._proto._start_container == self._proto._end_container) {
|
||||
self._proto._end_offset += 1;
|
||||
// Per spec step 11: if range was collapsed, extend end to include inserted node.
|
||||
// Non-collapsed ranges are already handled by the live range update in the insert path.
|
||||
if (was_collapsed) {
|
||||
self._proto._end_offset = self._proto._start_offset + 1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -375,9 +381,12 @@ pub fn deleteContents(self: *Range, page: *Page) !void {
|
||||
);
|
||||
page.characterDataChange(self._proto._start_container, old_value);
|
||||
} else {
|
||||
// Delete child nodes in range
|
||||
var offset = self._proto._start_offset;
|
||||
while (offset < self._proto._end_offset) : (offset += 1) {
|
||||
// Delete child nodes in range.
|
||||
// Capture count before the loop: removeChild triggers live range
|
||||
// updates that decrement _end_offset on each removal.
|
||||
const count = self._proto._end_offset - self._proto._start_offset;
|
||||
var i: u32 = 0;
|
||||
while (i < count) : (i += 1) {
|
||||
if (self._proto._start_container.getChildAt(self._proto._start_offset)) |child| {
|
||||
_ = try self._proto._start_container.removeChild(child, page);
|
||||
}
|
||||
@@ -717,3 +726,6 @@ const testing = @import("../../testing.zig");
|
||||
test "WebApi: Range" {
|
||||
try testing.htmlRunner("range.html", .{});
|
||||
}
|
||||
test "WebApi: Range mutations" {
|
||||
try testing.htmlRunner("range_mutations.html", .{});
|
||||
}
|
||||
|
||||
@@ -243,11 +243,10 @@ pub fn createObjectURL(blob: *Blob, page: *Page) ![]const u8 {
|
||||
var uuid_buf: [36]u8 = undefined;
|
||||
@import("../../id.zig").uuidv4(&uuid_buf);
|
||||
|
||||
const origin = (try page.getOrigin(page.call_arena)) orelse "null";
|
||||
const blob_url = try std.fmt.allocPrint(
|
||||
page.arena,
|
||||
"blob:{s}/{s}",
|
||||
.{ origin, uuid_buf },
|
||||
.{ page.origin orelse "null", uuid_buf },
|
||||
);
|
||||
try page._blob_urls.put(page.arena, blob_url, blob);
|
||||
return blob_url;
|
||||
|
||||
@@ -646,9 +646,9 @@ const ScheduleCallback = struct {
|
||||
}
|
||||
|
||||
fn deinit(self: *ScheduleCallback) void {
|
||||
self.page.js.release(self.cb);
|
||||
self.cb.release();
|
||||
for (self.params) |param| {
|
||||
self.page.js.release(param);
|
||||
param.release();
|
||||
}
|
||||
self.page.releaseArena(self.arena);
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const log = @import("../../../log.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -61,8 +62,8 @@ pub fn init(page: *Page) !*Animation {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Animation, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *Animation, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn play(self: *Animation, page: *Page) !void {
|
||||
|
||||
@@ -43,16 +43,26 @@ pub fn splitText(self: *Text, offset: usize, page: *Page) !*Text {
|
||||
const new_node = try page.createTextNode(new_data);
|
||||
const new_text = new_node.as(Text);
|
||||
|
||||
const old_data = data[0..byte_offset];
|
||||
try self._proto.setData(old_data, page);
|
||||
|
||||
// If this node has a parent, insert the new node right after this one
|
||||
const node = self._proto.asNode();
|
||||
|
||||
// Per DOM spec splitText: insert first (step 7a), then update ranges (7b-7e),
|
||||
// then truncate original node (step 8).
|
||||
if (node.parentNode()) |parent| {
|
||||
const next_sibling = node.nextSibling();
|
||||
_ = try parent.insertBefore(new_node, next_sibling, page);
|
||||
|
||||
// splitText-specific range updates (steps 7b-7e)
|
||||
if (parent.getChildIndex(node)) |node_index| {
|
||||
page.updateRangesForSplitText(node, new_node, @intCast(offset), parent, node_index);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 8: truncate original node via replaceData(offset, count, "").
|
||||
// Use replaceData instead of setData so live range updates fire
|
||||
// (matters for detached text nodes where steps 7b-7e were skipped).
|
||||
const length = self._proto.getLength();
|
||||
try self._proto.replaceData(offset, length - offset, "", page);
|
||||
|
||||
return new_text;
|
||||
}
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
|
||||
const Node = @import("../Node.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const GenericIterator = @import("iterator.zig").Entry;
|
||||
|
||||
// Optimized for node.childNodes, which has to be a live list.
|
||||
@@ -53,8 +54,8 @@ pub fn init(node: *Node, page: *Page) !*ChildNodes {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *const ChildNodes, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *const ChildNodes, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn length(self: *ChildNodes, page: *Page) !u32 {
|
||||
|
||||
@@ -21,6 +21,7 @@ const std = @import("std");
|
||||
const log = @import("../../../log.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Node = @import("../Node.zig");
|
||||
|
||||
const ChildNodes = @import("ChildNodes.zig");
|
||||
@@ -38,7 +39,7 @@ _data: union(enum) {
|
||||
},
|
||||
_rc: usize = 0,
|
||||
|
||||
pub fn deinit(self: *NodeList, _: bool, page: *Page) void {
|
||||
pub fn deinit(self: *NodeList, _: bool, session: *Session) void {
|
||||
const rc = self._rc;
|
||||
if (rc > 1) {
|
||||
self._rc = rc - 1;
|
||||
@@ -46,8 +47,8 @@ pub fn deinit(self: *NodeList, _: bool, page: *Page) void {
|
||||
}
|
||||
|
||||
switch (self._data) {
|
||||
.selector_list => |list| list.deinit(page),
|
||||
.child_nodes => |cn| cn.deinit(page),
|
||||
.selector_list => |list| list.deinit(session),
|
||||
.child_nodes => |cn| cn.deinit(session),
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
@@ -118,8 +119,8 @@ const Iterator = struct {
|
||||
|
||||
const Entry = struct { u32, *Node };
|
||||
|
||||
pub fn deinit(self: *Iterator, shutdown: bool, page: *Page) void {
|
||||
self.list.deinit(shutdown, page);
|
||||
pub fn deinit(self: *Iterator, shutdown: bool, session: *Session) void {
|
||||
self.list.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn acquireRef(self: *Iterator) void {
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
const R = reflect(Inner, field);
|
||||
@@ -39,9 +40,9 @@ pub fn Entry(comptime Inner: type, comptime field: ?[]const u8) type {
|
||||
return page._factory.create(Self{ .inner = inner });
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *Self, shutdown: bool, session: *Session) void {
|
||||
if (@hasDecl(Inner, "deinit")) {
|
||||
self.inner.deinit(shutdown, page);
|
||||
self.inner.deinit(shutdown, session);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -483,6 +483,16 @@ fn isTwoValueShorthand(name: []const u8) bool {
|
||||
.{ "overflow", {} },
|
||||
.{ "overscroll-behavior", {} },
|
||||
.{ "gap", {} },
|
||||
.{ "grid-gap", {} },
|
||||
// Scroll
|
||||
.{ "scroll-padding-block", {} },
|
||||
.{ "scroll-padding-inline", {} },
|
||||
.{ "scroll-snap-align", {} },
|
||||
// Background/Mask
|
||||
.{ "background-size", {} },
|
||||
.{ "border-image-repeat", {} },
|
||||
.{ "mask-repeat", {} },
|
||||
.{ "mask-size", {} },
|
||||
});
|
||||
return shorthands.has(name);
|
||||
}
|
||||
@@ -552,7 +562,6 @@ fn isLengthProperty(name: []const u8) bool {
|
||||
.{ "border-bottom-right-radius", {} },
|
||||
// Text
|
||||
.{ "font-size", {} },
|
||||
.{ "line-height", {} },
|
||||
.{ "letter-spacing", {} },
|
||||
.{ "word-spacing", {} },
|
||||
.{ "text-indent", {} },
|
||||
@@ -561,17 +570,52 @@ fn isLengthProperty(name: []const u8) bool {
|
||||
.{ "row-gap", {} },
|
||||
.{ "column-gap", {} },
|
||||
.{ "flex-basis", {} },
|
||||
// Legacy grid aliases
|
||||
.{ "grid-column-gap", {} },
|
||||
.{ "grid-row-gap", {} },
|
||||
// Outline
|
||||
.{ "outline", {} },
|
||||
.{ "outline-width", {} },
|
||||
.{ "outline-offset", {} },
|
||||
// Multi-column
|
||||
.{ "column-rule-width", {} },
|
||||
.{ "column-width", {} },
|
||||
// Scroll
|
||||
.{ "scroll-margin", {} },
|
||||
.{ "scroll-margin-top", {} },
|
||||
.{ "scroll-margin-right", {} },
|
||||
.{ "scroll-margin-bottom", {} },
|
||||
.{ "scroll-margin-left", {} },
|
||||
.{ "scroll-padding", {} },
|
||||
.{ "scroll-padding-top", {} },
|
||||
.{ "scroll-padding-right", {} },
|
||||
.{ "scroll-padding-bottom", {} },
|
||||
.{ "scroll-padding-left", {} },
|
||||
// Shapes
|
||||
.{ "shape-margin", {} },
|
||||
// Motion path
|
||||
.{ "offset-distance", {} },
|
||||
// Transforms
|
||||
.{ "translate", {} },
|
||||
// Animations
|
||||
.{ "animation-range-end", {} },
|
||||
.{ "animation-range-start", {} },
|
||||
// Other
|
||||
.{ "border-spacing", {} },
|
||||
.{ "text-shadow", {} },
|
||||
.{ "box-shadow", {} },
|
||||
.{ "baseline-shift", {} },
|
||||
.{ "vertical-align", {} },
|
||||
.{ "text-decoration-inset", {} },
|
||||
.{ "block-step-size", {} },
|
||||
// Grid lanes
|
||||
.{ "flow-tolerance", {} },
|
||||
.{ "column-rule-edge-inset", {} },
|
||||
.{ "column-rule-interior-inset", {} },
|
||||
.{ "row-rule-edge-inset", {} },
|
||||
.{ "row-rule-interior-inset", {} },
|
||||
.{ "rule-edge-inset", {} },
|
||||
.{ "rule-interior-inset", {} },
|
||||
});
|
||||
|
||||
return length_properties.has(name);
|
||||
@@ -693,3 +737,55 @@ pub const JsApi = struct {
|
||||
pub const removeProperty = bridge.function(CSSStyleDeclaration.removeProperty, .{});
|
||||
pub const cssFloat = bridge.accessor(CSSStyleDeclaration.getFloat, CSSStyleDeclaration.setFloat, .{});
|
||||
};
|
||||
|
||||
const testing = @import("std").testing;
|
||||
|
||||
test "normalizePropertyValue: unitless zero to 0px" {
|
||||
const cases = .{
|
||||
.{ "width", "0", "0px" },
|
||||
.{ "height", "0", "0px" },
|
||||
.{ "scroll-margin-top", "0", "0px" },
|
||||
.{ "scroll-padding-bottom", "0", "0px" },
|
||||
.{ "column-width", "0", "0px" },
|
||||
.{ "column-rule-width", "0", "0px" },
|
||||
.{ "outline", "0", "0px" },
|
||||
.{ "shape-margin", "0", "0px" },
|
||||
.{ "offset-distance", "0", "0px" },
|
||||
.{ "translate", "0", "0px" },
|
||||
.{ "grid-column-gap", "0", "0px" },
|
||||
.{ "grid-row-gap", "0", "0px" },
|
||||
// Non-length properties should NOT normalize
|
||||
.{ "opacity", "0", "0" },
|
||||
.{ "z-index", "0", "0" },
|
||||
};
|
||||
inline for (cases) |case| {
|
||||
const result = try normalizePropertyValue(testing.allocator, case[0], case[1]);
|
||||
try testing.expectEqualStrings(case[2], result);
|
||||
}
|
||||
}
|
||||
|
||||
test "normalizePropertyValue: first baseline to baseline" {
|
||||
const result = try normalizePropertyValue(testing.allocator, "align-items", "first baseline");
|
||||
try testing.expectEqualStrings("baseline", result);
|
||||
|
||||
const result2 = try normalizePropertyValue(testing.allocator, "align-self", "last baseline");
|
||||
try testing.expectEqualStrings("last baseline", result2);
|
||||
}
|
||||
|
||||
test "normalizePropertyValue: collapse duplicate two-value shorthands" {
|
||||
const cases = .{
|
||||
.{ "overflow", "hidden hidden", "hidden" },
|
||||
.{ "gap", "10px 10px", "10px" },
|
||||
.{ "scroll-snap-align", "start start", "start" },
|
||||
.{ "scroll-padding-block", "5px 5px", "5px" },
|
||||
.{ "background-size", "auto auto", "auto" },
|
||||
.{ "overscroll-behavior", "auto auto", "auto" },
|
||||
// Different values should NOT collapse
|
||||
.{ "overflow", "hidden scroll", "hidden scroll" },
|
||||
.{ "gap", "10px 20px", "10px 20px" },
|
||||
};
|
||||
inline for (cases) |case| {
|
||||
const result = try normalizePropertyValue(testing.allocator, case[0], case[1]);
|
||||
try testing.expectEqualStrings(case[2], result);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -41,8 +42,8 @@ pub fn init(family: []const u8, source: []const u8, page: *Page) !*FontFace {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *FontFace, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *FontFace, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn getFamily(self: *const FontFace) []const u8 {
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const FontFace = @import("FontFace.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
@@ -38,8 +39,8 @@ pub fn init(page: *Page) !*FontFaceSet {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *FontFaceSet, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *FontFaceSet, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
// FontFaceSet.ready - returns an already-resolved Promise.
|
||||
|
||||
@@ -387,22 +387,17 @@ pub fn getAttributeFunction(
|
||||
}
|
||||
|
||||
const attr = element.getAttributeSafe(.wrap(@tagName(listener_type))) orelse return null;
|
||||
const callback = page.js.stringToPersistedFunction(attr) catch |err| switch (err) {
|
||||
error.OutOfMemory => return err,
|
||||
else => {
|
||||
// Not a valid expression; log this to find out if its something we should be supporting.
|
||||
log.warn(.js, "Html.getAttributeFunction", .{
|
||||
.expression = attr,
|
||||
.err = err,
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
const function = page.js.stringToPersistedFunction(attr, &.{"event"}, &.{}) catch |err| {
|
||||
// Not a valid expression; log this to find out if its something we should be supporting.
|
||||
log.warn(.js, "Html.getAttributeFunction", .{
|
||||
.expression = attr,
|
||||
.err = err,
|
||||
});
|
||||
return null;
|
||||
};
|
||||
|
||||
try self.setAttributeListener(listener_type, callback, page);
|
||||
|
||||
return callback;
|
||||
try self.setAttributeListener(listener_type, function, page);
|
||||
return function;
|
||||
}
|
||||
|
||||
pub fn hasAttributeFunction(self: *HtmlElement, listener_type: GlobalEventHandler, page: *const Page) bool {
|
||||
|
||||
@@ -50,7 +50,7 @@ pub const Build = struct {
|
||||
pub fn complete(node: *Node, page: *Page) !void {
|
||||
const el = node.as(Element);
|
||||
const on_load = el.getAttributeSafe(comptime .wrap("onload")) orelse return;
|
||||
if (page.js.stringToPersistedFunction(on_load)) |func| {
|
||||
if (page.js.stringToPersistedFunction(on_load, &.{"event"}, &.{})) |func| {
|
||||
page.window._on_load = func;
|
||||
} else |err| {
|
||||
log.err(.js, "body.onload", .{ .err = err, .str = on_load });
|
||||
|
||||
@@ -18,7 +18,9 @@
|
||||
|
||||
const std = @import("std");
|
||||
const js = @import("../../../js/js.zig");
|
||||
const URL = @import("../../../URL.zig");
|
||||
const Page = @import("../../../Page.zig");
|
||||
|
||||
const Node = @import("../../Node.zig");
|
||||
const Element = @import("../../Element.zig");
|
||||
const HtmlElement = @import("../Html.zig");
|
||||
@@ -85,6 +87,19 @@ pub fn getElements(self: *Form, page: *Page) !*collections.HTMLFormControlsColle
|
||||
});
|
||||
}
|
||||
|
||||
pub fn getAction(self: *Form, page: *Page) ![]const u8 {
|
||||
const element = self.asElement();
|
||||
const action = element.getAttributeSafe(comptime .wrap("action")) orelse return page.url;
|
||||
if (action.len == 0) {
|
||||
return page.url;
|
||||
}
|
||||
return URL.resolve(page.call_arena, page.base(), action, .{ .encode = true });
|
||||
}
|
||||
|
||||
pub fn setAction(self: *Form, value: []const u8, page: *Page) !void {
|
||||
try self.asElement().setAttributeSafe(comptime .wrap("action"), .wrap(value), page);
|
||||
}
|
||||
|
||||
pub fn getLength(self: *Form, page: *Page) !u32 {
|
||||
const elements = try self.getElements(page);
|
||||
return elements.length(page);
|
||||
@@ -104,6 +119,7 @@ pub const JsApi = struct {
|
||||
|
||||
pub const name = bridge.accessor(Form.getName, Form.setName, .{});
|
||||
pub const method = bridge.accessor(Form.getMethod, Form.setMethod, .{});
|
||||
pub const action = bridge.accessor(Form.getAction, Form.setAction, .{});
|
||||
pub const elements = bridge.accessor(Form.getElements, null, .{});
|
||||
pub const length = bridge.accessor(Form.getLength, null, .{});
|
||||
pub const submit = bridge.function(Form.submit, .{});
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const TextDecoder = @This();
|
||||
@@ -59,8 +60,8 @@ pub fn init(label_: ?[]const u8, opts_: ?InitOpts, page: *Page) !*TextDecoder {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *TextDecoder, _: bool, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *TextDecoder, _: bool, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn getIgnoreBOM(self: *const TextDecoder) bool {
|
||||
|
||||
@@ -20,6 +20,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -53,8 +54,8 @@ pub fn init(typ: []const u8, opts_: ?Options, page: *Page) !*CompositionEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *CompositionEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *CompositionEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *CompositionEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -72,11 +73,11 @@ pub fn initCustomEvent(
|
||||
self._detail = detail_;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *CustomEvent, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *CustomEvent, shutdown: bool, session: *Session) void {
|
||||
if (self._detail) |d| {
|
||||
page.js.release(d);
|
||||
d.release();
|
||||
}
|
||||
self._proto.deinit(shutdown, page);
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *CustomEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
@@ -79,11 +80,11 @@ fn initWithTrusted(arena: Allocator, typ: String, opts_: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ErrorEvent, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *ErrorEvent, shutdown: bool, session: *Session) void {
|
||||
if (self._error) |e| {
|
||||
page.js.release(e);
|
||||
e.release();
|
||||
}
|
||||
self._proto.deinit(shutdown, page);
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *ErrorEvent) *Event {
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const String = @import("../../../string.zig").String;
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
@@ -69,8 +70,8 @@ fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *FocusEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *FocusEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *FocusEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
const UIEvent = @import("UIEvent.zig");
|
||||
@@ -221,8 +222,8 @@ fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *KeyboardEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *KeyboardEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *KeyboardEvent) *Event {
|
||||
|
||||
@@ -22,6 +22,7 @@ const String = @import("../../../string.zig").String;
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Window = @import("../Window.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
@@ -72,11 +73,11 @@ fn initWithTrusted(arena: Allocator, typ: String, opts_: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *MessageEvent, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *MessageEvent, shutdown: bool, session: *Session) void {
|
||||
if (self._data) |d| {
|
||||
page.js.release(d);
|
||||
d.release();
|
||||
}
|
||||
self._proto.deinit(shutdown, page);
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *MessageEvent) *Event {
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const String = @import("../../../string.zig").String;
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
@@ -109,8 +110,8 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*MouseEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *MouseEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *MouseEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *MouseEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
const NavigationHistoryEntry = @import("../navigation/NavigationHistoryEntry.zig");
|
||||
@@ -82,8 +83,8 @@ fn initWithTrusted(
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *NavigationCurrentEntryChangeEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *NavigationCurrentEntryChangeEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *NavigationCurrentEntryChangeEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -65,8 +66,8 @@ fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *PageTransitionEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *PageTransitionEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *PageTransitionEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const MouseEvent = @import("MouseEvent.zig");
|
||||
|
||||
@@ -127,8 +128,8 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*PointerEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *PointerEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *PointerEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *PointerEvent) *Event {
|
||||
|
||||
@@ -21,6 +21,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
@@ -66,8 +67,8 @@ fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *PopStateEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *PopStateEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *PopStateEvent) *Event {
|
||||
|
||||
@@ -20,6 +20,7 @@ const std = @import("std");
|
||||
const String = @import("../../../string.zig").String;
|
||||
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -67,8 +68,8 @@ fn initWithTrusted(arena: Allocator, typ: String, _opts: ?Options, trusted: bool
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *ProgressEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *ProgressEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *ProgressEvent) *Event {
|
||||
|
||||
@@ -20,6 +20,7 @@ const String = @import("../../../string.zig").String;
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -56,14 +57,14 @@ pub fn init(typ: []const u8, opts_: ?Options, page: *Page) !*PromiseRejectionEve
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *PromiseRejectionEvent, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *PromiseRejectionEvent, shutdown: bool, session: *Session) void {
|
||||
if (self._reason) |r| {
|
||||
page.js.release(r);
|
||||
r.release();
|
||||
}
|
||||
if (self._promise) |p| {
|
||||
page.js.release(p);
|
||||
p.release();
|
||||
}
|
||||
self._proto.deinit(shutdown, page);
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *PromiseRejectionEvent) *Event {
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const String = @import("../../../string.zig").String;
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
@@ -58,8 +59,8 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*TextEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *TextEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *TextEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *TextEvent) *Event {
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
const String = @import("../../../string.zig").String;
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
@@ -69,8 +70,8 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*UIEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *UIEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *UIEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn as(self: *UIEvent, comptime T: type) *T {
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
const String = @import("../../../string.zig").String;
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const Event = @import("../Event.zig");
|
||||
@@ -86,8 +87,8 @@ pub fn init(typ: []const u8, _opts: ?Options, page: *Page) !*WheelEvent {
|
||||
return event;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *WheelEvent, shutdown: bool, page: *Page) void {
|
||||
self._proto.deinit(shutdown, page);
|
||||
pub fn deinit(self: *WheelEvent, shutdown: bool, session: *Session) void {
|
||||
self._proto.deinit(shutdown, session);
|
||||
}
|
||||
|
||||
pub fn asEvent(self: *WheelEvent) *Event {
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../../log.zig");
|
||||
const Http = @import("../../../http/Http.zig");
|
||||
const HttpClient = @import("../../HttpClient.zig");
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
@@ -45,7 +45,7 @@ pub const InitOpts = Request.InitOpts;
|
||||
pub fn init(input: Input, options: ?InitOpts, page: *Page) !js.Promise {
|
||||
const request = try Request.init(input, options, page);
|
||||
const response = try Response.init(null, .{ .status = 0 }, page);
|
||||
errdefer response.deinit(true, page);
|
||||
errdefer response.deinit(true, page._session);
|
||||
|
||||
const resolver = page.js.local.?.createPromiseResolver();
|
||||
|
||||
@@ -90,7 +90,7 @@ pub fn init(input: Input, options: ?InitOpts, page: *Page) !js.Promise {
|
||||
return resolver.promise();
|
||||
}
|
||||
|
||||
fn httpStartCallback(transfer: *Http.Transfer) !void {
|
||||
fn httpStartCallback(transfer: *HttpClient.Transfer) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request start", .{ .url = self._url, .source = "fetch" });
|
||||
@@ -98,7 +98,7 @@ fn httpStartCallback(transfer: *Http.Transfer) !void {
|
||||
self._response._transfer = transfer;
|
||||
}
|
||||
|
||||
fn httpHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
fn httpHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
const arena = self._response._arena;
|
||||
@@ -148,7 +148,7 @@ fn httpHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn httpDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
fn httpDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *Fetch = @ptrCast(@alignCast(transfer.ctx));
|
||||
try self._buf.appendSlice(self._response._arena, data);
|
||||
}
|
||||
@@ -184,7 +184,7 @@ fn httpErrorCallback(ctx: *anyopaque, err: anyerror) void {
|
||||
// clear this. (defer since `self is in the response's arena).
|
||||
|
||||
defer if (self._owns_response) {
|
||||
response.deinit(err == error.Abort, self._page);
|
||||
response.deinit(err == error.Abort, self._page._session);
|
||||
self._owns_response = false;
|
||||
};
|
||||
|
||||
@@ -205,7 +205,7 @@ fn httpShutdownCallback(ctx: *anyopaque) void {
|
||||
if (self._owns_response) {
|
||||
var response = self._response;
|
||||
response._transfer = null;
|
||||
response.deinit(true, self._page);
|
||||
response.deinit(true, self._page._session);
|
||||
// Do not access `self` after this point: the Fetch struct was
|
||||
// allocated from response._arena which has been released.
|
||||
}
|
||||
|
||||
@@ -86,8 +86,8 @@ pub fn forEach(self: *Headers, cb_: js.Function, js_this_: ?js.Object) !void {
|
||||
}
|
||||
|
||||
// TODO: do we really need 2 different header structs??
|
||||
const Http = @import("../../../http/Http.zig");
|
||||
pub fn populateHttpHeader(self: *Headers, allocator: Allocator, http_headers: *Http.Headers) !void {
|
||||
const net_http = @import("../../../network/http.zig");
|
||||
pub fn populateHttpHeader(self: *Headers, allocator: Allocator, http_headers: *net_http.Headers) !void {
|
||||
for (self._list._entries.items) |entry| {
|
||||
const merged = try std.mem.concatWithSentinel(allocator, u8, &.{ entry.name.str(), ": ", entry.value.str() }, 0);
|
||||
try http_headers.add(merged);
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
const std = @import("std");
|
||||
|
||||
const js = @import("../../js/js.zig");
|
||||
const Http = @import("../../../http/Http.zig");
|
||||
const net_http = @import("../../../network/http.zig");
|
||||
|
||||
const URL = @import("../URL.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
@@ -30,7 +30,7 @@ const Allocator = std.mem.Allocator;
|
||||
const Request = @This();
|
||||
|
||||
_url: [:0]const u8,
|
||||
_method: Http.Method,
|
||||
_method: net_http.Method,
|
||||
_headers: ?*Headers,
|
||||
_body: ?[]const u8,
|
||||
_arena: Allocator,
|
||||
@@ -108,14 +108,14 @@ pub fn init(input: Input, opts_: ?InitOpts, page: *Page) !*Request {
|
||||
});
|
||||
}
|
||||
|
||||
fn parseMethod(method: []const u8, page: *Page) !Http.Method {
|
||||
fn parseMethod(method: []const u8, page: *Page) !net_http.Method {
|
||||
if (method.len > "propfind".len) {
|
||||
return error.InvalidMethod;
|
||||
}
|
||||
|
||||
const lower = std.ascii.lowerString(&page.buf, method);
|
||||
|
||||
const method_lookup = std.StaticStringMap(Http.Method).initComptime(.{
|
||||
const method_lookup = std.StaticStringMap(net_http.Method).initComptime(.{
|
||||
.{ "get", .GET },
|
||||
.{ "post", .POST },
|
||||
.{ "delete", .DELETE },
|
||||
|
||||
@@ -18,9 +18,10 @@
|
||||
|
||||
const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
const Http = @import("../../../http/Http.zig");
|
||||
const HttpClient = @import("../../HttpClient.zig");
|
||||
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
const Headers = @import("Headers.zig");
|
||||
const ReadableStream = @import("../streams/ReadableStream.zig");
|
||||
const Blob = @import("../Blob.zig");
|
||||
@@ -45,7 +46,7 @@ _type: Type,
|
||||
_status_text: []const u8,
|
||||
_url: [:0]const u8,
|
||||
_is_redirected: bool,
|
||||
_transfer: ?*Http.Transfer = null,
|
||||
_transfer: ?*HttpClient.Transfer = null,
|
||||
|
||||
const InitOpts = struct {
|
||||
status: u16 = 200,
|
||||
@@ -77,7 +78,7 @@ pub fn init(body_: ?[]const u8, opts_: ?InitOpts, page: *Page) !*Response {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Response, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *Response, shutdown: bool, session: *Session) void {
|
||||
if (self._transfer) |transfer| {
|
||||
if (shutdown) {
|
||||
transfer.terminate();
|
||||
@@ -86,7 +87,7 @@ pub fn deinit(self: *Response, shutdown: bool, page: *Page) void {
|
||||
}
|
||||
self._transfer = null;
|
||||
}
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn getStatus(self: *const Response) u16 {
|
||||
|
||||
@@ -20,11 +20,14 @@ const std = @import("std");
|
||||
const js = @import("../../js/js.zig");
|
||||
|
||||
const log = @import("../../../log.zig");
|
||||
const Http = @import("../../../http/Http.zig");
|
||||
const HttpClient = @import("../../HttpClient.zig");
|
||||
const net_http = @import("../../../network/http.zig");
|
||||
|
||||
const URL = @import("../../URL.zig");
|
||||
const Mime = @import("../../Mime.zig");
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Node = @import("../Node.zig");
|
||||
const Event = @import("../Event.zig");
|
||||
const Headers = @import("Headers.zig");
|
||||
@@ -38,10 +41,10 @@ const XMLHttpRequest = @This();
|
||||
_page: *Page,
|
||||
_proto: *XMLHttpRequestEventTarget,
|
||||
_arena: Allocator,
|
||||
_transfer: ?*Http.Transfer = null,
|
||||
_transfer: ?*HttpClient.Transfer = null,
|
||||
|
||||
_url: [:0]const u8 = "",
|
||||
_method: Http.Method = .GET,
|
||||
_method: net_http.Method = .GET,
|
||||
_request_headers: *Headers,
|
||||
_request_body: ?[]const u8 = null,
|
||||
|
||||
@@ -92,7 +95,7 @@ pub fn init(page: *Page) !*XMLHttpRequest {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn deinit(self: *XMLHttpRequest, shutdown: bool, page: *Page) void {
|
||||
pub fn deinit(self: *XMLHttpRequest, shutdown: bool, session: *Session) void {
|
||||
if (self._transfer) |transfer| {
|
||||
if (shutdown) {
|
||||
transfer.terminate();
|
||||
@@ -102,37 +105,36 @@ pub fn deinit(self: *XMLHttpRequest, shutdown: bool, page: *Page) void {
|
||||
self._transfer = null;
|
||||
}
|
||||
|
||||
const js_ctx = page.js;
|
||||
if (self._on_ready_state_change) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
|
||||
{
|
||||
const proto = self._proto;
|
||||
if (proto._on_abort) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_error) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_load) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_load_end) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_load_start) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_progress) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
if (proto._on_timeout) |func| {
|
||||
js_ctx.release(func);
|
||||
func.release();
|
||||
}
|
||||
}
|
||||
|
||||
page.releaseArena(self._arena);
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
fn asEventTarget(self: *XMLHttpRequest) *EventTarget {
|
||||
@@ -341,7 +343,7 @@ pub fn getResponseXML(self: *XMLHttpRequest, page: *Page) !?*Node.Document {
|
||||
};
|
||||
}
|
||||
|
||||
fn httpStartCallback(transfer: *Http.Transfer) !void {
|
||||
fn httpStartCallback(transfer: *HttpClient.Transfer) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
if (comptime IS_DEBUG) {
|
||||
log.debug(.http, "request start", .{ .method = self._method, .url = self._url, .source = "xhr" });
|
||||
@@ -349,13 +351,13 @@ fn httpStartCallback(transfer: *Http.Transfer) !void {
|
||||
self._transfer = transfer;
|
||||
}
|
||||
|
||||
fn httpHeaderCallback(transfer: *Http.Transfer, header: Http.Header) !void {
|
||||
fn httpHeaderCallback(transfer: *HttpClient.Transfer, header: net_http.Header) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
const joined = try std.fmt.allocPrint(self._arena, "{s}: {s}", .{ header.name, header.value });
|
||||
try self._response_headers.append(self._arena, joined);
|
||||
}
|
||||
|
||||
fn httpHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
fn httpHeaderDoneCallback(transfer: *HttpClient.Transfer) !bool {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
|
||||
const header = &transfer.response_header.?;
|
||||
@@ -405,7 +407,7 @@ fn httpHeaderDoneCallback(transfer: *Http.Transfer) !bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
fn httpDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
|
||||
fn httpDataCallback(transfer: *HttpClient.Transfer, data: []const u8) !void {
|
||||
const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
|
||||
try self._response_data.appendSlice(self._arena, data);
|
||||
|
||||
@@ -515,7 +517,7 @@ fn stateChanged(self: *XMLHttpRequest, state: ReadyState, page: *Page) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn parseMethod(method: []const u8) !Http.Method {
|
||||
fn parseMethod(method: []const u8) !net_http.Method {
|
||||
if (std.ascii.eqlIgnoreCase(method, "get")) {
|
||||
return .GET;
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("../../Page.zig");
|
||||
const Session = @import("../../Session.zig");
|
||||
|
||||
const Node = @import("../Node.zig");
|
||||
const Part = @import("Selector.zig").Part;
|
||||
@@ -40,8 +41,8 @@ pub const EntryIterator = GenericIterator(Iterator, null);
|
||||
pub const KeyIterator = GenericIterator(Iterator, "0");
|
||||
pub const ValueIterator = GenericIterator(Iterator, "1");
|
||||
|
||||
pub fn deinit(self: *const List, page: *Page) void {
|
||||
page.releaseArena(self._arena);
|
||||
pub fn deinit(self: *const List, session: *Session) void {
|
||||
session.releaseArena(self._arena);
|
||||
}
|
||||
|
||||
pub fn collect(
|
||||
|
||||
@@ -406,7 +406,7 @@ test "cdp Node: search list" {
|
||||
|
||||
{
|
||||
const l1 = try doc.querySelectorAll(.wrap("a"), page);
|
||||
defer l1.deinit(page);
|
||||
defer l1.deinit(page._session);
|
||||
const s1 = try search_list.create(l1._nodes);
|
||||
try testing.expectEqual("1", s1.name);
|
||||
try testing.expectEqualSlices(u32, &.{ 1, 2 }, s1.node_ids);
|
||||
@@ -417,7 +417,7 @@ test "cdp Node: search list" {
|
||||
|
||||
{
|
||||
const l2 = try doc.querySelectorAll(.wrap("#a1"), page);
|
||||
defer l2.deinit(page);
|
||||
defer l2.deinit(page._session);
|
||||
const s2 = try search_list.create(l2._nodes);
|
||||
try testing.expectEqual("2", s2.name);
|
||||
try testing.expectEqualSlices(u32, &.{1}, s2.node_ids);
|
||||
@@ -425,7 +425,7 @@ test "cdp Node: search list" {
|
||||
|
||||
{
|
||||
const l3 = try doc.querySelectorAll(.wrap("#a2"), page);
|
||||
defer l3.deinit(page);
|
||||
defer l3.deinit(page._session);
|
||||
const s3 = try search_list.create(l3._nodes);
|
||||
try testing.expectEqual("3", s3.name);
|
||||
try testing.expectEqualSlices(u32, &.{2}, s3.node_ids);
|
||||
|
||||
@@ -28,7 +28,7 @@ const js = @import("../browser/js/js.zig");
|
||||
const App = @import("../App.zig");
|
||||
const Browser = @import("../browser/Browser.zig");
|
||||
const Session = @import("../browser/Session.zig");
|
||||
const HttpClient = @import("../http/Client.zig");
|
||||
const HttpClient = @import("../browser/HttpClient.zig");
|
||||
const Page = @import("../browser/Page.zig");
|
||||
const Incrementing = @import("id.zig").Incrementing;
|
||||
const Notification = @import("../Notification.zig");
|
||||
@@ -459,6 +459,12 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
}
|
||||
self.isolated_worlds.clearRetainingCapacity();
|
||||
|
||||
// do this before closeSession, since we don't want to process any
|
||||
// new notification (Or maybe, instead of the deinit above, we just
|
||||
// rely on those notifications to do our normal cleanup?)
|
||||
|
||||
self.notification.unregisterAll(self);
|
||||
|
||||
// If the session has a page, we need to clear it first. The page
|
||||
// context is always nested inside of the isolated world context,
|
||||
// so we need to shutdown the page one first.
|
||||
@@ -466,7 +472,6 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
|
||||
self.node_registry.deinit();
|
||||
self.node_search_list.deinit();
|
||||
self.notification.unregisterAll(self);
|
||||
self.notification.deinit();
|
||||
|
||||
if (self.http_proxy_changed) {
|
||||
|
||||
@@ -98,7 +98,7 @@ fn performSearch(cmd: anytype) !void {
|
||||
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
const list = try Selector.querySelectorAll(page.window._document.asNode(), params.query, page);
|
||||
defer list.deinit(page);
|
||||
defer list.deinit(page._session);
|
||||
|
||||
const search = try bc.node_search_list.create(list._nodes);
|
||||
|
||||
@@ -249,7 +249,7 @@ fn querySelectorAll(cmd: anytype) !void {
|
||||
};
|
||||
|
||||
const selected_nodes = try Selector.querySelectorAll(node.dom, params.selector, page);
|
||||
defer selected_nodes.deinit(page);
|
||||
defer selected_nodes.deinit(page._session);
|
||||
|
||||
const nodes = selected_nodes._nodes;
|
||||
|
||||
|
||||
@@ -23,7 +23,8 @@ const id = @import("../id.zig");
|
||||
const log = @import("../../log.zig");
|
||||
const network = @import("network.zig");
|
||||
|
||||
const Http = @import("../../http/Http.zig");
|
||||
const HttpClient = @import("../../browser/HttpClient.zig");
|
||||
const net_http = @import("../../network/http.zig");
|
||||
const Notification = @import("../../Notification.zig");
|
||||
|
||||
pub fn processMessage(cmd: anytype) !void {
|
||||
@@ -49,7 +50,7 @@ pub fn processMessage(cmd: anytype) !void {
|
||||
// Stored in CDP
|
||||
pub const InterceptState = struct {
|
||||
allocator: Allocator,
|
||||
waiting: std.AutoArrayHashMapUnmanaged(u32, *Http.Transfer),
|
||||
waiting: std.AutoArrayHashMapUnmanaged(u32, *HttpClient.Transfer),
|
||||
|
||||
pub fn init(allocator: Allocator) !InterceptState {
|
||||
return .{
|
||||
@@ -62,11 +63,11 @@ pub const InterceptState = struct {
|
||||
return self.waiting.count() == 0;
|
||||
}
|
||||
|
||||
pub fn put(self: *InterceptState, transfer: *Http.Transfer) !void {
|
||||
pub fn put(self: *InterceptState, transfer: *HttpClient.Transfer) !void {
|
||||
return self.waiting.put(self.allocator, transfer.id, transfer);
|
||||
}
|
||||
|
||||
pub fn remove(self: *InterceptState, request_id: u32) ?*Http.Transfer {
|
||||
pub fn remove(self: *InterceptState, request_id: u32) ?*HttpClient.Transfer {
|
||||
const entry = self.waiting.fetchSwapRemove(request_id) orelse return null;
|
||||
return entry.value;
|
||||
}
|
||||
@@ -75,7 +76,7 @@ pub const InterceptState = struct {
|
||||
self.waiting.deinit(self.allocator);
|
||||
}
|
||||
|
||||
pub fn pendingTransfers(self: *const InterceptState) []*Http.Transfer {
|
||||
pub fn pendingTransfers(self: *const InterceptState) []*HttpClient.Transfer {
|
||||
return self.waiting.values();
|
||||
}
|
||||
};
|
||||
@@ -221,7 +222,7 @@ fn continueRequest(cmd: anytype) !void {
|
||||
url: ?[]const u8 = null,
|
||||
method: ?[]const u8 = null,
|
||||
postData: ?[]const u8 = null,
|
||||
headers: ?[]const Http.Header = null,
|
||||
headers: ?[]const net_http.Header = null,
|
||||
interceptResponse: bool = false,
|
||||
})) orelse return error.InvalidParams;
|
||||
|
||||
@@ -246,7 +247,7 @@ fn continueRequest(cmd: anytype) !void {
|
||||
try transfer.updateURL(try arena.dupeZ(u8, url));
|
||||
}
|
||||
if (params.method) |method| {
|
||||
transfer.req.method = std.meta.stringToEnum(Http.Method, method) orelse return error.InvalidParams;
|
||||
transfer.req.method = std.meta.stringToEnum(net_http.Method, method) orelse return error.InvalidParams;
|
||||
}
|
||||
|
||||
if (params.headers) |headers| {
|
||||
@@ -323,7 +324,7 @@ fn fulfillRequest(cmd: anytype) !void {
|
||||
const params = (try cmd.params(struct {
|
||||
requestId: []const u8, // "INT-{d}"
|
||||
responseCode: u16,
|
||||
responseHeaders: ?[]const Http.Header = null,
|
||||
responseHeaders: ?[]const net_http.Header = null,
|
||||
binaryResponseHeaders: ?[]const u8 = null,
|
||||
body: ?[]const u8 = null,
|
||||
responsePhrase: ?[]const u8 = null,
|
||||
|
||||
@@ -24,7 +24,7 @@ const CdpStorage = @import("storage.zig");
|
||||
|
||||
const id = @import("../id.zig");
|
||||
const URL = @import("../../browser/URL.zig");
|
||||
const Transfer = @import("../../http/Client.zig").Transfer;
|
||||
const Transfer = @import("../../browser/HttpClient.zig").Transfer;
|
||||
const Notification = @import("../../Notification.zig");
|
||||
const Mime = @import("../../browser/Mime.zig");
|
||||
|
||||
|
||||
@@ -292,6 +292,10 @@ pub fn pageNavigate(bc: anytype, event: *const Notification.PageNavigate) !void
|
||||
}
|
||||
|
||||
pub fn pageRemove(bc: anytype) !void {
|
||||
// Clear all remote object mappings to prevent stale objectIds from being used
|
||||
// after the context is destroy
|
||||
bc.inspector_session.inspector.resetContextGroup();
|
||||
|
||||
// The main page is going to be removed, we need to remove contexts from other worlds first.
|
||||
for (bc.isolated_worlds.items) |isolated_world| {
|
||||
try isolated_world.removeContext();
|
||||
@@ -410,7 +414,7 @@ pub fn pageNavigated(arena: Allocator, bc: anytype, event: *const Notification.P
|
||||
bc.inspector_session.inspector.contextCreated(
|
||||
&ls.local,
|
||||
"",
|
||||
try page.getOrigin(arena) orelse "",
|
||||
page.origin orelse "",
|
||||
aux_data,
|
||||
true,
|
||||
);
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Net = @import("../Net.zig");
|
||||
|
||||
const ENABLE_DEBUG = Net.ENABLE_DEBUG;
|
||||
pub const Client = @import("Client.zig");
|
||||
pub const Transfer = Client.Transfer;
|
||||
|
||||
pub const Method = Net.Method;
|
||||
pub const Header = Net.Header;
|
||||
pub const Headers = Net.Headers;
|
||||
|
||||
const Config = @import("../Config.zig");
|
||||
const RobotStore = @import("../browser/Robots.zig").RobotStore;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
// Client.zig does the bulk of the work and is loosely tied to a browser Page.
|
||||
// But we still need something above Client.zig for the "utility" http stuff
|
||||
// we need to do, like telemetry. The most important thing we want from this
|
||||
// is to be able to share the ca_blob, which can be quite large - loading it
|
||||
// once for all http connections is a win.
|
||||
const Http = @This();
|
||||
|
||||
arena: ArenaAllocator,
|
||||
allocator: Allocator,
|
||||
config: *const Config,
|
||||
ca_blob: ?Net.Blob,
|
||||
robot_store: *RobotStore,
|
||||
|
||||
pub fn init(allocator: Allocator, robot_store: *RobotStore, config: *const Config) !Http {
|
||||
try Net.globalInit();
|
||||
errdefer Net.globalDeinit();
|
||||
|
||||
if (comptime ENABLE_DEBUG) {
|
||||
std.debug.print("curl version: {s}\n\n", .{Net.curl_version()});
|
||||
}
|
||||
|
||||
var arena = ArenaAllocator.init(allocator);
|
||||
errdefer arena.deinit();
|
||||
|
||||
var ca_blob: ?Net.Blob = null;
|
||||
if (config.tlsVerifyHost()) {
|
||||
ca_blob = try Net.loadCerts(allocator);
|
||||
}
|
||||
|
||||
return .{
|
||||
.arena = arena,
|
||||
.allocator = allocator,
|
||||
.config = config,
|
||||
.ca_blob = ca_blob,
|
||||
.robot_store = robot_store,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Http) void {
|
||||
if (self.ca_blob) |ca_blob| {
|
||||
const data: [*]u8 = @ptrCast(ca_blob.data);
|
||||
self.allocator.free(data[0..ca_blob.len]);
|
||||
}
|
||||
Net.globalDeinit();
|
||||
self.arena.deinit();
|
||||
}
|
||||
|
||||
pub fn createClient(self: *Http, allocator: Allocator) !*Client {
|
||||
return Client.init(allocator, self.ca_blob, self.robot_store, self.config);
|
||||
}
|
||||
|
||||
pub fn newConnection(self: *Http) !Net.Connection {
|
||||
return Net.Connection.init(self.ca_blob, self.config);
|
||||
}
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
const std = @import("std");
|
||||
pub const App = @import("App.zig");
|
||||
pub const Network = @import("network/Runtime.zig");
|
||||
pub const Server = @import("Server.zig");
|
||||
pub const Config = @import("Config.zig");
|
||||
pub const URL = @import("browser/URL.zig");
|
||||
@@ -39,6 +40,7 @@ pub const mcp = @import("mcp.zig");
|
||||
pub const build_config = @import("build_config");
|
||||
pub const crash_handler = @import("crash_handler.zig");
|
||||
|
||||
pub const HttpClient = @import("browser/HttpClient.zig");
|
||||
const IS_DEBUG = @import("builtin").mode == .Debug;
|
||||
|
||||
pub const FetchOpts = struct {
|
||||
@@ -48,7 +50,7 @@ pub const FetchOpts = struct {
|
||||
writer: ?*std.Io.Writer = null,
|
||||
};
|
||||
pub fn fetch(app: *App, url: [:0]const u8, opts: FetchOpts) !void {
|
||||
const http_client = try app.http.createClient(app.allocator);
|
||||
const http_client = try HttpClient.init(app.allocator, &app.network);
|
||||
defer http_client.deinit();
|
||||
|
||||
const notification = try Notification.init(app.allocator);
|
||||
|
||||
14
src/main.zig
14
src/main.zig
@@ -93,18 +93,14 @@ fn run(allocator: Allocator, main_arena: Allocator) !void {
|
||||
return args.printUsageAndExit(false);
|
||||
};
|
||||
|
||||
// _server is global to handle graceful shutdown.
|
||||
var server = try lp.Server.init(app, address);
|
||||
defer server.deinit();
|
||||
|
||||
try sighandler.on(lp.Server.stop, .{&server});
|
||||
|
||||
// max timeout of 1 week.
|
||||
const timeout = if (opts.timeout > 604_800) 604_800_000 else @as(u32, opts.timeout) * 1000;
|
||||
server.run(address, timeout) catch |err| {
|
||||
var server = lp.Server.init(app, address) catch |err| {
|
||||
log.fatal(.app, "server run error", .{ .err = err });
|
||||
return err;
|
||||
};
|
||||
defer server.deinit();
|
||||
|
||||
try sighandler.on(lp.Network.stop, .{&app.network});
|
||||
app.network.run();
|
||||
},
|
||||
.fetch => |opts| {
|
||||
const url = opts.url;
|
||||
|
||||
@@ -46,7 +46,7 @@ pub fn main() !void {
|
||||
var test_arena = std.heap.ArenaAllocator.init(allocator);
|
||||
defer test_arena.deinit();
|
||||
|
||||
const http_client = try app.http.createClient(allocator);
|
||||
const http_client = try lp.HttpClient.init(allocator, &app.network);
|
||||
defer http_client.deinit();
|
||||
|
||||
var browser = try lp.Browser.init(app, .{ .http_client = http_client });
|
||||
|
||||
@@ -3,7 +3,7 @@ const std = @import("std");
|
||||
const lp = @import("lightpanda");
|
||||
|
||||
const App = @import("../App.zig");
|
||||
const HttpClient = @import("../http/Client.zig");
|
||||
const HttpClient = @import("../browser/HttpClient.zig");
|
||||
const testing = @import("../testing.zig");
|
||||
const protocol = @import("protocol.zig");
|
||||
const router = @import("router.zig");
|
||||
@@ -25,7 +25,7 @@ mutex: std.Thread.Mutex = .{},
|
||||
aw: std.io.Writer.Allocating,
|
||||
|
||||
pub fn init(allocator: std.mem.Allocator, app: *App, writer: *std.io.Writer) !*Self {
|
||||
const http_client = try app.http.createClient(allocator);
|
||||
const http_client = try HttpClient.init(allocator, &app.network);
|
||||
errdefer http_client.deinit();
|
||||
|
||||
const notification = try lp.Notification.init(allocator);
|
||||
|
||||
@@ -114,6 +114,7 @@ pub const Tool = struct {
|
||||
};
|
||||
|
||||
pub fn minify(comptime json: []const u8) []const u8 {
|
||||
@setEvalBranchQuota(100000);
|
||||
return comptime blk: {
|
||||
var res: []const u8 = "";
|
||||
var in_string = false;
|
||||
|
||||
@@ -74,6 +74,30 @@ pub const tool_list = [_]protocol.Tool{
|
||||
\\}
|
||||
),
|
||||
},
|
||||
.{
|
||||
.name = "interactiveElements",
|
||||
.description = "Extract interactive elements from the opened page. If a url is provided, it navigates to that url first.",
|
||||
.inputSchema = protocol.minify(
|
||||
\\{
|
||||
\\ "type": "object",
|
||||
\\ "properties": {
|
||||
\\ "url": { "type": "string", "description": "Optional URL to navigate to before extracting interactive elements." }
|
||||
\\ }
|
||||
\\}
|
||||
),
|
||||
},
|
||||
.{
|
||||
.name = "structuredData",
|
||||
.description = "Extract structured data (like JSON-LD, OpenGraph, etc) from the opened page. If a url is provided, it navigates to that url first.",
|
||||
.inputSchema = protocol.minify(
|
||||
\\{
|
||||
\\ "type": "object",
|
||||
\\ "properties": {
|
||||
\\ "url": { "type": "string", "description": "Optional URL to navigate to before extracting structured data." }
|
||||
\\ }
|
||||
\\}
|
||||
),
|
||||
},
|
||||
};
|
||||
|
||||
pub fn handleList(server: *Server, arena: std.mem.Allocator, req: protocol.Request) !void {
|
||||
@@ -108,7 +132,8 @@ const ToolStreamingText = struct {
|
||||
},
|
||||
.links => {
|
||||
if (Selector.querySelectorAll(self.page.document.asNode(), "a[href]", self.page)) |list| {
|
||||
defer list.deinit(self.page);
|
||||
defer list.deinit(self.page._session);
|
||||
|
||||
var first = true;
|
||||
for (list._nodes) |node| {
|
||||
if (node.is(Element.Html.Anchor)) |anchor| {
|
||||
@@ -153,6 +178,8 @@ const ToolAction = enum {
|
||||
navigate,
|
||||
markdown,
|
||||
links,
|
||||
interactiveElements,
|
||||
structuredData,
|
||||
evaluate,
|
||||
semantic_tree,
|
||||
};
|
||||
@@ -162,6 +189,8 @@ const tool_map = std.StaticStringMap(ToolAction).initComptime(.{
|
||||
.{ "navigate", .navigate },
|
||||
.{ "markdown", .markdown },
|
||||
.{ "links", .links },
|
||||
.{ "interactiveElements", .interactiveElements },
|
||||
.{ "structuredData", .structuredData },
|
||||
.{ "evaluate", .evaluate },
|
||||
.{ "semantic_tree", .semantic_tree },
|
||||
});
|
||||
@@ -188,6 +217,8 @@ pub fn handleCall(server: *Server, arena: std.mem.Allocator, req: protocol.Reque
|
||||
.goto, .navigate => try handleGoto(server, arena, req.id.?, call_params.arguments),
|
||||
.markdown => try handleMarkdown(server, arena, req.id.?, call_params.arguments),
|
||||
.links => try handleLinks(server, arena, req.id.?, call_params.arguments),
|
||||
.interactiveElements => try handleInteractiveElements(server, arena, req.id.?, call_params.arguments),
|
||||
.structuredData => try handleStructuredData(server, arena, req.id.?, call_params.arguments),
|
||||
.evaluate => try handleEvaluate(server, arena, req.id.?, call_params.arguments),
|
||||
.semantic_tree => try handleSemanticTree(server, arena, req.id.?, call_params.arguments),
|
||||
}
|
||||
@@ -264,6 +295,58 @@ fn handleSemanticTree(server: *Server, arena: std.mem.Allocator, id: std.json.Va
|
||||
try server.sendResult(id, protocol.CallToolResult(ToolStreamingText){ .content = &content });
|
||||
}
|
||||
|
||||
fn handleInteractiveElements(server: *Server, arena: std.mem.Allocator, id: std.json.Value, arguments: ?std.json.Value) !void {
|
||||
const Params = struct {
|
||||
url: ?[:0]const u8 = null,
|
||||
};
|
||||
if (arguments) |args_raw| {
|
||||
if (std.json.parseFromValueLeaky(Params, arena, args_raw, .{ .ignore_unknown_fields = true })) |args| {
|
||||
if (args.url) |u| {
|
||||
try performGoto(server, u, id);
|
||||
}
|
||||
} else |_| {}
|
||||
}
|
||||
const page = server.session.currentPage() orelse {
|
||||
return server.sendError(id, .PageNotLoaded, "Page not loaded");
|
||||
};
|
||||
|
||||
const elements = lp.interactive.collectInteractiveElements(page.document.asNode(), arena, page) catch |err| {
|
||||
log.err(.mcp, "elements collection failed", .{ .err = err });
|
||||
return server.sendError(id, .InternalError, "Failed to collect interactive elements");
|
||||
};
|
||||
var aw: std.Io.Writer.Allocating = .init(arena);
|
||||
try std.json.Stringify.value(elements, .{}, &aw.writer);
|
||||
|
||||
const content = [_]protocol.TextContent([]const u8){.{ .text = aw.written() }};
|
||||
try server.sendResult(id, protocol.CallToolResult([]const u8){ .content = &content });
|
||||
}
|
||||
|
||||
fn handleStructuredData(server: *Server, arena: std.mem.Allocator, id: std.json.Value, arguments: ?std.json.Value) !void {
|
||||
const Params = struct {
|
||||
url: ?[:0]const u8 = null,
|
||||
};
|
||||
if (arguments) |args_raw| {
|
||||
if (std.json.parseFromValueLeaky(Params, arena, args_raw, .{ .ignore_unknown_fields = true })) |args| {
|
||||
if (args.url) |u| {
|
||||
try performGoto(server, u, id);
|
||||
}
|
||||
} else |_| {}
|
||||
}
|
||||
const page = server.session.currentPage() orelse {
|
||||
return server.sendError(id, .PageNotLoaded, "Page not loaded");
|
||||
};
|
||||
|
||||
const data = lp.structured_data.collectStructuredData(page.document.asNode(), arena, page) catch |err| {
|
||||
log.err(.mcp, "struct data collection failed", .{ .err = err });
|
||||
return server.sendError(id, .InternalError, "Failed to collect structured data");
|
||||
};
|
||||
var aw: std.Io.Writer.Allocating = .init(arena);
|
||||
try std.json.Stringify.value(data, .{}, &aw.writer);
|
||||
|
||||
const content = [_]protocol.TextContent([]const u8){.{ .text = aw.written() }};
|
||||
try server.sendResult(id, protocol.CallToolResult([]const u8){ .content = &content });
|
||||
}
|
||||
|
||||
fn handleEvaluate(server: *Server, arena: std.mem.Allocator, id: std.json.Value, arguments: ?std.json.Value) !void {
|
||||
const args = try parseArguments(EvaluateParams, arena, arguments, server, id, "evaluate");
|
||||
|
||||
|
||||
402
src/network/Runtime.zig
Normal file
402
src/network/Runtime.zig
Normal file
@@ -0,0 +1,402 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const net = std.net;
|
||||
const posix = std.posix;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const lp = @import("lightpanda");
|
||||
const Config = @import("../Config.zig");
|
||||
const libcurl = @import("../sys/libcurl.zig");
|
||||
|
||||
const net_http = @import("http.zig");
|
||||
const RobotStore = @import("Robots.zig").RobotStore;
|
||||
|
||||
const Runtime = @This();
|
||||
|
||||
const Listener = struct {
|
||||
socket: posix.socket_t,
|
||||
ctx: *anyopaque,
|
||||
onAccept: *const fn (ctx: *anyopaque, socket: posix.socket_t) void,
|
||||
};
|
||||
|
||||
allocator: Allocator,
|
||||
|
||||
config: *const Config,
|
||||
ca_blob: ?net_http.Blob,
|
||||
robot_store: RobotStore,
|
||||
|
||||
pollfds: []posix.pollfd,
|
||||
listener: ?Listener = null,
|
||||
|
||||
// Wakeup pipe: workers write to [1], main thread polls [0]
|
||||
wakeup_pipe: [2]posix.fd_t = .{ -1, -1 },
|
||||
|
||||
shutdown: std.atomic.Value(bool) = .init(false),
|
||||
|
||||
const ZigToCurlAllocator = struct {
|
||||
// C11 requires malloc to return memory aligned to max_align_t (16 bytes on x86_64).
|
||||
// We match this guarantee since libcurl expects malloc-compatible alignment.
|
||||
const alignment = 16;
|
||||
|
||||
const Block = extern struct {
|
||||
size: usize = 0,
|
||||
_padding: [alignment - @sizeOf(usize)]u8 = .{0} ** (alignment - @sizeOf(usize)),
|
||||
|
||||
inline fn fullsize(bytes: usize) usize {
|
||||
return alignment + bytes;
|
||||
}
|
||||
|
||||
inline fn fromPtr(ptr: *anyopaque) *Block {
|
||||
const raw: [*]u8 = @ptrCast(ptr);
|
||||
return @ptrCast(@alignCast(raw - @sizeOf(Block)));
|
||||
}
|
||||
|
||||
inline fn data(self: *Block) [*]u8 {
|
||||
const ptr: [*]u8 = @ptrCast(self);
|
||||
return ptr + @sizeOf(Block);
|
||||
}
|
||||
|
||||
inline fn slice(self: *Block) []align(alignment) u8 {
|
||||
const base: [*]align(alignment) u8 = @ptrCast(@alignCast(self));
|
||||
return base[0 .. alignment + self.size];
|
||||
}
|
||||
};
|
||||
|
||||
comptime {
|
||||
std.debug.assert(@sizeOf(Block) == alignment);
|
||||
}
|
||||
|
||||
var instance: ?ZigToCurlAllocator = null;
|
||||
|
||||
allocator: Allocator,
|
||||
|
||||
pub fn init(allocator: Allocator) void {
|
||||
lp.assert(instance == null, "Initialization of curl must happen only once", .{});
|
||||
instance = .{ .allocator = allocator };
|
||||
}
|
||||
|
||||
pub fn interface() libcurl.CurlAllocator {
|
||||
return .{
|
||||
.free = free,
|
||||
.strdup = strdup,
|
||||
.malloc = malloc,
|
||||
.calloc = calloc,
|
||||
.realloc = realloc,
|
||||
};
|
||||
}
|
||||
|
||||
fn _allocBlock(size: usize) ?*Block {
|
||||
const slice = instance.?.allocator.alignedAlloc(u8, .fromByteUnits(alignment), Block.fullsize(size)) catch return null;
|
||||
const block: *Block = @ptrCast(@alignCast(slice.ptr));
|
||||
block.size = size;
|
||||
return block;
|
||||
}
|
||||
|
||||
fn _freeBlock(header: *Block) void {
|
||||
instance.?.allocator.free(header.slice());
|
||||
}
|
||||
|
||||
fn malloc(size: usize) ?*anyopaque {
|
||||
const block = _allocBlock(size) orelse return null;
|
||||
return @ptrCast(block.data());
|
||||
}
|
||||
|
||||
fn calloc(nmemb: usize, size: usize) ?*anyopaque {
|
||||
const total = nmemb * size;
|
||||
const block = _allocBlock(total) orelse return null;
|
||||
const ptr = block.data();
|
||||
@memset(ptr[0..total], 0); // for historical reasons, calloc zeroes memory, but malloc does not.
|
||||
return @ptrCast(ptr);
|
||||
}
|
||||
|
||||
fn realloc(ptr: ?*anyopaque, size: usize) ?*anyopaque {
|
||||
const p = ptr orelse return malloc(size);
|
||||
const block = Block.fromPtr(p);
|
||||
|
||||
const old_size = block.size;
|
||||
if (size == old_size) return ptr;
|
||||
|
||||
if (instance.?.allocator.resize(block.slice(), alignment + size)) {
|
||||
block.size = size;
|
||||
return ptr;
|
||||
}
|
||||
|
||||
const copy_size = @min(old_size, size);
|
||||
const new_block = _allocBlock(size) orelse return null;
|
||||
@memcpy(new_block.data()[0..copy_size], block.data()[0..copy_size]);
|
||||
_freeBlock(block);
|
||||
return @ptrCast(new_block.data());
|
||||
}
|
||||
|
||||
fn free(ptr: ?*anyopaque) void {
|
||||
const p = ptr orelse return;
|
||||
_freeBlock(Block.fromPtr(p));
|
||||
}
|
||||
|
||||
fn strdup(str: [*:0]const u8) ?[*:0]u8 {
|
||||
const len = std.mem.len(str);
|
||||
const header = _allocBlock(len + 1) orelse return null;
|
||||
const ptr = header.data();
|
||||
@memcpy(ptr[0..len], str[0..len]);
|
||||
ptr[len] = 0;
|
||||
return ptr[0..len :0];
|
||||
}
|
||||
};
|
||||
|
||||
fn globalInit(allocator: Allocator) void {
|
||||
ZigToCurlAllocator.init(allocator);
|
||||
|
||||
libcurl.curl_global_init(.{ .ssl = true }, ZigToCurlAllocator.interface()) catch |err| {
|
||||
lp.assert(false, "curl global init", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
fn globalDeinit() void {
|
||||
libcurl.curl_global_cleanup();
|
||||
}
|
||||
|
||||
pub fn init(allocator: Allocator, config: *const Config) !Runtime {
|
||||
globalInit(allocator);
|
||||
errdefer globalDeinit();
|
||||
|
||||
const pipe = try posix.pipe2(.{ .NONBLOCK = true, .CLOEXEC = true });
|
||||
|
||||
// 0 is wakeup, 1 is listener
|
||||
const pollfds = try allocator.alloc(posix.pollfd, 2);
|
||||
errdefer allocator.free(pollfds);
|
||||
|
||||
@memset(pollfds, .{ .fd = -1, .events = 0, .revents = 0 });
|
||||
pollfds[0] = .{ .fd = pipe[0], .events = posix.POLL.IN, .revents = 0 };
|
||||
|
||||
var ca_blob: ?net_http.Blob = null;
|
||||
if (config.tlsVerifyHost()) {
|
||||
ca_blob = try loadCerts(allocator);
|
||||
}
|
||||
|
||||
return .{
|
||||
.allocator = allocator,
|
||||
.config = config,
|
||||
.ca_blob = ca_blob,
|
||||
.robot_store = RobotStore.init(allocator),
|
||||
.pollfds = pollfds,
|
||||
.wakeup_pipe = pipe,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Runtime) void {
|
||||
for (&self.wakeup_pipe) |*fd| {
|
||||
if (fd.* >= 0) {
|
||||
posix.close(fd.*);
|
||||
fd.* = -1;
|
||||
}
|
||||
}
|
||||
|
||||
self.allocator.free(self.pollfds);
|
||||
|
||||
if (self.ca_blob) |ca_blob| {
|
||||
const data: [*]u8 = @ptrCast(ca_blob.data);
|
||||
self.allocator.free(data[0..ca_blob.len]);
|
||||
}
|
||||
|
||||
self.robot_store.deinit();
|
||||
|
||||
globalDeinit();
|
||||
}
|
||||
|
||||
pub fn bind(
|
||||
self: *Runtime,
|
||||
address: net.Address,
|
||||
ctx: *anyopaque,
|
||||
on_accept: *const fn (ctx: *anyopaque, socket: posix.socket_t) void,
|
||||
) !void {
|
||||
const flags = posix.SOCK.STREAM | posix.SOCK.CLOEXEC | posix.SOCK.NONBLOCK;
|
||||
const listener = try posix.socket(address.any.family, flags, posix.IPPROTO.TCP);
|
||||
errdefer posix.close(listener);
|
||||
|
||||
try posix.setsockopt(listener, posix.SOL.SOCKET, posix.SO.REUSEADDR, &std.mem.toBytes(@as(c_int, 1)));
|
||||
if (@hasDecl(posix.TCP, "NODELAY")) {
|
||||
try posix.setsockopt(listener, posix.IPPROTO.TCP, posix.TCP.NODELAY, &std.mem.toBytes(@as(c_int, 1)));
|
||||
}
|
||||
|
||||
try posix.bind(listener, &address.any, address.getOsSockLen());
|
||||
try posix.listen(listener, self.config.maxPendingConnections());
|
||||
|
||||
if (self.listener != null) return error.TooManyListeners;
|
||||
|
||||
self.listener = .{
|
||||
.socket = listener,
|
||||
.ctx = ctx,
|
||||
.onAccept = on_accept,
|
||||
};
|
||||
self.pollfds[1] = .{
|
||||
.fd = listener,
|
||||
.events = posix.POLL.IN,
|
||||
.revents = 0,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn run(self: *Runtime) void {
|
||||
while (!self.shutdown.load(.acquire)) {
|
||||
const listener = self.listener orelse return;
|
||||
|
||||
_ = posix.poll(self.pollfds, -1) catch |err| {
|
||||
lp.log.err(.app, "poll", .{ .err = err });
|
||||
continue;
|
||||
};
|
||||
|
||||
// check wakeup socket
|
||||
if (self.pollfds[0].revents != 0) {
|
||||
self.pollfds[0].revents = 0;
|
||||
|
||||
// If we were woken up, perhaps everything was cancelled and the iteration can be completed.
|
||||
if (self.shutdown.load(.acquire)) break;
|
||||
}
|
||||
|
||||
// check new connections;
|
||||
if (self.pollfds[1].revents == 0) continue;
|
||||
self.pollfds[1].revents = 0;
|
||||
|
||||
const socket = posix.accept(listener.socket, null, null, posix.SOCK.NONBLOCK) catch |err| {
|
||||
switch (err) {
|
||||
error.SocketNotListening, error.ConnectionAborted => {
|
||||
self.pollfds[1] = .{ .fd = -1, .events = 0, .revents = 0 };
|
||||
self.listener = null;
|
||||
},
|
||||
error.WouldBlock => {},
|
||||
else => {
|
||||
lp.log.err(.app, "accept", .{ .err = err });
|
||||
},
|
||||
}
|
||||
continue;
|
||||
};
|
||||
|
||||
listener.onAccept(listener.ctx, socket);
|
||||
}
|
||||
|
||||
if (self.listener) |listener| {
|
||||
posix.shutdown(listener.socket, .both) catch |err| blk: {
|
||||
if (err == error.SocketNotConnected and builtin.os.tag != .linux) {
|
||||
// This error is normal/expected on BSD/MacOS. We probably
|
||||
// shouldn't bother calling shutdown at all, but I guess this
|
||||
// is safer.
|
||||
break :blk;
|
||||
}
|
||||
lp.log.warn(.app, "listener shutdown", .{ .err = err });
|
||||
};
|
||||
posix.close(listener.socket);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop(self: *Runtime) void {
|
||||
self.shutdown.store(true, .release);
|
||||
_ = posix.write(self.wakeup_pipe[1], &.{1}) catch {};
|
||||
}
|
||||
|
||||
pub fn newConnection(self: *Runtime) !net_http.Connection {
|
||||
return net_http.Connection.init(self.ca_blob, self.config);
|
||||
}
|
||||
|
||||
// Wraps lines @ 64 columns. A PEM is basically a base64 encoded DER (which is
|
||||
// what Zig has), with lines wrapped at 64 characters and with a basic header
|
||||
// and footer
|
||||
const LineWriter = struct {
|
||||
col: usize = 0,
|
||||
inner: std.ArrayList(u8).Writer,
|
||||
|
||||
pub fn writeAll(self: *LineWriter, data: []const u8) !void {
|
||||
var writer = self.inner;
|
||||
|
||||
var col = self.col;
|
||||
const len = 64 - col;
|
||||
|
||||
var remain = data;
|
||||
if (remain.len > len) {
|
||||
col = 0;
|
||||
try writer.writeAll(data[0..len]);
|
||||
try writer.writeByte('\n');
|
||||
remain = data[len..];
|
||||
}
|
||||
|
||||
while (remain.len > 64) {
|
||||
try writer.writeAll(remain[0..64]);
|
||||
try writer.writeByte('\n');
|
||||
remain = data[len..];
|
||||
}
|
||||
try writer.writeAll(remain);
|
||||
self.col = col + remain.len;
|
||||
}
|
||||
};
|
||||
|
||||
// TODO: on BSD / Linux, we could just read the PEM file directly.
|
||||
// This whole rescan + decode is really just needed for MacOS. On Linux
|
||||
// bundle.rescan does find the .pem file(s) which could be in a few different
|
||||
// places, so it's still useful, just not efficient.
|
||||
fn loadCerts(allocator: Allocator) !libcurl.CurlBlob {
|
||||
var bundle: std.crypto.Certificate.Bundle = .{};
|
||||
try bundle.rescan(allocator);
|
||||
defer bundle.deinit(allocator);
|
||||
|
||||
const bytes = bundle.bytes.items;
|
||||
if (bytes.len == 0) {
|
||||
lp.log.warn(.app, "No system certificates", .{});
|
||||
return .{
|
||||
.len = 0,
|
||||
.flags = 0,
|
||||
.data = bytes.ptr,
|
||||
};
|
||||
}
|
||||
|
||||
const encoder = std.base64.standard.Encoder;
|
||||
var arr: std.ArrayList(u8) = .empty;
|
||||
|
||||
const encoded_size = encoder.calcSize(bytes.len);
|
||||
const buffer_size = encoded_size +
|
||||
(bundle.map.count() * 75) + // start / end per certificate + extra, just in case
|
||||
(encoded_size / 64) // newline per 64 characters
|
||||
;
|
||||
try arr.ensureTotalCapacity(allocator, buffer_size);
|
||||
errdefer arr.deinit(allocator);
|
||||
var writer = arr.writer(allocator);
|
||||
|
||||
var it = bundle.map.valueIterator();
|
||||
while (it.next()) |index| {
|
||||
const cert = try std.crypto.Certificate.der.Element.parse(bytes, index.*);
|
||||
|
||||
try writer.writeAll("-----BEGIN CERTIFICATE-----\n");
|
||||
var line_writer = LineWriter{ .inner = writer };
|
||||
try encoder.encodeWriter(&line_writer, bytes[index.*..cert.slice.end]);
|
||||
try writer.writeAll("\n-----END CERTIFICATE-----\n");
|
||||
}
|
||||
|
||||
// Final encoding should not be larger than our initial size estimate
|
||||
lp.assert(buffer_size > arr.items.len, "Http loadCerts", .{ .estimate = buffer_size, .len = arr.items.len });
|
||||
|
||||
// Allocate exactly the size needed and copy the data
|
||||
const result = try allocator.dupe(u8, arr.items);
|
||||
// Free the original oversized allocation
|
||||
arr.deinit(allocator);
|
||||
|
||||
return .{
|
||||
.len = result.len,
|
||||
.data = result.ptr,
|
||||
.flags = 0,
|
||||
};
|
||||
}
|
||||
610
src/network/http.zig
Normal file
610
src/network/http.zig
Normal file
@@ -0,0 +1,610 @@
|
||||
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const posix = std.posix;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const Config = @import("../Config.zig");
|
||||
const libcurl = @import("../sys/libcurl.zig");
|
||||
|
||||
const log = @import("lightpanda").log;
|
||||
const assert = @import("lightpanda").assert;
|
||||
|
||||
pub const ENABLE_DEBUG = false;
|
||||
const IS_DEBUG = builtin.mode == .Debug;
|
||||
|
||||
pub const Blob = libcurl.CurlBlob;
|
||||
pub const WaitFd = libcurl.CurlWaitFd;
|
||||
pub const writefunc_error = libcurl.curl_writefunc_error;
|
||||
|
||||
const Error = libcurl.Error;
|
||||
const ErrorMulti = libcurl.ErrorMulti;
|
||||
const errorFromCode = libcurl.errorFromCode;
|
||||
const errorMFromCode = libcurl.errorMFromCode;
|
||||
const errorCheck = libcurl.errorCheck;
|
||||
const errorMCheck = libcurl.errorMCheck;
|
||||
|
||||
pub fn curl_version() [*c]const u8 {
|
||||
return libcurl.curl_version();
|
||||
}
|
||||
|
||||
pub const Method = enum(u8) {
|
||||
GET = 0,
|
||||
PUT = 1,
|
||||
POST = 2,
|
||||
DELETE = 3,
|
||||
HEAD = 4,
|
||||
OPTIONS = 5,
|
||||
PATCH = 6,
|
||||
PROPFIND = 7,
|
||||
};
|
||||
|
||||
pub const Header = struct {
|
||||
name: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
pub const Headers = struct {
|
||||
headers: ?*libcurl.CurlSList,
|
||||
cookies: ?[*c]const u8,
|
||||
|
||||
pub fn init(user_agent: [:0]const u8) !Headers {
|
||||
const header_list = libcurl.curl_slist_append(null, user_agent);
|
||||
if (header_list == null) {
|
||||
return error.OutOfMemory;
|
||||
}
|
||||
return .{ .headers = header_list, .cookies = null };
|
||||
}
|
||||
|
||||
pub fn deinit(self: *const Headers) void {
|
||||
if (self.headers) |hdr| {
|
||||
libcurl.curl_slist_free_all(hdr);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(self: *Headers, header: [*c]const u8) !void {
|
||||
// Copies the value
|
||||
const updated_headers = libcurl.curl_slist_append(self.headers, header);
|
||||
if (updated_headers == null) {
|
||||
return error.OutOfMemory;
|
||||
}
|
||||
|
||||
self.headers = updated_headers;
|
||||
}
|
||||
|
||||
fn parseHeader(header_str: []const u8) ?Header {
|
||||
const colon_pos = std.mem.indexOfScalar(u8, header_str, ':') orelse return null;
|
||||
|
||||
const name = std.mem.trim(u8, header_str[0..colon_pos], " \t");
|
||||
const value = std.mem.trim(u8, header_str[colon_pos + 1 ..], " \t");
|
||||
|
||||
return .{ .name = name, .value = value };
|
||||
}
|
||||
|
||||
pub fn iterator(self: *Headers) Iterator {
|
||||
return .{
|
||||
.header = self.headers,
|
||||
.cookies = self.cookies,
|
||||
};
|
||||
}
|
||||
|
||||
const Iterator = struct {
|
||||
header: [*c]libcurl.CurlSList,
|
||||
cookies: ?[*c]const u8,
|
||||
|
||||
pub fn next(self: *Iterator) ?Header {
|
||||
const h = self.header orelse {
|
||||
const cookies = self.cookies orelse return null;
|
||||
self.cookies = null;
|
||||
return .{ .name = "Cookie", .value = std.mem.span(@as([*:0]const u8, cookies)) };
|
||||
};
|
||||
|
||||
self.header = h.*.next;
|
||||
return parseHeader(std.mem.span(@as([*:0]const u8, @ptrCast(h.*.data))));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
// In normal cases, the header iterator comes from the curl linked list.
|
||||
// But it's also possible to inject a response, via `transfer.fulfill`. In that
|
||||
// case, the resposne headers are a list, []const Http.Header.
|
||||
// This union, is an iterator that exposes the same API for either case.
|
||||
pub const HeaderIterator = union(enum) {
|
||||
curl: CurlHeaderIterator,
|
||||
list: ListHeaderIterator,
|
||||
|
||||
pub fn next(self: *HeaderIterator) ?Header {
|
||||
switch (self.*) {
|
||||
inline else => |*it| return it.next(),
|
||||
}
|
||||
}
|
||||
|
||||
const CurlHeaderIterator = struct {
|
||||
conn: *const Connection,
|
||||
prev: ?*libcurl.CurlHeader = null,
|
||||
|
||||
pub fn next(self: *CurlHeaderIterator) ?Header {
|
||||
const h = libcurl.curl_easy_nextheader(self.conn.easy, .header, -1, self.prev) orelse return null;
|
||||
self.prev = h;
|
||||
|
||||
const header = h.*;
|
||||
return .{
|
||||
.name = std.mem.span(header.name),
|
||||
.value = std.mem.span(header.value),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const ListHeaderIterator = struct {
|
||||
index: usize = 0,
|
||||
list: []const Header,
|
||||
|
||||
pub fn next(self: *ListHeaderIterator) ?Header {
|
||||
const idx = self.index;
|
||||
if (idx == self.list.len) {
|
||||
return null;
|
||||
}
|
||||
self.index = idx + 1;
|
||||
return self.list[idx];
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const HeaderValue = struct {
|
||||
value: []const u8,
|
||||
amount: usize,
|
||||
};
|
||||
|
||||
pub const AuthChallenge = struct {
|
||||
status: u16,
|
||||
source: ?enum { server, proxy },
|
||||
scheme: ?enum { basic, digest },
|
||||
realm: ?[]const u8,
|
||||
|
||||
pub fn parse(status: u16, header: []const u8) !AuthChallenge {
|
||||
var ac: AuthChallenge = .{
|
||||
.status = status,
|
||||
.source = null,
|
||||
.realm = null,
|
||||
.scheme = null,
|
||||
};
|
||||
|
||||
const sep = std.mem.indexOfPos(u8, header, 0, ": ") orelse return error.InvalidHeader;
|
||||
const hname = header[0..sep];
|
||||
const hvalue = header[sep + 2 ..];
|
||||
|
||||
if (std.ascii.eqlIgnoreCase("WWW-Authenticate", hname)) {
|
||||
ac.source = .server;
|
||||
} else if (std.ascii.eqlIgnoreCase("Proxy-Authenticate", hname)) {
|
||||
ac.source = .proxy;
|
||||
} else {
|
||||
return error.InvalidAuthChallenge;
|
||||
}
|
||||
|
||||
const pos = std.mem.indexOfPos(u8, std.mem.trim(u8, hvalue, std.ascii.whitespace[0..]), 0, " ") orelse hvalue.len;
|
||||
const _scheme = hvalue[0..pos];
|
||||
if (std.ascii.eqlIgnoreCase(_scheme, "basic")) {
|
||||
ac.scheme = .basic;
|
||||
} else if (std.ascii.eqlIgnoreCase(_scheme, "digest")) {
|
||||
ac.scheme = .digest;
|
||||
} else {
|
||||
return error.UnknownAuthChallengeScheme;
|
||||
}
|
||||
|
||||
return ac;
|
||||
}
|
||||
};
|
||||
|
||||
/// Snapshot of a response's status line and key metadata, captured from the
/// easy handle (or from an injected response).
pub const ResponseHead = struct {
    pub const MAX_CONTENT_TYPE_LEN = 64;

    status: u16,
    url: [*c]const u8,
    redirect_count: u32,
    // Length of the valid portion of _content_type; 0 means "none recorded".
    _content_type_len: usize = 0,
    _content_type: [MAX_CONTENT_TYPE_LEN]u8 = undefined,
    // this is normally an empty list, but if the response is being injected
    // than it'll be populated. It isn't meant to be used directly, but should
    // be used through the transfer.responseHeaderIterator() which abstracts
    // whether the headers are from a live curl easy handle, or injected.
    _injected_headers: []const Header = &.{},

    /// Returns the buffered Content-Type, or null when none was recorded.
    pub fn contentType(self: *ResponseHead) ?[]u8 {
        const n = self._content_type_len;
        if (n == 0) {
            return null;
        }
        return self._content_type[0..n];
    }
};
|
||||
|
||||
/// Wraps one libcurl "easy" handle. Connections are pooled and re-used by
/// `Handles`, so per-request state (method, body, headers, ...) must be
/// re-set before each use.
pub const Connection = struct {
    easy: *libcurl.Curl,
    node: Handles.HandleList.Node = .{},

    /// Creates and configures an easy handle. When `ca_blob_` is null, TLS
    /// verification is disabled (only valid when the config agrees).
    pub fn init(
        ca_blob_: ?libcurl.CurlBlob,
        config: *const Config,
    ) !Connection {
        const easy = libcurl.curl_easy_init() orelse return error.FailedToInitializeEasy;
        errdefer libcurl.curl_easy_cleanup(easy);

        // timeouts
        try libcurl.curl_easy_setopt(easy, .timeout_ms, config.httpTimeout());
        try libcurl.curl_easy_setopt(easy, .connect_timeout_ms, config.httpConnectTimeout());

        // redirect behavior
        try libcurl.curl_easy_setopt(easy, .max_redirs, config.httpMaxRedirects());
        try libcurl.curl_easy_setopt(easy, .follow_location, 2);
        try libcurl.curl_easy_setopt(easy, .redir_protocols_str, "HTTP,HTTPS"); // remove FTP and FTPS from the default

        // proxy
        const http_proxy = config.httpProxy();
        if (http_proxy) |proxy| {
            try libcurl.curl_easy_setopt(easy, .proxy, proxy.ptr);
        }

        // tls
        if (ca_blob_) |ca_blob| {
            try libcurl.curl_easy_setopt(easy, .ca_info_blob, ca_blob);
            if (http_proxy != null) {
                try libcurl.curl_easy_setopt(easy, .proxy_ca_info_blob, ca_blob);
            }
        } else {
            assert(config.tlsVerifyHost() == false, "Http.init tls_verify_host", .{});

            try libcurl.curl_easy_setopt(easy, .ssl_verify_host, false);
            try libcurl.curl_easy_setopt(easy, .ssl_verify_peer, false);

            if (http_proxy != null) {
                try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_host, false);
                try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_peer, false);
            }
        }

        // compression, don't remove this. CloudFront will send gzip content
        // even if we don't support it, and then it won't be decompressed.
        // empty string means: use whatever's available
        try libcurl.curl_easy_setopt(easy, .accept_encoding, "");

        // debug
        if (comptime ENABLE_DEBUG) {
            try libcurl.curl_easy_setopt(easy, .verbose, true);

            // Sometimes the default debug output hides some useful data. You
            // can uncomment the following line (BUT KEEP THE LINE ABOVE
            // AS-IS) to get more control over the data (specifically,
            // `CURLINFO_TEXT` can include useful data).

            // try libcurl.curl_easy_setopt(easy, .debug_function, debugCallback);
        }

        return .{ .easy = easy };
    }

    pub fn deinit(self: *const Connection) void {
        libcurl.curl_easy_cleanup(self.easy);
    }

    pub fn setURL(self: *const Connection, url: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .url, url.ptr);
    }

    // A libcurl request has 2 methods. The first controls how libcurl
    // behaves, most notably across redirects: a POST answered by a 301
    // becomes a GET, while a POST answered by a 308 stays a POST (and the
    // body is re-sent). The second method is the literal string written into
    // the request headers. The two can disagree: curl can behave like a GET
    // while sending "POST" in the header.
    //
    // Here we only set the 2nd (string) method; the behavioral one is set
    // elsewhere based on whether the request has a body.
    //
    // Because connections are re-used, this must be re-set for every use.
    // Otherwise, after a HEAD request, a re-used connection would keep
    // sending HEAD. (Whether resetting the 1st method is equally necessary
    // is unclear, but we reset it too, to be safe.)
    pub fn setMethod(self: *const Connection, method: Method) !void {
        const verb: [:0]const u8 = switch (method) {
            .GET => "GET",
            .POST => "POST",
            .PUT => "PUT",
            .DELETE => "DELETE",
            .HEAD => "HEAD",
            .OPTIONS => "OPTIONS",
            .PATCH => "PATCH",
            .PROPFIND => "PROPFIND",
        };
        try libcurl.curl_easy_setopt(self.easy, .custom_request, verb.ptr);
    }

    pub fn setBody(self: *const Connection, body: []const u8) !void {
        const easy = self.easy;
        try libcurl.curl_easy_setopt(easy, .post, true);
        try libcurl.curl_easy_setopt(easy, .post_field_size, body.len);
        try libcurl.curl_easy_setopt(easy, .copy_post_fields, body.ptr);
    }

    pub fn setGetMode(self: *const Connection) !void {
        try libcurl.curl_easy_setopt(self.easy, .http_get, true);
    }

    pub fn setHeaders(self: *const Connection, headers: *Headers) !void {
        try libcurl.curl_easy_setopt(self.easy, .http_header, headers.headers);
    }

    pub fn setCookies(self: *const Connection, cookies: [*c]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .cookie, cookies);
    }

    pub fn setPrivate(self: *const Connection, ptr: *anyopaque) !void {
        try libcurl.curl_easy_setopt(self.easy, .private, ptr);
    }

    pub fn setProxyCredentials(self: *const Connection, creds: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .proxy_user_pwd, creds.ptr);
    }

    pub fn setCredentials(self: *const Connection, creds: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .user_pwd, creds.ptr);
    }

    pub fn setCallbacks(
        self: *const Connection,
        comptime header_cb: libcurl.CurlHeaderFunction,
        comptime data_cb: libcurl.CurlWriteFunction,
    ) !void {
        const easy = self.easy;
        // the easy handle itself is the userdata handed to both callbacks
        try libcurl.curl_easy_setopt(easy, .header_data, easy);
        try libcurl.curl_easy_setopt(easy, .header_function, header_cb);
        try libcurl.curl_easy_setopt(easy, .write_data, easy);
        try libcurl.curl_easy_setopt(easy, .write_function, data_cb);
    }

    pub fn setProxy(self: *const Connection, proxy: ?[*:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .proxy, proxy);
    }

    pub fn setTlsVerify(self: *const Connection, verify: bool, use_proxy: bool) !void {
        const easy = self.easy;
        try libcurl.curl_easy_setopt(easy, .ssl_verify_host, verify);
        try libcurl.curl_easy_setopt(easy, .ssl_verify_peer, verify);
        if (use_proxy) {
            try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_host, verify);
            try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_peer, verify);
        }
    }

    pub fn getEffectiveUrl(self: *const Connection) ![*c]const u8 {
        var url: [*c]u8 = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .effective_url, &url);
        return url;
    }

    /// Returns the HTTP status, or 0 when curl reports a value outside u16.
    pub fn getResponseCode(self: *const Connection) !u16 {
        var code: c_long = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .response_code, &code);
        if (code < 0 or code > std.math.maxInt(u16)) {
            return 0;
        }
        return @intCast(code);
    }

    pub fn getRedirectCount(self: *const Connection) !u32 {
        var count: c_long = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .redirect_count, &count);
        return @intCast(count);
    }

    /// Looks up the index'th response header named `name`; returns null when
    /// absent or when the curl lookup fails (which is logged, not propagated).
    pub fn getResponseHeader(self: *const Connection, name: [:0]const u8, index: usize) ?HeaderValue {
        var hdr: ?*libcurl.CurlHeader = null;
        libcurl.curl_easy_header(self.easy, name, index, .header, -1, &hdr) catch |err| {
            // ErrorHeader includes OutOfMemory — rare but real errors from
            // curl internals. Logged and returned as null since callers
            // don't expect errors.
            log.err(.http, "get response header", .{
                .name = name,
                .err = err,
            });
            return null;
        };
        const found = hdr orelse return null;
        return .{
            .amount = found.amount,
            .value = std.mem.span(found.value),
        };
    }

    pub fn getPrivate(self: *const Connection) !*anyopaque {
        var private: *anyopaque = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .private, &private);
        return private;
    }

    // Headers that must not be exposed to users for interception.
    pub fn secretHeaders(_: *const Connection, headers: *Headers, http_headers: *const Config.HttpHeaders) !void {
        if (http_headers.proxy_bearer_header) |hdr| {
            try headers.add(hdr);
        }
    }

    /// Performs a blocking request on this handle and returns the status code.
    pub fn request(self: *const Connection, http_headers: *const Config.HttpHeaders) !u16 {
        var header_list = try Headers.init(http_headers.user_agent_header);
        defer header_list.deinit();
        try self.secretHeaders(&header_list, http_headers);
        try self.setHeaders(&header_list);

        // Add cookies.
        if (header_list.cookies) |cookies| {
            try self.setCookies(cookies);
        }

        try libcurl.curl_easy_perform(self.easy);
        return self.getResponseCode();
    }
};
|
||||
|
||||
/// Fixed-size pool of Connections driven by one curl multi handle.
/// Connections move between three intrusive lists: available -> in_use, and
/// (when removal fails during a perform) in_use -> dirty -> available.
pub const Handles = struct {
    connections: []Connection,
    dirty: HandleList,
    in_use: HandleList,
    available: HandleList,
    multi: *libcurl.CurlM,
    performing: bool = false,

    pub const HandleList = std.DoublyLinkedList;

    /// Allocates `config.httpMaxConcurrent()` connections up front and parks
    /// them all on the available list.
    pub fn init(
        allocator: Allocator,
        ca_blob: ?libcurl.CurlBlob,
        config: *const Config,
    ) !Handles {
        const count: usize = config.httpMaxConcurrent();
        if (count == 0) return error.InvalidMaxConcurrent;

        const multi = libcurl.curl_multi_init() orelse return error.FailedToInitializeMulti;
        errdefer libcurl.curl_multi_cleanup(multi) catch {};

        try libcurl.curl_multi_setopt(multi, .max_host_connections, config.httpMaxHostOpen());

        const connections = try allocator.alloc(Connection, count);
        errdefer allocator.free(connections);

        // Bug fix: if a later Connection.init fails, the easy handles
        // created so far must be cleaned up; the original leaked them.
        var initialized: usize = 0;
        errdefer for (connections[0..initialized]) |*conn| conn.deinit();

        var available: HandleList = .{};
        for (connections) |*conn| {
            conn.* = try Connection.init(ca_blob, config);
            initialized += 1;
            available.append(&conn.node);
        }

        return .{
            .dirty = .{},
            .in_use = .{},
            .connections = connections,
            .available = available,
            .multi = multi,
        };
    }

    pub fn deinit(self: *Handles, allocator: Allocator) void {
        for (self.connections) |*conn| {
            conn.deinit();
        }
        allocator.free(self.connections);
        libcurl.curl_multi_cleanup(self.multi) catch {};
    }

    pub fn hasAvailable(self: *const Handles) bool {
        return self.available.first != null;
    }

    /// Pops an available connection (moving it to in_use), or null if the
    /// pool is exhausted.
    pub fn get(self: *Handles) ?*Connection {
        if (self.available.popFirst()) |node| {
            self.in_use.append(node);
            return @as(*Connection, @fieldParentPtr("node", node));
        }
        return null;
    }

    pub fn add(self: *Handles, conn: *const Connection) !void {
        try libcurl.curl_multi_add_handle(self.multi, conn.easy);
    }

    /// Detaches the connection from the multi handle. If removal fails (can
    /// happen while inside a perform() call), the connection is parked on
    /// the dirty list and removal is retried on the next perform().
    pub fn remove(self: *Handles, conn: *Connection) void {
        if (libcurl.curl_multi_remove_handle(self.multi, conn.easy)) {
            self.isAvailable(conn);
        } else |err| {
            const node = &conn.node;
            self.in_use.remove(node);
            self.dirty.append(node);
            log.warn(.http, "multi remove handle", .{ .err = err });
        }
    }

    /// Moves a connection from in_use back onto the available list.
    pub fn isAvailable(self: *Handles, conn: *Connection) void {
        const node = &conn.node;
        self.in_use.remove(node);
        self.available.append(node);
    }

    /// Drives the multi handle, then retries removal of any connections
    /// parked on the dirty list. Returns the number of still-running
    /// transfers.
    pub fn perform(self: *Handles) !c_int {
        self.performing = true;
        defer self.performing = false;

        const multi = self.multi;
        var running: c_int = undefined;
        try libcurl.curl_multi_perform(multi, &running);

        while (self.dirty.popFirst()) |node| {
            const conn: *Connection = @fieldParentPtr("node", node);
            if (libcurl.curl_multi_remove_handle(multi, conn.easy)) {
                self.available.append(node);
            } else |err| {
                log.fatal(.http, "multi remove handle", .{ .err = err, .src = "perform" });
                @panic("multi_remove_handle");
            }
        }

        return running;
    }

    pub fn poll(self: *Handles, extra_fds: []libcurl.CurlWaitFd, timeout_ms: c_int) !void {
        try libcurl.curl_multi_poll(self.multi, extra_fds, timeout_ms, null);
    }

    pub const MultiMessage = struct {
        conn: Connection,
        err: ?Error,
    };

    /// Reads the next transfer-completion message from the multi handle, or
    /// null when there are none pending.
    pub fn readMessage(self: *Handles) ?MultiMessage {
        var messages_count: c_int = 0;
        const msg = libcurl.curl_multi_info_read(self.multi, &messages_count) orelse return null;
        return switch (msg.data) {
            .done => |err| .{
                .conn = .{ .easy = msg.easy_handle },
                .err = err,
            },
            else => unreachable,
        };
    }
};
|
||||
|
||||
// libcurl debug hook (see ENABLE_DEBUG): dumps informational text and
// request/response headers to stderr. Returning 0 tells curl to continue.
fn debugCallback(_: *libcurl.Curl, msg_type: libcurl.CurlInfoType, raw: [*c]u8, len: usize, _: *anyopaque) c_int {
    const payload = raw[0..len];
    switch (msg_type) {
        .text => std.debug.print("libcurl [text]: {s}\n", .{payload}),
        .header_out => std.debug.print("libcurl [req-h]: {s}\n", .{payload}),
        .header_in => std.debug.print("libcurl [res-h]: {s}\n", .{payload}),
        // .data_in => std.debug.print("libcurl [res-b]: {s}\n", .{payload}),
        else => std.debug.print("libcurl ?? {d}\n", .{msg_type}),
    }
    return 0;
}
|
||||
@@ -21,721 +21,10 @@ const builtin = @import("builtin");
|
||||
const posix = std.posix;
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const libcurl = @import("sys/libcurl.zig");

const Config = @import("Config.zig");
// Merge-conflict fix: `log` was declared twice (once from "log.zig", once
// from the "lightpanda" module), which is a Zig redeclaration error. Keep
// the "lightpanda" import, consistent with `assert` below.
const log = @import("lightpanda").log;
const assert = @import("lightpanda").assert;

pub const ENABLE_DEBUG = false;
const IS_DEBUG = builtin.mode == .Debug;

pub const Blob = libcurl.CurlBlob;
pub const WaitFd = libcurl.CurlWaitFd;
pub const writefunc_error = libcurl.curl_writefunc_error;

const Error = libcurl.Error;
const ErrorMulti = libcurl.ErrorMulti;
const errorFromCode = libcurl.errorFromCode;
const errorMFromCode = libcurl.errorMFromCode;
const errorCheck = libcurl.errorCheck;
const errorMCheck = libcurl.errorMCheck;
|
||||
|
||||
/// Returns libcurl's human-readable version string as a C string.
pub fn curl_version() [*c]const u8 {
    return libcurl.curl_version();
}
|
||||
|
||||
/// HTTP request methods supported by the client.
/// NOTE(review): the explicit ordinal values suggest the numeric tags are
/// relied upon externally — confirm before reordering.
pub const Method = enum(u8) {
    GET = 0,
    PUT = 1,
    POST = 2,
    DELETE = 3,
    HEAD = 4,
    OPTIONS = 5,
    PATCH = 6,
    PROPFIND = 7,
};
|
||||
|
||||
/// An HTTP header name/value pair. The slices reference memory owned by the
/// producer (caller or underlying curl list); no copies are made here.
pub const Header = struct {
    name: []const u8,
    value: []const u8,
};
|
||||
|
||||
/// Owns a curl slist of outgoing request header lines, plus an optional raw
/// cookie string that is applied separately (via the cookie option).
pub const Headers = struct {
    headers: ?*libcurl.CurlSList,
    cookies: ?[*c]const u8,

    /// Creates the list, seeded with the User-Agent header line.
    pub fn init(user_agent: [:0]const u8) !Headers {
        const seeded = libcurl.curl_slist_append(null, user_agent);
        if (seeded == null) {
            return error.OutOfMemory;
        }
        return .{ .headers = seeded, .cookies = null };
    }

    pub fn deinit(self: *const Headers) void {
        if (self.headers) |list| {
            libcurl.curl_slist_free_all(list);
        }
    }

    /// Appends a full "Name: value" line. curl copies the string.
    pub fn add(self: *Headers, header: [*c]const u8) !void {
        const appended = libcurl.curl_slist_append(self.headers, header);
        if (appended == null) {
            return error.OutOfMemory;
        }
        self.headers = appended;
    }

    // Splits "Name: value" into a Header, trimming spaces/tabs around both
    // parts. Returns null when there is no colon.
    fn parseHeader(header_str: []const u8) ?Header {
        const colon = std.mem.indexOfScalar(u8, header_str, ':') orelse return null;
        return .{
            .name = std.mem.trim(u8, header_str[0..colon], " \t"),
            .value = std.mem.trim(u8, header_str[colon + 1 ..], " \t"),
        };
    }

    pub fn iterator(self: *Headers) Iterator {
        return .{
            .header = self.headers,
            .cookies = self.cookies,
        };
    }

    // Walks the slist, then yields the cookie string (if any) as one final
    // synthetic "Cookie" header.
    const Iterator = struct {
        header: [*c]libcurl.CurlSList,
        cookies: ?[*c]const u8,

        pub fn next(self: *Iterator) ?Header {
            const node = self.header orelse {
                const cookies = self.cookies orelse return null;
                self.cookies = null;
                return .{ .name = "Cookie", .value = std.mem.span(@as([*:0]const u8, cookies)) };
            };
            self.header = node.*.next;
            return parseHeader(std.mem.span(@as([*:0]const u8, @ptrCast(node.*.data))));
        }
    };
};
|
||||
|
||||
// A response's headers normally live in libcurl's linked list. However a
// response can also be injected via `transfer.fulfill`, in which case the
// headers are a plain []const Http.Header slice. This union exposes one
// iterator API over both representations.
pub const HeaderIterator = union(enum) {
    curl: CurlHeaderIterator,
    list: ListHeaderIterator,

    pub fn next(self: *HeaderIterator) ?Header {
        switch (self.*) {
            inline else => |*it| return it.next(),
        }
    }

    // Walks the easy handle's recorded response headers via
    // curl_easy_nextheader, tracking the previous entry between calls.
    const CurlHeaderIterator = struct {
        conn: *const Connection,
        prev: ?*libcurl.CurlHeader = null,

        pub fn next(self: *CurlHeaderIterator) ?Header {
            const raw = libcurl.curl_easy_nextheader(self.conn.easy, .header, -1, self.prev) orelse return null;
            self.prev = raw;

            const hdr = raw.*;
            return .{
                .name = std.mem.span(hdr.name),
                .value = std.mem.span(hdr.value),
            };
        }
    };

    // Iterates over an injected (in-memory) header list.
    const ListHeaderIterator = struct {
        index: usize = 0,
        list: []const Header,

        pub fn next(self: *ListHeaderIterator) ?Header {
            if (self.index == self.list.len) {
                return null;
            }
            defer self.index += 1;
            return self.list[self.index];
        }
    };
};
|
||||
|
||||
// Result of a single response-header lookup: the header's value plus the
// total number of headers sharing that name (mirrors libcurl's `amount`).
const HeaderValue = struct {
    value: []const u8,
    amount: usize,
};
|
||||
|
||||
/// A parsed HTTP authentication challenge (WWW-Authenticate /
/// Proxy-Authenticate response header).
pub const AuthChallenge = struct {
    status: u16,
    source: ?enum { server, proxy },
    scheme: ?enum { basic, digest },
    realm: ?[]const u8,

    /// Parses a full header line, e.g. `WWW-Authenticate: Basic realm="x"`.
    /// Returns error.InvalidHeader when there is no ": " separator,
    /// error.InvalidAuthChallenge for an unrelated header name, and
    /// error.UnknownAuthChallengeScheme for schemes other than basic/digest.
    /// NOTE(review): `realm` is never extracted here and stays null —
    /// confirm whether callers parse it separately.
    pub fn parse(status: u16, header: []const u8) !AuthChallenge {
        var ac: AuthChallenge = .{
            .status = status,
            .source = null,
            .realm = null,
            .scheme = null,
        };

        const sep = std.mem.indexOfPos(u8, header, 0, ": ") orelse return error.InvalidHeader;
        const hname = header[0..sep];

        if (std.ascii.eqlIgnoreCase("WWW-Authenticate", hname)) {
            ac.source = .server;
        } else if (std.ascii.eqlIgnoreCase("Proxy-Authenticate", hname)) {
            ac.source = .proxy;
        } else {
            return error.InvalidAuthChallenge;
        }

        // Bug fix: the original searched for the scheme-terminating space in
        // the *trimmed* value but then sliced the *untrimmed* value, so any
        // extra whitespace after ": " shifted the extracted scheme. Trim
        // once and use the trimmed slice for both the search and the slice.
        const hvalue = std.mem.trim(u8, header[sep + 2 ..], std.ascii.whitespace[0..]);
        const pos = std.mem.indexOfPos(u8, hvalue, 0, " ") orelse hvalue.len;
        const scheme_name = hvalue[0..pos];
        if (std.ascii.eqlIgnoreCase(scheme_name, "basic")) {
            ac.scheme = .basic;
        } else if (std.ascii.eqlIgnoreCase(scheme_name, "digest")) {
            ac.scheme = .digest;
        } else {
            return error.UnknownAuthChallengeScheme;
        }

        return ac;
    }
};
|
||||
|
||||
/// Snapshot of a response's status line and key metadata, captured from the
/// easy handle (or from an injected response).
pub const ResponseHead = struct {
    pub const MAX_CONTENT_TYPE_LEN = 64;

    status: u16,
    url: [*c]const u8,
    redirect_count: u32,
    // Length of the valid portion of _content_type; 0 means "none recorded".
    _content_type_len: usize = 0,
    _content_type: [MAX_CONTENT_TYPE_LEN]u8 = undefined,
    // this is normally an empty list, but if the response is being injected
    // than it'll be populated. It isn't meant to be used directly, but should
    // be used through the transfer.responseHeaderIterator() which abstracts
    // whether the headers are from a live curl easy handle, or injected.
    _injected_headers: []const Header = &.{},

    /// Returns the buffered Content-Type, or null when none was recorded.
    pub fn contentType(self: *ResponseHead) ?[]u8 {
        const n = self._content_type_len;
        if (n == 0) {
            return null;
        }
        return self._content_type[0..n];
    }
};
|
||||
|
||||
/// Initializes libcurl's global state with SSL support enabled.
pub fn globalInit() Error!void {
    try libcurl.curl_global_init(.{ .ssl = true });
}
|
||||
|
||||
/// Releases libcurl's global state. Counterpart to `globalInit`.
pub fn globalDeinit() void {
    libcurl.curl_global_cleanup();
}
|
||||
|
||||
/// Wraps one libcurl "easy" handle. Connections are pooled and re-used by
/// `Handles`, so per-request state (method, body, headers, ...) must be
/// re-set before each use.
pub const Connection = struct {
    easy: *libcurl.Curl,
    node: Handles.HandleList.Node = .{},

    /// Creates and configures an easy handle. When `ca_blob_` is null, TLS
    /// verification is disabled (only valid when the config agrees).
    pub fn init(
        ca_blob_: ?libcurl.CurlBlob,
        config: *const Config,
    ) !Connection {
        const easy = libcurl.curl_easy_init() orelse return error.FailedToInitializeEasy;
        errdefer libcurl.curl_easy_cleanup(easy);

        // timeouts
        try libcurl.curl_easy_setopt(easy, .timeout_ms, config.httpTimeout());
        try libcurl.curl_easy_setopt(easy, .connect_timeout_ms, config.httpConnectTimeout());

        // redirect behavior
        try libcurl.curl_easy_setopt(easy, .max_redirs, config.httpMaxRedirects());
        try libcurl.curl_easy_setopt(easy, .follow_location, 2);
        try libcurl.curl_easy_setopt(easy, .redir_protocols_str, "HTTP,HTTPS"); // remove FTP and FTPS from the default

        // proxy
        const http_proxy = config.httpProxy();
        if (http_proxy) |proxy| {
            try libcurl.curl_easy_setopt(easy, .proxy, proxy.ptr);
        }

        // tls
        if (ca_blob_) |ca_blob| {
            try libcurl.curl_easy_setopt(easy, .ca_info_blob, ca_blob);
            if (http_proxy != null) {
                try libcurl.curl_easy_setopt(easy, .proxy_ca_info_blob, ca_blob);
            }
        } else {
            assert(config.tlsVerifyHost() == false, "Http.init tls_verify_host", .{});

            try libcurl.curl_easy_setopt(easy, .ssl_verify_host, false);
            try libcurl.curl_easy_setopt(easy, .ssl_verify_peer, false);

            if (http_proxy != null) {
                try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_host, false);
                try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_peer, false);
            }
        }

        // compression, don't remove this. CloudFront will send gzip content
        // even if we don't support it, and then it won't be decompressed.
        // empty string means: use whatever's available
        try libcurl.curl_easy_setopt(easy, .accept_encoding, "");

        // debug
        if (comptime ENABLE_DEBUG) {
            try libcurl.curl_easy_setopt(easy, .verbose, true);

            // Sometimes the default debug output hides some useful data. You
            // can uncomment the following line (BUT KEEP THE LINE ABOVE
            // AS-IS) to get more control over the data (specifically,
            // `CURLINFO_TEXT` can include useful data).

            // try libcurl.curl_easy_setopt(easy, .debug_function, debugCallback);
        }

        return .{ .easy = easy };
    }

    pub fn deinit(self: *const Connection) void {
        libcurl.curl_easy_cleanup(self.easy);
    }

    pub fn setURL(self: *const Connection, url: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .url, url.ptr);
    }

    // A libcurl request has 2 methods. The first controls how libcurl
    // behaves, most notably across redirects: a POST answered by a 301
    // becomes a GET, while a POST answered by a 308 stays a POST (and the
    // body is re-sent). The second method is the literal string written into
    // the request headers. The two can disagree: curl can behave like a GET
    // while sending "POST" in the header.
    //
    // Here we only set the 2nd (string) method; the behavioral one is set
    // elsewhere based on whether the request has a body.
    //
    // Because connections are re-used, this must be re-set for every use.
    // Otherwise, after a HEAD request, a re-used connection would keep
    // sending HEAD. (Whether resetting the 1st method is equally necessary
    // is unclear, but we reset it too, to be safe.)
    pub fn setMethod(self: *const Connection, method: Method) !void {
        const verb: [:0]const u8 = switch (method) {
            .GET => "GET",
            .POST => "POST",
            .PUT => "PUT",
            .DELETE => "DELETE",
            .HEAD => "HEAD",
            .OPTIONS => "OPTIONS",
            .PATCH => "PATCH",
            .PROPFIND => "PROPFIND",
        };
        try libcurl.curl_easy_setopt(self.easy, .custom_request, verb.ptr);
    }

    pub fn setBody(self: *const Connection, body: []const u8) !void {
        const easy = self.easy;
        try libcurl.curl_easy_setopt(easy, .post, true);
        try libcurl.curl_easy_setopt(easy, .post_field_size, body.len);
        try libcurl.curl_easy_setopt(easy, .copy_post_fields, body.ptr);
    }

    pub fn setGetMode(self: *const Connection) !void {
        try libcurl.curl_easy_setopt(self.easy, .http_get, true);
    }

    pub fn setHeaders(self: *const Connection, headers: *Headers) !void {
        try libcurl.curl_easy_setopt(self.easy, .http_header, headers.headers);
    }

    pub fn setCookies(self: *const Connection, cookies: [*c]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .cookie, cookies);
    }

    pub fn setPrivate(self: *const Connection, ptr: *anyopaque) !void {
        try libcurl.curl_easy_setopt(self.easy, .private, ptr);
    }

    pub fn setProxyCredentials(self: *const Connection, creds: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .proxy_user_pwd, creds.ptr);
    }

    pub fn setCredentials(self: *const Connection, creds: [:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .user_pwd, creds.ptr);
    }

    pub fn setCallbacks(
        self: *const Connection,
        comptime header_cb: libcurl.CurlHeaderFunction,
        comptime data_cb: libcurl.CurlWriteFunction,
    ) !void {
        const easy = self.easy;
        // the easy handle itself is the userdata handed to both callbacks
        try libcurl.curl_easy_setopt(easy, .header_data, easy);
        try libcurl.curl_easy_setopt(easy, .header_function, header_cb);
        try libcurl.curl_easy_setopt(easy, .write_data, easy);
        try libcurl.curl_easy_setopt(easy, .write_function, data_cb);
    }

    pub fn setProxy(self: *const Connection, proxy: ?[*:0]const u8) !void {
        try libcurl.curl_easy_setopt(self.easy, .proxy, proxy);
    }

    pub fn setTlsVerify(self: *const Connection, verify: bool, use_proxy: bool) !void {
        const easy = self.easy;
        try libcurl.curl_easy_setopt(easy, .ssl_verify_host, verify);
        try libcurl.curl_easy_setopt(easy, .ssl_verify_peer, verify);
        if (use_proxy) {
            try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_host, verify);
            try libcurl.curl_easy_setopt(easy, .proxy_ssl_verify_peer, verify);
        }
    }

    pub fn getEffectiveUrl(self: *const Connection) ![*c]const u8 {
        var url: [*c]u8 = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .effective_url, &url);
        return url;
    }

    /// Returns the HTTP status, or 0 when curl reports a value outside u16.
    pub fn getResponseCode(self: *const Connection) !u16 {
        var code: c_long = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .response_code, &code);
        if (code < 0 or code > std.math.maxInt(u16)) {
            return 0;
        }
        return @intCast(code);
    }

    pub fn getRedirectCount(self: *const Connection) !u32 {
        var count: c_long = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .redirect_count, &count);
        return @intCast(count);
    }

    /// Looks up the index'th response header named `name`; returns null when
    /// absent or when the curl lookup fails (which is logged, not propagated).
    pub fn getResponseHeader(self: *const Connection, name: [:0]const u8, index: usize) ?HeaderValue {
        var hdr: ?*libcurl.CurlHeader = null;
        libcurl.curl_easy_header(self.easy, name, index, .header, -1, &hdr) catch |err| {
            // ErrorHeader includes OutOfMemory — rare but real errors from
            // curl internals. Logged and returned as null since callers
            // don't expect errors.
            log.err(.http, "get response header", .{
                .name = name,
                .err = err,
            });
            return null;
        };
        const found = hdr orelse return null;
        return .{
            .amount = found.amount,
            .value = std.mem.span(found.value),
        };
    }

    pub fn getPrivate(self: *const Connection) !*anyopaque {
        var private: *anyopaque = undefined;
        try libcurl.curl_easy_getinfo(self.easy, .private, &private);
        return private;
    }

    // Headers that must not be exposed to users for interception.
    pub fn secretHeaders(_: *const Connection, headers: *Headers, http_headers: *const Config.HttpHeaders) !void {
        if (http_headers.proxy_bearer_header) |hdr| {
            try headers.add(hdr);
        }
    }

    /// Performs a blocking request on this handle and returns the status code.
    pub fn request(self: *const Connection, http_headers: *const Config.HttpHeaders) !u16 {
        var header_list = try Headers.init(http_headers.user_agent_header);
        defer header_list.deinit();
        try self.secretHeaders(&header_list, http_headers);
        try self.setHeaders(&header_list);

        // Add cookies.
        if (header_list.cookies) |cookies| {
            try self.setCookies(cookies);
        }

        try libcurl.curl_easy_perform(self.easy);
        return self.getResponseCode();
    }
};
|
||||
|
||||
pub const Handles = struct {
|
||||
connections: []Connection,
|
||||
dirty: HandleList,
|
||||
in_use: HandleList,
|
||||
available: HandleList,
|
||||
multi: *libcurl.CurlM,
|
||||
performing: bool = false,
|
||||
|
||||
pub const HandleList = std.DoublyLinkedList;
|
||||
|
||||
pub fn init(
|
||||
allocator: Allocator,
|
||||
ca_blob: ?libcurl.CurlBlob,
|
||||
config: *const Config,
|
||||
) !Handles {
|
||||
const count: usize = config.httpMaxConcurrent();
|
||||
if (count == 0) return error.InvalidMaxConcurrent;
|
||||
|
||||
const multi = libcurl.curl_multi_init() orelse return error.FailedToInitializeMulti;
|
||||
errdefer libcurl.curl_multi_cleanup(multi) catch {};
|
||||
|
||||
try libcurl.curl_multi_setopt(multi, .max_host_connections, config.httpMaxHostOpen());
|
||||
|
||||
const connections = try allocator.alloc(Connection, count);
|
||||
errdefer allocator.free(connections);
|
||||
|
||||
var available: HandleList = .{};
|
||||
for (0..count) |i| {
|
||||
connections[i] = try Connection.init(ca_blob, config);
|
||||
available.append(&connections[i].node);
|
||||
}
|
||||
|
||||
return .{
|
||||
.dirty = .{},
|
||||
.in_use = .{},
|
||||
.connections = connections,
|
||||
.available = available,
|
||||
.multi = multi,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Handles, allocator: Allocator) void {
|
||||
for (self.connections) |*conn| {
|
||||
conn.deinit();
|
||||
}
|
||||
allocator.free(self.connections);
|
||||
libcurl.curl_multi_cleanup(self.multi) catch {};
|
||||
}
|
||||
|
||||
pub fn hasAvailable(self: *const Handles) bool {
|
||||
return self.available.first != null;
|
||||
}
|
||||
|
||||
pub fn get(self: *Handles) ?*Connection {
|
||||
if (self.available.popFirst()) |node| {
|
||||
self.in_use.append(node);
|
||||
return @as(*Connection, @fieldParentPtr("node", node));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn add(self: *Handles, conn: *const Connection) !void {
|
||||
try libcurl.curl_multi_add_handle(self.multi, conn.easy);
|
||||
}
|
||||
|
||||
pub fn remove(self: *Handles, conn: *Connection) void {
|
||||
if (libcurl.curl_multi_remove_handle(self.multi, conn.easy)) {
|
||||
self.isAvailable(conn);
|
||||
} else |err| {
|
||||
// can happen if we're in a perform() call, so we'll queue this
|
||||
// for cleanup later.
|
||||
const node = &conn.node;
|
||||
self.in_use.remove(node);
|
||||
self.dirty.append(node);
|
||||
log.warn(.http, "multi remove handle", .{ .err = err });
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isAvailable(self: *Handles, conn: *Connection) void {
|
||||
const node = &conn.node;
|
||||
self.in_use.remove(node);
|
||||
self.available.append(node);
|
||||
}
|
||||
|
||||
pub fn perform(self: *Handles) !c_int {
|
||||
self.performing = true;
|
||||
defer self.performing = false;
|
||||
|
||||
const multi = self.multi;
|
||||
var running: c_int = undefined;
|
||||
try libcurl.curl_multi_perform(self.multi, &running);
|
||||
|
||||
{
|
||||
const list = &self.dirty;
|
||||
while (list.first) |node| {
|
||||
list.remove(node);
|
||||
const conn: *Connection = @fieldParentPtr("node", node);
|
||||
if (libcurl.curl_multi_remove_handle(multi, conn.easy)) {
|
||||
self.available.append(node);
|
||||
} else |err| {
|
||||
log.fatal(.http, "multi remove handle", .{ .err = err, .src = "perform" });
|
||||
@panic("multi_remove_handle");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return running;
|
||||
}
|
||||
|
||||
pub fn poll(self: *Handles, extra_fds: []libcurl.CurlWaitFd, timeout_ms: c_int) !void {
|
||||
try libcurl.curl_multi_poll(self.multi, extra_fds, timeout_ms, null);
|
||||
}
|
||||
|
||||
pub const MultiMessage = struct {
|
||||
conn: Connection,
|
||||
err: ?Error,
|
||||
};
|
||||
|
||||
pub fn readMessage(self: *Handles) ?MultiMessage {
|
||||
var messages_count: c_int = 0;
|
||||
const msg = libcurl.curl_multi_info_read(self.multi, &messages_count) orelse return null;
|
||||
return switch (msg.data) {
|
||||
.done => |err| .{
|
||||
.conn = .{ .easy = msg.easy_handle },
|
||||
.err = err,
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
// TODO: on BSD / Linux, we could just read the PEM file directly.
|
||||
// This whole rescan + decode is really just needed for MacOS. On Linux
|
||||
// bundle.rescan does find the .pem file(s) which could be in a few different
|
||||
// places, so it's still useful, just not efficient.
|
||||
pub fn loadCerts(allocator: Allocator) !libcurl.CurlBlob {
|
||||
var bundle: std.crypto.Certificate.Bundle = .{};
|
||||
try bundle.rescan(allocator);
|
||||
defer bundle.deinit(allocator);
|
||||
|
||||
const bytes = bundle.bytes.items;
|
||||
if (bytes.len == 0) {
|
||||
log.warn(.app, "No system certificates", .{});
|
||||
return .{
|
||||
.len = 0,
|
||||
.flags = 0,
|
||||
.data = bytes.ptr,
|
||||
};
|
||||
}
|
||||
|
||||
const encoder = std.base64.standard.Encoder;
|
||||
var arr: std.ArrayList(u8) = .empty;
|
||||
|
||||
const encoded_size = encoder.calcSize(bytes.len);
|
||||
const buffer_size = encoded_size +
|
||||
(bundle.map.count() * 75) + // start / end per certificate + extra, just in case
|
||||
(encoded_size / 64) // newline per 64 characters
|
||||
;
|
||||
try arr.ensureTotalCapacity(allocator, buffer_size);
|
||||
errdefer arr.deinit(allocator);
|
||||
var writer = arr.writer(allocator);
|
||||
|
||||
var it = bundle.map.valueIterator();
|
||||
while (it.next()) |index| {
|
||||
const cert = try std.crypto.Certificate.der.Element.parse(bytes, index.*);
|
||||
|
||||
try writer.writeAll("-----BEGIN CERTIFICATE-----\n");
|
||||
var line_writer = LineWriter{ .inner = writer };
|
||||
try encoder.encodeWriter(&line_writer, bytes[index.*..cert.slice.end]);
|
||||
try writer.writeAll("\n-----END CERTIFICATE-----\n");
|
||||
}
|
||||
|
||||
// Final encoding should not be larger than our initial size estimate
|
||||
assert(buffer_size > arr.items.len, "Http loadCerts", .{ .estimate = buffer_size, .len = arr.items.len });
|
||||
|
||||
// Allocate exactly the size needed and copy the data
|
||||
const result = try allocator.dupe(u8, arr.items);
|
||||
// Free the original oversized allocation
|
||||
arr.deinit(allocator);
|
||||
|
||||
return .{
|
||||
.len = result.len,
|
||||
.data = result.ptr,
|
||||
.flags = 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Wraps lines @ 64 columns. A PEM is basically a base64 encoded DER (which is
|
||||
// what Zig has), with lines wrapped at 64 characters and with a basic header
|
||||
// and footer
|
||||
const LineWriter = struct {
|
||||
col: usize = 0,
|
||||
inner: std.ArrayList(u8).Writer,
|
||||
|
||||
pub fn writeAll(self: *LineWriter, data: []const u8) !void {
|
||||
var writer = self.inner;
|
||||
|
||||
var col = self.col;
|
||||
const len = 64 - col;
|
||||
|
||||
var remain = data;
|
||||
if (remain.len > len) {
|
||||
col = 0;
|
||||
try writer.writeAll(data[0..len]);
|
||||
try writer.writeByte('\n');
|
||||
remain = data[len..];
|
||||
}
|
||||
|
||||
while (remain.len > 64) {
|
||||
try writer.writeAll(remain[0..64]);
|
||||
try writer.writeByte('\n');
|
||||
remain = data[len..];
|
||||
}
|
||||
try writer.writeAll(remain);
|
||||
self.col = col + remain.len;
|
||||
}
|
||||
};
|
||||
|
||||
fn debugCallback(_: *libcurl.Curl, msg_type: libcurl.CurlInfoType, raw: [*c]u8, len: usize, _: *anyopaque) c_int {
|
||||
const data = raw[0..len];
|
||||
switch (msg_type) {
|
||||
.text => std.debug.print("libcurl [text]: {s}\n", .{data}),
|
||||
.header_out => std.debug.print("libcurl [req-h]: {s}\n", .{data}),
|
||||
.header_in => std.debug.print("libcurl [res-h]: {s}\n", .{data}),
|
||||
// .data_in => std.debug.print("libcurl [res-b]: {s}\n", .{data}),
|
||||
else => std.debug.print("libcurl ?? {d}\n", .{msg_type}),
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Zig is in a weird backend transition right now. Need to determine if
|
||||
// SIMD is even available.
|
||||
const backend_supports_vectors = switch (builtin.zig_backend) {
|
||||
.stage2_llvm, .stage2_c => true,
|
||||
else => false,
|
||||
};
|
||||
|
||||
// Websocket messages from client->server are masked using a 4 byte XOR mask
|
||||
fn mask(m: []const u8, payload: []u8) void {
|
||||
var data = payload;
|
||||
|
||||
if (!comptime backend_supports_vectors) return simpleMask(m, data);
|
||||
|
||||
const vector_size = std.simd.suggestVectorLength(u8) orelse @sizeOf(usize);
|
||||
if (data.len >= vector_size) {
|
||||
const mask_vector = std.simd.repeat(vector_size, @as(@Vector(4, u8), m[0..4].*));
|
||||
while (data.len >= vector_size) {
|
||||
const slice = data[0..vector_size];
|
||||
const masked_data_slice: @Vector(vector_size, u8) = slice.*;
|
||||
slice.* = masked_data_slice ^ mask_vector;
|
||||
data = data[vector_size..];
|
||||
}
|
||||
}
|
||||
simpleMask(m, data);
|
||||
}
|
||||
|
||||
// Used when SIMD isn't available, or for any remaining part of the message
|
||||
// which is too small to effectively use SIMD.
|
||||
fn simpleMask(m: []const u8, payload: []u8) void {
|
||||
for (payload, 0..) |b, i| {
|
||||
payload[i] = b ^ m[i & 3];
|
||||
}
|
||||
}
|
||||
const CDP_MAX_MESSAGE_SIZE = @import("../Config.zig").CDP_MAX_MESSAGE_SIZE;
|
||||
|
||||
const Fragments = struct {
|
||||
type: Message.Type,
|
||||
@@ -763,76 +52,6 @@ const OpCode = enum(u8) {
|
||||
pong = 128 | 10,
|
||||
};
|
||||
|
||||
fn fillWebsocketHeader(buf: std.ArrayList(u8)) []const u8 {
|
||||
// can't use buf[0..10] here, because the header length
|
||||
// is variable. If it's just 2 bytes, for example, we need the
|
||||
// framed message to be:
|
||||
// h1, h2, data
|
||||
// If we use buf[0..10], we'd get:
|
||||
// h1, h2, 0, 0, 0, 0, 0, 0, 0, 0, data
|
||||
|
||||
var header_buf: [10]u8 = undefined;
|
||||
|
||||
// -10 because we reserved 10 bytes for the header above
|
||||
const header = websocketHeader(&header_buf, .text, buf.items.len - 10);
|
||||
const start = 10 - header.len;
|
||||
|
||||
const message = buf.items;
|
||||
@memcpy(message[start..10], header);
|
||||
return message[start..];
|
||||
}
|
||||
|
||||
// makes the assumption that our caller reserved the first
|
||||
// 10 bytes for the header
|
||||
fn websocketHeader(buf: []u8, op_code: OpCode, payload_len: usize) []const u8 {
|
||||
assert(buf.len == 10, "Websocket.Header", .{ .len = buf.len });
|
||||
|
||||
const len = payload_len;
|
||||
buf[0] = 128 | @intFromEnum(op_code); // fin | opcode
|
||||
|
||||
if (len <= 125) {
|
||||
buf[1] = @intCast(len);
|
||||
return buf[0..2];
|
||||
}
|
||||
|
||||
if (len < 65536) {
|
||||
buf[1] = 126;
|
||||
buf[2] = @intCast((len >> 8) & 0xFF);
|
||||
buf[3] = @intCast(len & 0xFF);
|
||||
return buf[0..4];
|
||||
}
|
||||
|
||||
buf[1] = 127;
|
||||
buf[2] = 0;
|
||||
buf[3] = 0;
|
||||
buf[4] = 0;
|
||||
buf[5] = 0;
|
||||
buf[6] = @intCast((len >> 24) & 0xFF);
|
||||
buf[7] = @intCast((len >> 16) & 0xFF);
|
||||
buf[8] = @intCast((len >> 8) & 0xFF);
|
||||
buf[9] = @intCast(len & 0xFF);
|
||||
return buf[0..10];
|
||||
}
|
||||
|
||||
fn growBuffer(allocator: Allocator, buf: []u8, required_capacity: usize) ![]u8 {
|
||||
// from std.ArrayList
|
||||
var new_capacity = buf.len;
|
||||
while (true) {
|
||||
new_capacity +|= new_capacity / 2 + 8;
|
||||
if (new_capacity >= required_capacity) break;
|
||||
}
|
||||
|
||||
log.debug(.app, "CDP buffer growth", .{ .from = buf.len, .to = new_capacity });
|
||||
|
||||
if (allocator.resize(buf, new_capacity)) {
|
||||
return buf.ptr[0..new_capacity];
|
||||
}
|
||||
const new_buffer = try allocator.alloc(u8, new_capacity);
|
||||
@memcpy(new_buffer[0..buf.len], buf);
|
||||
allocator.free(buf);
|
||||
return new_buffer;
|
||||
}
|
||||
|
||||
// WebSocket message reader. Given websocket message, acts as an iterator that
|
||||
// can return zero or more Messages. When next returns null, any incomplete
|
||||
// message will remain in reader.data
|
||||
@@ -932,7 +151,7 @@ pub fn Reader(comptime EXPECT_MASK: bool) type {
|
||||
if (message_len > 125) {
|
||||
return error.ControlTooLarge;
|
||||
}
|
||||
} else if (message_len > Config.CDP_MAX_MESSAGE_SIZE) {
|
||||
} else if (message_len > CDP_MAX_MESSAGE_SIZE) {
|
||||
return error.TooLarge;
|
||||
} else if (message_len > self.buf.len) {
|
||||
const len = self.buf.len;
|
||||
@@ -960,7 +179,7 @@ pub fn Reader(comptime EXPECT_MASK: bool) type {
|
||||
|
||||
if (is_continuation) {
|
||||
const fragments = &(self.fragments orelse return error.InvalidContinuation);
|
||||
if (fragments.message.items.len + message_len > Config.CDP_MAX_MESSAGE_SIZE) {
|
||||
if (fragments.message.items.len + message_len > CDP_MAX_MESSAGE_SIZE) {
|
||||
return error.TooLarge;
|
||||
}
|
||||
|
||||
@@ -1086,14 +305,6 @@ pub fn Reader(comptime EXPECT_MASK: bool) type {
|
||||
};
|
||||
}
|
||||
|
||||
// In-place string lowercase
|
||||
fn toLower(str: []u8) []u8 {
|
||||
for (str, 0..) |ch, i| {
|
||||
str[i] = std.ascii.toLower(ch);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
pub const WsConnection = struct {
|
||||
// CLOSE, 2 length, code
|
||||
const CLOSE_NORMAL = [_]u8{ 136, 2, 3, 232 }; // code: 1000
|
||||
@@ -1385,6 +596,118 @@ pub const WsConnection = struct {
|
||||
}
|
||||
};
|
||||
|
||||
fn fillWebsocketHeader(buf: std.ArrayList(u8)) []const u8 {
|
||||
// can't use buf[0..10] here, because the header length
|
||||
// is variable. If it's just 2 bytes, for example, we need the
|
||||
// framed message to be:
|
||||
// h1, h2, data
|
||||
// If we use buf[0..10], we'd get:
|
||||
// h1, h2, 0, 0, 0, 0, 0, 0, 0, 0, data
|
||||
|
||||
var header_buf: [10]u8 = undefined;
|
||||
|
||||
// -10 because we reserved 10 bytes for the header above
|
||||
const header = websocketHeader(&header_buf, .text, buf.items.len - 10);
|
||||
const start = 10 - header.len;
|
||||
|
||||
const message = buf.items;
|
||||
@memcpy(message[start..10], header);
|
||||
return message[start..];
|
||||
}
|
||||
|
||||
// makes the assumption that our caller reserved the first
|
||||
// 10 bytes for the header
|
||||
fn websocketHeader(buf: []u8, op_code: OpCode, payload_len: usize) []const u8 {
|
||||
assert(buf.len == 10, "Websocket.Header", .{ .len = buf.len });
|
||||
|
||||
const len = payload_len;
|
||||
buf[0] = 128 | @intFromEnum(op_code); // fin | opcode
|
||||
|
||||
if (len <= 125) {
|
||||
buf[1] = @intCast(len);
|
||||
return buf[0..2];
|
||||
}
|
||||
|
||||
if (len < 65536) {
|
||||
buf[1] = 126;
|
||||
buf[2] = @intCast((len >> 8) & 0xFF);
|
||||
buf[3] = @intCast(len & 0xFF);
|
||||
return buf[0..4];
|
||||
}
|
||||
|
||||
buf[1] = 127;
|
||||
buf[2] = 0;
|
||||
buf[3] = 0;
|
||||
buf[4] = 0;
|
||||
buf[5] = 0;
|
||||
buf[6] = @intCast((len >> 24) & 0xFF);
|
||||
buf[7] = @intCast((len >> 16) & 0xFF);
|
||||
buf[8] = @intCast((len >> 8) & 0xFF);
|
||||
buf[9] = @intCast(len & 0xFF);
|
||||
return buf[0..10];
|
||||
}
|
||||
|
||||
fn growBuffer(allocator: Allocator, buf: []u8, required_capacity: usize) ![]u8 {
|
||||
// from std.ArrayList
|
||||
var new_capacity = buf.len;
|
||||
while (true) {
|
||||
new_capacity +|= new_capacity / 2 + 8;
|
||||
if (new_capacity >= required_capacity) break;
|
||||
}
|
||||
|
||||
log.debug(.app, "CDP buffer growth", .{ .from = buf.len, .to = new_capacity });
|
||||
|
||||
if (allocator.resize(buf, new_capacity)) {
|
||||
return buf.ptr[0..new_capacity];
|
||||
}
|
||||
const new_buffer = try allocator.alloc(u8, new_capacity);
|
||||
@memcpy(new_buffer[0..buf.len], buf);
|
||||
allocator.free(buf);
|
||||
return new_buffer;
|
||||
}
|
||||
|
||||
// In-place string lowercase
|
||||
fn toLower(str: []u8) []u8 {
|
||||
for (str, 0..) |ch, i| {
|
||||
str[i] = std.ascii.toLower(ch);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
// Used when SIMD isn't available, or for any remaining part of the message
|
||||
// which is too small to effectively use SIMD.
|
||||
fn simpleMask(m: []const u8, payload: []u8) void {
|
||||
for (payload, 0..) |b, i| {
|
||||
payload[i] = b ^ m[i & 3];
|
||||
}
|
||||
}
|
||||
|
||||
// Zig is in a weird backend transition right now. Need to determine if
|
||||
// SIMD is even available.
|
||||
const backend_supports_vectors = switch (builtin.zig_backend) {
|
||||
.stage2_llvm, .stage2_c => true,
|
||||
else => false,
|
||||
};
|
||||
|
||||
// Websocket messages from client->server are masked using a 4 byte XOR mask
|
||||
fn mask(m: []const u8, payload: []u8) void {
|
||||
var data = payload;
|
||||
|
||||
if (!comptime backend_supports_vectors) return simpleMask(m, data);
|
||||
|
||||
const vector_size = std.simd.suggestVectorLength(u8) orelse @sizeOf(usize);
|
||||
if (data.len >= vector_size) {
|
||||
const mask_vector = std.simd.repeat(vector_size, @as(@Vector(4, u8), m[0..4].*));
|
||||
while (data.len >= vector_size) {
|
||||
const slice = data[0..vector_size];
|
||||
const masked_data_slice: @Vector(vector_size, u8) = slice.*;
|
||||
slice.* = masked_data_slice ^ mask_vector;
|
||||
data = data[vector_size..];
|
||||
}
|
||||
}
|
||||
simpleMask(m, data);
|
||||
}
|
||||
|
||||
const testing = std.testing;
|
||||
|
||||
test "mask" {
|
||||
@@ -41,6 +41,20 @@ pub const CurlHeaderFunction = fn ([*]const u8, usize, usize, *anyopaque) usize;
|
||||
pub const CurlWriteFunction = fn ([*]const u8, usize, usize, *anyopaque) usize;
|
||||
pub const curl_writefunc_error: usize = c.CURL_WRITEFUNC_ERROR;
|
||||
|
||||
pub const FreeCallback = fn (ptr: ?*anyopaque) void;
|
||||
pub const StrdupCallback = fn (str: [*:0]const u8) ?[*:0]u8;
|
||||
pub const MallocCallback = fn (size: usize) ?*anyopaque;
|
||||
pub const CallocCallback = fn (nmemb: usize, size: usize) ?*anyopaque;
|
||||
pub const ReallocCallback = fn (ptr: ?*anyopaque, size: usize) ?*anyopaque;
|
||||
|
||||
pub const CurlAllocator = struct {
|
||||
free: FreeCallback,
|
||||
strdup: StrdupCallback,
|
||||
malloc: MallocCallback,
|
||||
calloc: CallocCallback,
|
||||
realloc: ReallocCallback,
|
||||
};
|
||||
|
||||
pub const CurlGlobalFlags = packed struct(u8) {
|
||||
ssl: bool = false,
|
||||
_reserved: u7 = 0,
|
||||
@@ -449,8 +463,41 @@ pub const CurlMsg = struct {
|
||||
data: CurlMsgData,
|
||||
};
|
||||
|
||||
pub fn curl_global_init(flags: CurlGlobalFlags) Error!void {
|
||||
try errorCheck(c.curl_global_init(flags.to_c()));
|
||||
pub fn curl_global_init(flags: CurlGlobalFlags, comptime curl_allocator: ?CurlAllocator) Error!void {
|
||||
const alloc = curl_allocator orelse {
|
||||
return errorCheck(c.curl_global_init(flags.to_c()));
|
||||
};
|
||||
|
||||
// The purpose of these wrappers is to hide callconv
|
||||
// and provide an easy place to add logging when debugging.
|
||||
const free = struct {
|
||||
fn cb(ptr: ?*anyopaque) callconv(.c) void {
|
||||
alloc.free(ptr);
|
||||
}
|
||||
}.cb;
|
||||
const strdup = struct {
|
||||
fn cb(str: [*c]const u8) callconv(.c) [*c]u8 {
|
||||
const s: [*:0]const u8 = @ptrCast(str orelse return null);
|
||||
return @ptrCast(alloc.strdup(s));
|
||||
}
|
||||
}.cb;
|
||||
const malloc = struct {
|
||||
fn cb(size: usize) callconv(.c) ?*anyopaque {
|
||||
return alloc.malloc(size);
|
||||
}
|
||||
}.cb;
|
||||
const calloc = struct {
|
||||
fn cb(nmemb: usize, size: usize) callconv(.c) ?*anyopaque {
|
||||
return alloc.calloc(nmemb, size);
|
||||
}
|
||||
}.cb;
|
||||
const realloc = struct {
|
||||
fn cb(ptr: ?*anyopaque, size: usize) callconv(.c) ?*anyopaque {
|
||||
return alloc.realloc(ptr, size);
|
||||
}
|
||||
}.cb;
|
||||
|
||||
try errorCheck(c.curl_global_init_mem(flags.to_c(), malloc, free, realloc, strdup, calloc));
|
||||
}
|
||||
|
||||
pub fn curl_global_cleanup() void {
|
||||
|
||||
@@ -7,9 +7,9 @@ const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../App.zig");
|
||||
const Net = @import("../Net.zig");
|
||||
const Config = @import("../Config.zig");
|
||||
const telemetry = @import("telemetry.zig");
|
||||
const Connection = @import("../network/http.zig").Connection;
|
||||
|
||||
const URL = "https://telemetry.lightpanda.io";
|
||||
const MAX_BATCH_SIZE = 20;
|
||||
@@ -20,13 +20,13 @@ pub const LightPanda = struct {
|
||||
allocator: Allocator,
|
||||
mutex: std.Thread.Mutex,
|
||||
cond: Thread.Condition,
|
||||
connection: Net.Connection,
|
||||
connection: Connection,
|
||||
config: *const Config,
|
||||
pending: std.DoublyLinkedList,
|
||||
mem_pool: std.heap.MemoryPool(LightPandaEvent),
|
||||
|
||||
pub fn init(app: *App) !LightPanda {
|
||||
const connection = try app.http.newConnection();
|
||||
const connection = try app.network.newConnection();
|
||||
errdefer connection.deinit();
|
||||
|
||||
try connection.setURL(URL);
|
||||
|
||||
@@ -39,7 +39,7 @@ pub fn reset() void {
|
||||
const App = @import("App.zig");
|
||||
const js = @import("browser/js/js.zig");
|
||||
const Config = @import("Config.zig");
|
||||
const Client = @import("http/Client.zig");
|
||||
const HttpClient = @import("browser/HttpClient.zig");
|
||||
const Page = @import("browser/Page.zig");
|
||||
const Browser = @import("browser/Browser.zig");
|
||||
const Session = @import("browser/Session.zig");
|
||||
@@ -335,7 +335,7 @@ fn isJsonValue(a: std.json.Value, b: std.json.Value) bool {
|
||||
}
|
||||
|
||||
pub var test_app: *App = undefined;
|
||||
pub var test_http: *Client = undefined;
|
||||
pub var test_http: *HttpClient = undefined;
|
||||
pub var test_browser: Browser = undefined;
|
||||
pub var test_notification: *Notification = undefined;
|
||||
pub var test_session: *Session = undefined;
|
||||
@@ -414,15 +414,6 @@ fn runWebApiTest(test_file: [:0]const u8) !void {
|
||||
try_catch.init(&ls.local);
|
||||
defer try_catch.deinit();
|
||||
|
||||
// by default, on load, testing.js will call testing.assertOk(). This makes our
|
||||
// tests work well in a browser. But, for our test runner, we disable that
|
||||
// and call it explicitly. This gives us better error messages.
|
||||
ls.local.eval("window._lightpanda_skip_auto_assert = true;", "auto_assert") catch |err| {
|
||||
const caught = try_catch.caughtOrError(arena_allocator, err);
|
||||
std.debug.print("disable auto assert failure\nError: {f}\n", .{caught});
|
||||
return err;
|
||||
};
|
||||
|
||||
try page.navigate(url, .{});
|
||||
_ = test_session.wait(2000);
|
||||
|
||||
@@ -460,7 +451,7 @@ const log = @import("log.zig");
|
||||
const TestHTTPServer = @import("TestHTTPServer.zig");
|
||||
|
||||
const Server = @import("Server.zig");
|
||||
var test_cdp_server: ?Server = null;
|
||||
var test_cdp_server: ?*Server = null;
|
||||
var test_cdp_server_thread: ?std.Thread = null;
|
||||
var test_http_server: ?TestHTTPServer = null;
|
||||
var test_http_server_thread: ?std.Thread = null;
|
||||
@@ -483,7 +474,7 @@ test "tests:beforeAll" {
|
||||
test_app = try App.init(test_allocator, &test_config);
|
||||
errdefer test_app.deinit();
|
||||
|
||||
test_http = try test_app.http.createClient(test_allocator);
|
||||
test_http = try HttpClient.init(test_allocator, &test_app.network);
|
||||
errdefer test_http.deinit();
|
||||
|
||||
test_browser = try Browser.init(test_app, .{ .http_client = test_http });
|
||||
@@ -509,13 +500,11 @@ test "tests:beforeAll" {
|
||||
}
|
||||
|
||||
test "tests:afterAll" {
|
||||
if (test_cdp_server) |*server| {
|
||||
server.stop();
|
||||
}
|
||||
test_app.network.stop();
|
||||
if (test_cdp_server_thread) |thread| {
|
||||
thread.join();
|
||||
}
|
||||
if (test_cdp_server) |*server| {
|
||||
if (test_cdp_server) |server| {
|
||||
server.deinit();
|
||||
}
|
||||
|
||||
@@ -540,14 +529,14 @@ test "tests:afterAll" {
|
||||
|
||||
fn serveCDP(wg: *std.Thread.WaitGroup) !void {
|
||||
const address = try std.net.Address.parseIp("127.0.0.1", 9583);
|
||||
test_cdp_server = try Server.init(test_app, address);
|
||||
|
||||
wg.finish();
|
||||
|
||||
test_cdp_server.?.run(address, 5) catch |err| {
|
||||
test_cdp_server = Server.init(test_app, address) catch |err| {
|
||||
std.debug.print("CDP server error: {}", .{err});
|
||||
return err;
|
||||
};
|
||||
wg.finish();
|
||||
|
||||
test_app.network.run();
|
||||
}
|
||||
|
||||
fn testHTTPHandler(req: *std.http.Server.Request) !void {
|
||||
|
||||
Reference in New Issue
Block a user