Zig 0.15.1

Depends on https://github.com/lightpanda-io/zig-v8-fork/pull/89
Karl Seguin
2025-08-28 19:42:26 +08:00
parent 94960cc842
commit 1443f38e5f
51 changed files with 508 additions and 525 deletions

View File

@@ -5,7 +5,7 @@ inputs:
   zig:
     description: 'Zig version to install'
     required: false
-    default: '0.14.1'
+    default: '0.15.1'
   arch:
     description: 'CPU arch used to select the v8 lib'
     required: false

View File

@@ -1,7 +1,7 @@
 name: zig-fmt
 env:
-  ZIG_VERSION: 0.14.1
+  ZIG_VERSION: 0.15.1
 on:
   pull_request:

View File

@@ -1,7 +1,7 @@
 FROM debian:stable
 ARG MINISIG=0.12
-ARG ZIG=0.14.1
+ARG ZIG=0.15.1
 ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
 ARG V8=13.6.233.8
 ARG ZIG_V8=v0.1.28

View File

@@ -165,7 +165,7 @@ You can also follow the progress of our Javascript support in our dedicated [zig
 ### Prerequisites
-Lightpanda is written with [Zig](https://ziglang.org/) `0.14.1`. You have to
+Lightpanda is written with [Zig](https://ziglang.org/) `0.15.1`. You have to
 install it with the right version in order to build the project.
 Lightpanda also depends on

View File

@@ -23,7 +23,7 @@ const Build = std.Build;
 /// Do not rename this constant. It is scanned by some scripts to determine
 /// which zig version to install.
-const recommended_zig_version = "0.14.1";
+const recommended_zig_version = "0.15.1";
 pub fn build(b: *Build) !void {
     switch (comptime builtin.zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {

View File

@@ -5,9 +5,9 @@
     .fingerprint = 0xda130f3af836cea0,
     .dependencies = .{
         .v8 = .{
-            .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/cf412d5b3d9d608582571d821e0d552337ef690d.tar.gz",
-            .hash = "v8-0.0.0-xddH69zDAwA4fp1dBo_jEDjS5bhXycPwRlZHp6_X890t",
+            .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/e62726800663d0397d8766f7185040d8b8b69402.tar.gz",
+            .hash = "v8-0.0.0-xddH69zDAwD5i0hGhAsv3SPeihlj5fXGpJyO15KqBWOn",
         },
-        //.v8 = .{ .path = "../zig-v8-fork" },
+        //.v8 = .{ .path = "../zig-v8-fork" }
     },
 }
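
When the fork's commit changes, the .hash value has to change with it. A typical way to refresh both fields is the zig fetch CLI (this is a general Zig workflow, not something specific to this repository; the exact flags are an assumption):

    zig fetch --save=v8 https://github.com/lightpanda-io/zig-v8-fork/archive/e62726800663d0397d8766f7185040d8b8b69402.tar.gz

which rewrites the matching entry in build.zig.zon with the tarball's computed hash.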

View File

@@ -129,18 +129,18 @@ test "Scheduler" {
     try testing.expectDelta(3, try s.runHighPriority(), 1);
     try testing.expectEqual(0, task.calls.items.len);
-    std.time.sleep(std.time.ns_per_ms * 5);
+    std.Thread.sleep(std.time.ns_per_ms * 5);
     try testing.expectEqual(null, s.runHighPriority());
     try testing.expectEqualSlices(u32, &.{1}, task.calls.items);
     try s.add(&task, TestTask.run2, 3, .{});
     try s.add(&task, TestTask.run1, 2, .{});
-    std.time.sleep(std.time.ns_per_ms * 5);
+    std.Thread.sleep(std.time.ns_per_ms * 5);
     try testing.expectDelta(null, try s.runHighPriority(), 1);
     try testing.expectEqualSlices(u32, &.{ 1, 1, 2 }, task.calls.items);
-    std.time.sleep(std.time.ns_per_ms * 5);
+    std.Thread.sleep(std.time.ns_per_ms * 5);
     // wont' run secondary
     try testing.expectEqual(null, try s.runHighPriority());
     try testing.expectEqualSlices(u32, &.{ 1, 1, 2 }, task.calls.items);
@@ -155,13 +155,13 @@ const TestTask = struct {
     calls: std.ArrayListUnmanaged(u32) = .{},
     fn run1(ctx: *anyopaque) ?u32 {
-        var self: *TestTask = @alignCast(@ptrCast(ctx));
+        var self: *TestTask = @ptrCast(@alignCast(ctx));
         self.calls.append(self.allocator, 1) catch unreachable;
         return null;
     }
     fn run2(ctx: *anyopaque) ?u32 {
-        var self: *TestTask = @alignCast(@ptrCast(ctx));
+        var self: *TestTask = @ptrCast(@alignCast(ctx));
         self.calls.append(self.allocator, 2) catch unreachable;
         return 2;
     }
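
The two mechanical changes in this hunk repeat across the rest of the commit: std.time.sleep is gone in 0.15 (std.Thread.sleep replaces it), and recovering a typed pointer from *anyopaque is now written @ptrCast(@alignCast(...)) from the inside out. A minimal sketch of that callback pattern, using a made-up Task type rather than anything from the repository:

    const std = @import("std");

    const Task = struct {
        calls: u32 = 0,

        // Type-erased callback: recover the concrete pointer from *anyopaque.
        fn run(ctx: *anyopaque) ?u32 {
            const self: *Task = @ptrCast(@alignCast(ctx));
            self.calls += 1;
            return null;
        }
    };

    test "type-erased callback" {
        var task = Task{};
        _ = Task.run(&task);
        std.Thread.sleep(std.time.ns_per_ms); // std.time.sleep no longer exists in 0.15
        try std.testing.expectEqual(1, task.calls);
    }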

View File

@@ -62,7 +62,7 @@ allocator: Allocator,
 buffer_pool: BufferPool,
 script_pool: std.heap.MemoryPool(PendingScript),
-const OrderList = std.DoublyLinkedList(*PendingScript);
+const OrderList = std.DoublyLinkedList;
 pub fn init(browser: *Browser, page: *Page) ScriptManager {
     // page isn't fully initialized, we can setup our reference, but that's it.
@@ -96,7 +96,7 @@ pub fn reset(self: *ScriptManager) void {
 fn clearList(_: *const ScriptManager, list: *OrderList) void {
     while (list.first) |node| {
-        const pending_script = node.data;
+        const pending_script: *PendingScript = @fieldParentPtr("node", node);
         // this removes it from the list
         pending_script.deinit();
     }
@@ -179,7 +179,7 @@ pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
         .script = script,
         .complete = false,
         .manager = self,
-        .node = .{ .data = pending_script },
+        .node = .{},
     };
     if (source == .@"inline") {
@@ -193,7 +193,6 @@ pub fn addFromElement(self: *ScriptManager, element: *parser.Element) !void {
         log.debug(.http, "script queue", .{ .url = remote_url.? });
     }
-    pending_script.node = .{ .data = pending_script };
     self.getList(&pending_script.script).append(&pending_script.node);
     errdefer pending_script.deinit();
@@ -323,7 +322,7 @@ fn evaluate(self: *ScriptManager) void {
     defer self.is_evaluating = false;
     while (self.scripts.first) |n| {
-        var pending_script = n.data;
+        var pending_script: *PendingScript = @fieldParentPtr("node", n);
         if (pending_script.complete == false) {
             return;
         }
@@ -343,7 +342,7 @@ fn evaluate(self: *ScriptManager) void {
     }
     while (self.deferreds.first) |n| {
-        var pending_script = n.data;
+        var pending_script: *PendingScript = @fieldParentPtr("node", n);
         if (pending_script.complete == false) {
             return;
         }
@@ -395,7 +394,7 @@ fn getList(self: *ScriptManager, script: *const Script) *OrderList {
 }
 fn startCallback(transfer: *Http.Transfer) !void {
-    const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
+    const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
     script.startCallback(transfer) catch |err| {
         log.err(.http, "SM.startCallback", .{ .err = err, .transfer = transfer });
         return err;
@@ -403,7 +402,7 @@ fn startCallback(transfer: *Http.Transfer) !void {
 }
 fn headerCallback(transfer: *Http.Transfer) !void {
-    const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
+    const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
     script.headerCallback(transfer) catch |err| {
         log.err(.http, "SM.headerCallback", .{
             .err = err,
@@ -415,7 +414,7 @@ fn headerCallback(transfer: *Http.Transfer) !void {
 }
 fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
-    const script: *PendingScript = @alignCast(@ptrCast(transfer.ctx));
+    const script: *PendingScript = @ptrCast(@alignCast(transfer.ctx));
     script.dataCallback(transfer, data) catch |err| {
         log.err(.http, "SM.dataCallback", .{ .err = err, .transfer = transfer, .len = data.len });
         return err;
@@ -423,12 +422,12 @@ fn dataCallback(transfer: *Http.Transfer, data: []const u8) !void {
 }
 fn doneCallback(ctx: *anyopaque) !void {
-    const script: *PendingScript = @alignCast(@ptrCast(ctx));
+    const script: *PendingScript = @ptrCast(@alignCast(ctx));
     script.doneCallback();
 }
 fn errorCallback(ctx: *anyopaque, err: anyerror) void {
-    const script: *PendingScript = @alignCast(@ptrCast(ctx));
+    const script: *PendingScript = @ptrCast(@alignCast(ctx));
     script.errorCallback(err);
 }
@@ -682,9 +681,14 @@ const BufferPool = struct {
     available: List = .{},
     allocator: Allocator,
     max_concurrent_transfers: u8,
-    node_pool: std.heap.MemoryPool(List.Node),
+    mem_pool: std.heap.MemoryPool(Container),
-    const List = std.DoublyLinkedList(std.ArrayListUnmanaged(u8));
+    const List = std.DoublyLinkedList;
+    const Container = struct {
+        node: List.Node,
+        buf: std.ArrayListUnmanaged(u8),
+    };
     fn init(allocator: Allocator, max_concurrent_transfers: u8) BufferPool {
         return .{
@@ -692,7 +696,7 @@ const BufferPool = struct {
             .count = 0,
             .allocator = allocator,
             .max_concurrent_transfers = max_concurrent_transfers,
-            .node_pool = std.heap.MemoryPool(List.Node).init(allocator),
+            .mem_pool = std.heap.MemoryPool(Container).init(allocator),
         };
     }
@@ -701,21 +705,23 @@ const BufferPool = struct {
         var node = self.available.first;
         while (node) |n| {
-            n.data.deinit(allocator);
+            const container: *Container = @fieldParentPtr("node", n);
+            container.buf.deinit(allocator);
             node = n.next;
         }
-        self.node_pool.deinit();
+        self.mem_pool.deinit();
     }
-    fn get(self: *BufferPool) ArrayListUnmanaged(u8) {
+    fn get(self: *BufferPool) std.ArrayListUnmanaged(u8) {
         const node = self.available.popFirst() orelse {
             // return a new buffer
             return .{};
         };
         self.count -= 1;
-        defer self.node_pool.destroy(node);
-        return node.data;
+        const container: *Container = @fieldParentPtr("node", node);
+        defer self.mem_pool.destroy(container);
+        return container.buf;
     }
     fn release(self: *BufferPool, buffer: ArrayListUnmanaged(u8)) void {
@@ -727,16 +733,16 @@ const BufferPool = struct {
             return;
         }
-        const node = self.node_pool.create() catch |err| {
+        const container = self.mem_pool.create() catch |err| {
             b.deinit(self.allocator);
             log.err(.http, "SM BufferPool release", .{ .err = err });
             return;
         };
         b.clearRetainingCapacity();
-        node.* = .{ .data = b };
+        container.* = .{ .buf = b, .node = .{} };
         self.count += 1;
-        self.available.append(node);
+        self.available.append(&container.node);
     }
 };
@@ -769,7 +775,7 @@ const Blocking = struct {
             return error.InvalidStatusCode;
         }
-        var self: *Blocking = @alignCast(@ptrCast(transfer.ctx));
+        var self: *Blocking = @ptrCast(@alignCast(transfer.ctx));
         self.buffer = self.buffer_pool.get();
     }
@@ -780,7 +786,7 @@ const Blocking = struct {
         // .blocking = true,
         // });
-        var self: *Blocking = @alignCast(@ptrCast(transfer.ctx));
+        var self: *Blocking = @ptrCast(@alignCast(transfer.ctx));
         self.buffer.appendSlice(self.allocator, data) catch |err| {
             log.err(.http, "SM.dataCallback", .{
                 .err = err,
@@ -793,7 +799,7 @@ const Blocking = struct {
     }
     fn doneCallback(ctx: *anyopaque) !void {
-        var self: *Blocking = @alignCast(@ptrCast(ctx));
+        var self: *Blocking = @ptrCast(@alignCast(ctx));
         self.state = .{ .done = .{
             .buffer = self.buffer,
             .buffer_pool = self.buffer_pool,
@@ -801,7 +807,7 @@ const Blocking = struct {
     }
     fn errorCallback(ctx: *anyopaque, err: anyerror) void {
-        var self: *Blocking = @alignCast(@ptrCast(ctx));
+        var self: *Blocking = @ptrCast(@alignCast(ctx));
         self.state = .{ .err = err };
         self.buffer_pool.release(self.buffer);
     }
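
The bulk of this file tracks std.DoublyLinkedList becoming intrusive in 0.15: the list no longer carries a data payload, so PendingScript and the BufferPool's Container embed a Node and recover the owner with @fieldParentPtr. A rough standalone sketch of the same pattern, with an illustrative Item type that is not part of the codebase:

    const std = @import("std");

    const Item = struct {
        value: u32,
        node: std.DoublyLinkedList.Node = .{},
    };

    test "intrusive doubly linked list" {
        var list: std.DoublyLinkedList = .{};
        var a = Item{ .value = 1 };
        var b = Item{ .value = 2 };
        list.append(&a.node);
        list.append(&b.node);

        var sum: u32 = 0;
        var it = list.first;
        while (it) |n| : (it = n.next) {
            // Recover the owning Item from its embedded node field.
            const item: *Item = @fieldParentPtr("node", n);
            sum += item.value;
        }
        try std.testing.expectEqual(3, sum);
    }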

View File

@@ -102,7 +102,7 @@ pub const CharacterData = struct {
     // netsurf's CharacterData (text, comment) doesn't implement the
     // dom_node_get_attributes and thus will crash if we try to call nodeIsEqualNode.
     pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) !bool {
-        if (try parser.nodeType(@alignCast(@ptrCast(self))) != try parser.nodeType(other_node)) {
+        if (try parser.nodeType(@ptrCast(@alignCast(self))) != try parser.nodeType(other_node)) {
             return false;
         }

View File

@@ -258,14 +258,14 @@ pub const Document = struct {
     }
     pub fn getActiveElement(self: *parser.Document, page: *Page) !?*parser.Element {
-        if (page.getNodeState(@alignCast(@ptrCast(self)))) |state| {
+        if (page.getNodeState(@ptrCast(@alignCast(self)))) |state| {
             if (state.active_element) |ae| {
                 return ae;
             }
         }
         if (try parser.documentHTMLBody(page.window.document)) |body| {
-            return @alignCast(@ptrCast(body));
+            return @ptrCast(@alignCast(body));
         }
         return try parser.documentGetDocumentElement(self);
@@ -281,7 +281,7 @@ pub const Document = struct {
     // we could look for the "disabled" attribute, but that's only meaningful
     // on certain types, and libdom's vtable doesn't seem to expose this.
     pub fn setFocus(self: *parser.Document, e: *parser.ElementHTML, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.active_element = @ptrCast(e);
     }
@@ -295,7 +295,7 @@ pub const Document = struct {
     }
     pub fn get_adoptedStyleSheets(self: *parser.Document, page: *Page) !Env.JsObject {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         if (state.adopted_style_sheets) |obj| {
             return obj;
         }
@@ -306,7 +306,7 @@ pub const Document = struct {
     }
     pub fn set_adoptedStyleSheets(self: *parser.Document, sheets: Env.JsObject, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.adopted_style_sheets = try sheets.persist();
     }
 };

View File

@@ -85,7 +85,7 @@ pub const DocumentFragment = struct {
     }
     pub fn _getElementById(self: *parser.DocumentFragment, id: []const u8) !?ElementUnion {
-        const e = try parser.nodeGetElementById(@alignCast(@ptrCast(self)), id) orelse return null;
+        const e = try parser.nodeGetElementById(@ptrCast(@alignCast(self)), id) orelse return null;
         return try Element.toInterface(e);
     }
 };

View File

@@ -137,15 +137,15 @@ pub const Element = struct {
     }
     pub fn get_innerHTML(self: *parser.Element, page: *Page) ![]const u8 {
-        var buf: std.ArrayListUnmanaged(u8) = .empty;
-        try dump.writeChildren(parser.elementToNode(self), .{}, buf.writer(page.call_arena));
-        return buf.items;
+        var aw = std.Io.Writer.Allocating.init(page.call_arena);
+        try dump.writeChildren(parser.elementToNode(self), .{}, &aw.writer);
+        return aw.written();
     }
     pub fn get_outerHTML(self: *parser.Element, page: *Page) ![]const u8 {
-        var buf: std.ArrayListUnmanaged(u8) = .empty;
-        try dump.writeNode(parser.elementToNode(self), .{}, buf.writer(page.call_arena));
-        return buf.items;
+        var aw = std.Io.Writer.Allocating.init(page.call_arena);
+        try dump.writeNode(parser.elementToNode(self), .{}, &aw.writer);
+        return aw.written();
     }
     pub fn set_innerHTML(self: *parser.Element, str: []const u8, page: *Page) !void {
@@ -184,7 +184,7 @@ pub const Element = struct {
             // always index 0, because nodeAppendChild moves the node out of
             // the nodeList and into the new tree
             const child = try parser.nodeListItem(children, 0) orelse continue;
-            _ = try parser.nodeAppendChild(@alignCast(@ptrCast(clean)), child);
+            _ = try parser.nodeAppendChild(@ptrCast(@alignCast(clean)), child);
         }
         const state = try page.getOrCreateNodeState(node);
@@ -537,14 +537,14 @@ pub const Element = struct {
     };
     pub fn _attachShadow(self: *parser.Element, opts: AttachShadowOpts, page: *Page) !*ShadowRoot {
         const mode = std.meta.stringToEnum(ShadowRoot.Mode, opts.mode) orelse return error.InvalidArgument;
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         if (state.shadow_root) |sr| {
             if (mode != sr.mode) {
                 // this is the behavior per the spec
                 return error.NotSupportedError;
             }
-            try Node.removeChildren(@alignCast(@ptrCast(sr.proto)));
+            try Node.removeChildren(@ptrCast(@alignCast(sr.proto)));
             return sr;
         }
@@ -558,13 +558,13 @@ pub const Element = struct {
             .proto = fragment,
         };
         state.shadow_root = sr;
-        parser.documentFragmentSetHost(sr.proto, @alignCast(@ptrCast(self)));
+        parser.documentFragmentSetHost(sr.proto, @ptrCast(@alignCast(self)));
         return sr;
     }
     pub fn get_shadowRoot(self: *parser.Element, page: *Page) ?*ShadowRoot {
-        const state = page.getNodeState(@alignCast(@ptrCast(self))) orelse return null;
+        const state = page.getNodeState(@ptrCast(@alignCast(self))) orelse return null;
         const sr = state.shadow_root orelse return null;
         if (sr.mode == .closed) {
             return null;

View File

@@ -111,7 +111,7 @@ pub const MutationObserver = struct {
     }
     fn callback(ctx: *anyopaque) ?u32 {
-        const self: *MutationObserver = @alignCast(@ptrCast(ctx));
+        const self: *MutationObserver = @ptrCast(@alignCast(ctx));
         if (self.connected == false) {
             self.scheduled = true;
             return null;

View File

@@ -601,7 +601,7 @@ pub const Node = struct {
     fn toNode(self: NodeOrText, doc: *parser.Document) !*parser.Node {
         return switch (self) {
             .node => |n| n,
-            .text => |txt| @alignCast(@ptrCast(try parser.documentCreateTextNode(doc, txt))),
+            .text => |txt| @ptrCast(@alignCast(try parser.documentCreateTextNode(doc, txt))),
         };
     }

View File

@@ -60,9 +60,9 @@ pub const ShadowRoot = struct {
     }
     pub fn get_innerHTML(self: *ShadowRoot, page: *Page) ![]const u8 {
-        var buf: std.ArrayListUnmanaged(u8) = .empty;
-        try dump.writeChildren(parser.documentFragmentToNode(self.proto), .{}, buf.writer(page.call_arena));
-        return buf.items;
+        var aw = std.Io.Writer.Allocating.init(page.call_arena);
+        try dump.writeChildren(parser.documentFragmentToNode(self.proto), .{}, &aw.writer);
+        return aw.written();
     }
     pub fn set_innerHTML(self: *ShadowRoot, str_: ?[]const u8) !void {

View File

@@ -30,14 +30,14 @@ pub const Opts = struct {
 };
 // writer must be a std.io.Writer
-pub fn writeHTML(doc: *parser.Document, opts: Opts, writer: anytype) !void {
+pub fn writeHTML(doc: *parser.Document, opts: Opts, writer: *std.Io.Writer) !void {
     try writer.writeAll("<!DOCTYPE html>\n");
     try writeChildren(parser.documentToNode(doc), opts, writer);
     try writer.writeAll("\n");
 }
 // Spec: https://www.w3.org/TR/xml/#sec-prolog-dtd
-pub fn writeDocType(doc_type: *parser.DocumentType, writer: anytype) !void {
+pub fn writeDocType(doc_type: *parser.DocumentType, writer: *std.Io.Writer) !void {
     try writer.writeAll("<!DOCTYPE ");
     try writer.writeAll(try parser.documentTypeGetName(doc_type));
@@ -62,7 +62,7 @@ pub fn writeDocType(doc_type: *parser.DocumentType, writer: anytype) !void {
try writer.writeAll(">"); try writer.writeAll(">");
} }
pub fn writeNode(node: *parser.Node, opts: Opts, writer: anytype) anyerror!void { pub fn writeNode(node: *parser.Node, opts: Opts, writer: *std.Io.Writer) anyerror!void {
switch (try parser.nodeType(node)) { switch (try parser.nodeType(node)) {
.element => { .element => {
// open the tag // open the tag
@@ -95,7 +95,7 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: anytype) anyerror!void
             if (opts.page) |page| {
                 if (page.getNodeState(node)) |state| {
                     if (state.shadow_root) |sr| {
-                        try writeChildren(@alignCast(@ptrCast(sr.proto)), opts, writer);
+                        try writeChildren(@ptrCast(@alignCast(sr.proto)), opts, writer);
                     }
                 }
             }
@@ -150,7 +150,7 @@ pub fn writeNode(node: *parser.Node, opts: Opts, writer: anytype) anyerror!void
 }
 // writer must be a std.io.Writer
-pub fn writeChildren(root: *parser.Node, opts: Opts, writer: anytype) !void {
+pub fn writeChildren(root: *parser.Node, opts: Opts, writer: *std.Io.Writer) !void {
     const walker = Walker{};
     var next: ?*parser.Node = null;
     while (true) {
@@ -271,13 +271,13 @@ fn testWriteHTML(comptime expected_body: []const u8, src: []const u8) !void {
 }
 fn testWriteFullHTML(comptime expected: []const u8, src: []const u8) !void {
-    var buf = std.ArrayListUnmanaged(u8){};
-    defer buf.deinit(testing.allocator);
+    var aw = std.Io.Writer.Allocating.init(testing.allocator);
+    defer aw.deinit();
     const doc_html = try parser.documentHTMLParseFromStr(src);
     defer parser.documentHTMLClose(doc_html) catch {};
     const doc = parser.documentHTMLToDocument(doc_html);
-    try writeHTML(doc, .{}, buf.writer(testing.allocator));
-    try testing.expectEqualStrings(expected, buf.items);
+    try writeHTML(doc, .{}, &aw.writer);
+    try testing.expectEqualStrings(expected, aw.written());
 }
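
This file shows the other recurring 0.15 migration: writer: anytype parameters become a concrete *std.Io.Writer, and the ArrayList-backed writers are replaced by std.Io.Writer.Allocating. A minimal sketch of how the two sides fit together, with a made-up render function for illustration:

    const std = @import("std");

    // Any sink that exposes a *std.Io.Writer works here: Allocating, a file, a socket...
    fn render(writer: *std.Io.Writer, name: []const u8) !void {
        try writer.writeAll("<p>");
        try writer.writeAll(name);
        try writer.writeAll("</p>");
    }

    test "allocating writer" {
        var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
        defer aw.deinit();
        try render(&aw.writer, "hello");
        try std.testing.expectEqualStrings("<p>hello</p>", aw.written());
    }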

View File

@@ -129,7 +129,7 @@ const TimeoutCallback = struct {
     signal: AbortSignal,
     fn run(ctx: *anyopaque) ?u32 {
-        const self: *TimeoutCallback = @alignCast(@ptrCast(ctx));
+        const self: *TimeoutCallback = @ptrCast(@alignCast(ctx));
         self.signal.abort("TimeoutError") catch |err| {
             log.warn(.app, "abort signal timeout", .{ .err = err });
         };

View File

@@ -209,7 +209,7 @@ pub const HTMLDocument = struct {
     }
     pub fn get_readyState(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         return @tagName(state.ready_state);
     }
@@ -292,7 +292,7 @@ pub const HTMLDocument = struct {
     }
     pub fn documentIsLoaded(self: *parser.DocumentHTML, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.ready_state = .interactive;
         log.debug(.script_event, "dispatch event", .{
@@ -309,7 +309,7 @@ pub const HTMLDocument = struct {
     }
     pub fn documentIsComplete(self: *parser.DocumentHTML, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.ready_state = .complete;
     }
 };

View File

@@ -145,7 +145,7 @@ pub const HTMLElement = struct {
         try Node.removeChildren(n);
         // attach the text node.
-        _ = try parser.nodeAppendChild(n, @as(*parser.Node, @alignCast(@ptrCast(t))));
+        _ = try parser.nodeAppendChild(n, @as(*parser.Node, @ptrCast(@alignCast(t))));
     }
     pub fn _click(e: *parser.ElementHTML) !void {
@@ -264,7 +264,7 @@ pub const HTMLAnchorElement = struct {
         // But
         // document.createElement('a').host
         // should not fail, it should return an empty string
-        if (try parser.elementGetAttribute(@alignCast(@ptrCast(self)), "href")) |href| {
+        if (try parser.elementGetAttribute(@ptrCast(@alignCast(self)), "href")) |href| {
             return URL.constructor(.{ .string = href }, null, page); // TODO inject base url
         }
         return .empty;
@@ -869,7 +869,7 @@ pub const HTMLScriptElement = struct {
             v,
         );
-        if (try Node.get_isConnected(@alignCast(@ptrCast(self)))) {
+        if (try Node.get_isConnected(@ptrCast(@alignCast(self)))) {
             // There are sites which do set the src AFTER appending the script
             // tag to the document:
             // const s = document.createElement('script');
@@ -877,7 +877,7 @@ pub const HTMLScriptElement = struct {
             // s.src = '...';
             // This should load the script.
             // addFromElement protects against double execution.
-            try page.script_manager.addFromElement(@alignCast(@ptrCast(self)));
+            try page.script_manager.addFromElement(@ptrCast(@alignCast(self)));
         }
     }
@@ -976,22 +976,22 @@ pub const HTMLScriptElement = struct {
     }
     pub fn get_onload(self: *parser.Script, page: *Page) !?Env.Function {
-        const state = page.getNodeState(@alignCast(@ptrCast(self))) orelse return null;
+        const state = page.getNodeState(@ptrCast(@alignCast(self))) orelse return null;
         return state.onload;
     }
     pub fn set_onload(self: *parser.Script, function: ?Env.Function, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.onload = function;
     }
     pub fn get_onerror(self: *parser.Script, page: *Page) !?Env.Function {
-        const state = page.getNodeState(@alignCast(@ptrCast(self))) orelse return null;
+        const state = page.getNodeState(@ptrCast(@alignCast(self))) orelse return null;
         return state.onerror;
     }
     pub fn set_onerror(self: *parser.Script, function: ?Env.Function, page: *Page) !void {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         state.onerror = function;
     }
 };
@@ -1014,7 +1014,7 @@ pub const HTMLStyleElement = struct {
     pub const subtype = .node;
     pub fn get_sheet(self: *parser.Style, page: *Page) !*StyleSheet {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         if (state.style_sheet) |ss| {
             return ss;
         }
@@ -1068,7 +1068,7 @@ pub const HTMLTemplateElement = struct {
     pub const subtype = .node;
     pub fn get_content(self: *parser.Template, page: *Page) !*parser.DocumentFragment {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(self)));
         if (state.template_content) |tc| {
             return tc;
         }

View File

@@ -64,7 +64,7 @@ pub const HTMLSelectElement = struct {
     }
     pub fn get_selectedIndex(select: *parser.Select, page: *Page) !i32 {
-        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(select)));
+        const state = try page.getOrCreateNodeState(@ptrCast(@alignCast(select)));
         const selected_index = try parser.selectGetSelectedIndex(select);
         // See the explicit_index_set field documentation
@@ -83,7 +83,7 @@ pub const HTMLSelectElement = struct {
     // Libdom's dom_html_select_select_set_selected_index will crash if index
     // is out of range, and it doesn't properly unset options
     pub fn set_selectedIndex(select: *parser.Select, index: i32, page: *Page) !void {
-        var state = try page.getOrCreateNodeState(@alignCast(@ptrCast(select)));
+        var state = try page.getOrCreateNodeState(@ptrCast(@alignCast(select)));
         state.explicit_index_set = true;
         const options = try parser.selectGetOptions(select);
@@ -101,7 +101,7 @@ pub const HTMLSelectElement = struct {
     pub fn get_options(select: *parser.Select) HTMLOptionsCollection {
         return .{
             .select = select,
-            .proto = collection.HTMLCollectionChildren(@alignCast(@ptrCast(select)), .{
+            .proto = collection.HTMLCollectionChildren(@ptrCast(@alignCast(select)), .{
                 .mutable = true,
                 .include_root = false,
             }),
@@ -176,24 +176,24 @@ pub const HTMLOptionsCollection = struct {
         };
         const insert_before: *parser.Node = switch (before) {
-            .option => |o| @alignCast(@ptrCast(o)),
+            .option => |o| @ptrCast(@alignCast(o)),
             .index => |i| (try self.proto.item(i)) orelse return self.appendOption(option),
         };
         return Node.before(insert_before, &.{
-            .{ .node = @alignCast(@ptrCast(option)) },
+            .{ .node = @ptrCast(@alignCast(option)) },
         });
     }
     pub fn _remove(self: *HTMLOptionsCollection, index: u32) !void {
         const Node = @import("../dom/node.zig").Node;
         const option = (try self.proto.item(index)) orelse return;
-        _ = try Node._removeChild(@alignCast(@ptrCast(self.select)), option);
+        _ = try Node._removeChild(@ptrCast(@alignCast(self.select)), option);
     }
     fn appendOption(self: *HTMLOptionsCollection, option: *parser.Option) !void {
         const Node = @import("../dom/node.zig").Node;
-        return Node.append(@alignCast(@ptrCast(self.select)), &.{
-            .{ .node = @alignCast(@ptrCast(option)) },
+        return Node.append(@ptrCast(@alignCast(self.select)), &.{
+            .{ .node = @ptrCast(@alignCast(option)) },
         });
     }
 };

View File

@@ -403,7 +403,7 @@ const TimerCallback = struct {
     args: []Env.JsObject = &.{},
     fn run(ctx: *anyopaque) ?u32 {
-        const self: *TimerCallback = @alignCast(@ptrCast(ctx));
+        const self: *TimerCallback = @ptrCast(@alignCast(ctx));
         if (self.repeat != null) {
             if (self.window.timers.contains(self.timer_id) == false) {
                 // it was called

View File

@@ -44,32 +44,32 @@ pub fn destroy() void {
     heap = null;
 }
-pub export fn m_alloc(size: usize) callconv(.C) ?*anyopaque {
+pub export fn m_alloc(size: usize) callconv(.c) ?*anyopaque {
     if (heap == null) return null;
     return c.mi_heap_malloc(heap.?, size);
 }
-pub export fn re_alloc(ptr: ?*anyopaque, size: usize) callconv(.C) ?*anyopaque {
+pub export fn re_alloc(ptr: ?*anyopaque, size: usize) callconv(.c) ?*anyopaque {
     if (heap == null) return null;
     return c.mi_heap_realloc(heap.?, ptr, size);
 }
-pub export fn c_alloc(nmemb: usize, size: usize) callconv(.C) ?*anyopaque {
+pub export fn c_alloc(nmemb: usize, size: usize) callconv(.c) ?*anyopaque {
     if (heap == null) return null;
     return c.mi_heap_calloc(heap.?, nmemb, size);
 }
-pub export fn str_dup(s: [*c]const u8) callconv(.C) [*c]u8 {
+pub export fn str_dup(s: [*c]const u8) callconv(.c) [*c]u8 {
     if (heap == null) return null;
     return c.mi_heap_strdup(heap.?, s);
 }
-pub export fn strn_dup(s: [*c]const u8, size: usize) callconv(.C) [*c]u8 {
+pub export fn strn_dup(s: [*c]const u8, size: usize) callconv(.c) [*c]u8 {
     if (heap == null) return null;
     return c.mi_heap_strndup(heap.?, s, size);
 }
 // NOOP, use destroy to clear all the memory allocated at once.
-pub export fn f_ree(_: ?*anyopaque) callconv(.C) void {
+pub export fn f_ree(_: ?*anyopaque) callconv(.c) void {
     return;
 }
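
Calling-convention tags are lower-case enum literals now, so callconv(.C) becomes callconv(.c). A minimal sketch of an exported C-ABI function under that assumption (the function itself is illustrative, not from this file):

    // Exported with the C calling convention so it can be called from C code.
    pub export fn add_u32(a: u32, b: u32) callconv(.c) u32 {
        return a +% b;
    }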

View File

@@ -575,7 +575,7 @@ pub fn mutationEventRelatedNode(evt: *MutationEvent) !?*Node {
     const err = c._dom_mutation_event_get_related_node(evt, &n);
     try DOMErr(err);
     if (n == null) return null;
-    return @as(*Node, @alignCast(@ptrCast(n)));
+    return @as(*Node, @ptrCast(@alignCast(n)));
 }
 // EventListener
@@ -590,7 +590,7 @@ fn eventListenerGetData(lst: *EventListener) ?*anyopaque {
 pub const EventTarget = c.dom_event_target;
 pub fn eventTargetToNode(et: *EventTarget) *Node {
-    return @as(*Node, @alignCast(@ptrCast(et)));
+    return @as(*Node, @ptrCast(@alignCast(et)));
 }
 fn eventTargetVtable(et: *EventTarget) c.dom_event_target_vtable {
@@ -631,7 +631,7 @@ pub const EventNode = struct {
     fn idFromListener(lst: *EventListener) ?usize {
         const ctx = eventListenerGetData(lst) orelse return null;
-        const node: *EventNode = @alignCast(@ptrCast(ctx));
+        const node: *EventNode = @ptrCast(@alignCast(ctx));
         return node.id;
     }
 };
@@ -643,11 +643,11 @@ pub fn eventTargetAddEventListener(
     capture: bool,
 ) !*EventListener {
     const event_handler = struct {
-        fn handle(event_: ?*Event, ptr_: ?*anyopaque) callconv(.C) void {
+        fn handle(event_: ?*Event, ptr_: ?*anyopaque) callconv(.c) void {
             const ptr = ptr_ orelse return;
             const event = event_ orelse return;
-            const node_: *EventNode = @alignCast(@ptrCast(ptr));
+            const node_: *EventNode = @ptrCast(@alignCast(ptr));
             node_.func(node_, event);
         }
     }.handle;
@@ -829,12 +829,12 @@ pub const EventTargetTBase = extern struct {
     eti: c.dom_event_target_internal = c.dom_event_target_internal{ .listeners = null },
     internal_target_type: InternalType,
-    pub fn add_event_listener(et: [*c]c.dom_event_target, t: [*c]c.dom_string, l: ?*c.struct_dom_event_listener, capture: bool) callconv(.C) c.dom_exception {
+    pub fn add_event_listener(et: [*c]c.dom_event_target, t: [*c]c.dom_string, l: ?*c.struct_dom_event_listener, capture: bool) callconv(.c) c.dom_exception {
         const self = @as(*Self, @ptrCast(et));
         return c._dom_event_target_add_event_listener(&self.eti, t, l, capture);
     }
-    pub fn dispatch_event(et: [*c]c.dom_event_target, evt: ?*c.struct_dom_event, res: [*c]bool) callconv(.C) c.dom_exception {
+    pub fn dispatch_event(et: [*c]c.dom_event_target, evt: ?*c.struct_dom_event, res: [*c]bool) callconv(.c) c.dom_exception {
         const self = @as(*Self, @ptrCast(et));
         // Set the event target to the target dispatched.
         const e = c._dom_event_set_target(evt, et);
@@ -844,7 +844,7 @@ pub const EventTargetTBase = extern struct {
         return c._dom_event_target_dispatch(et, &self.eti, evt, c.DOM_AT_TARGET, res);
     }
-    pub fn remove_event_listener(et: [*c]c.dom_event_target, t: [*c]c.dom_string, l: ?*c.struct_dom_event_listener, capture: bool) callconv(.C) c.dom_exception {
+    pub fn remove_event_listener(et: [*c]c.dom_event_target, t: [*c]c.dom_string, l: ?*c.struct_dom_event_listener, capture: bool) callconv(.c) c.dom_exception {
         const self = @as(*Self, @ptrCast(et));
         return c._dom_event_target_remove_event_listener(&self.eti, t, l, capture);
     }
@@ -856,12 +856,12 @@ pub const EventTargetTBase = extern struct {
         cur: [*c]c.struct_listener_entry,
         next: [*c][*c]c.struct_listener_entry,
         l: [*c]?*c.struct_dom_event_listener,
-    ) callconv(.C) c.dom_exception {
+    ) callconv(.c) c.dom_exception {
         const self = @as(*Self, @ptrCast(et));
         return c._dom_event_target_iter_event_listener(self.eti, t, capture, cur, next, l);
     }
-    pub fn internal_type(et: [*c]c.dom_event_target, internal_type_: [*c]u32) callconv(.C) c.dom_exception {
+    pub fn internal_type(et: [*c]c.dom_event_target, internal_type_: [*c]u32) callconv(.c) c.dom_exception {
         const self = @as(*Self, @ptrCast(et));
         internal_type_.* = @intFromEnum(self.internal_target_type);
         return c.DOM_NO_ERR;
@@ -1016,7 +1016,7 @@ pub fn nodeListItem(nodeList: *NodeList, index: u32) !?*Node {
     const err = c._dom_nodelist_item(nodeList, index, &n);
     try DOMErr(err);
     if (n == null) return null;
-    return @as(*Node, @alignCast(@ptrCast(n)));
+    return @as(*Node, @ptrCast(@alignCast(n)));
 }
 // NodeExternal is the libdom public representation of a Node.
@@ -1452,7 +1452,7 @@ fn characterDataVtable(data: *CharacterData) c.dom_characterdata_vtable {
 }
 pub inline fn characterDataToNode(cdata: *CharacterData) *Node {
-    return @as(*Node, @alignCast(@ptrCast(cdata)));
+    return @as(*Node, @ptrCast(@alignCast(cdata)));
 }
 pub fn characterDataData(cdata: *CharacterData) ![]const u8 {
@@ -1537,7 +1537,7 @@ pub const ProcessingInstruction = c.dom_processing_instruction;
 // processingInstructionToNode is an helper to convert an ProcessingInstruction to a node.
 pub inline fn processingInstructionToNode(pi: *ProcessingInstruction) *Node {
-    return @as(*Node, @alignCast(@ptrCast(pi)));
+    return @as(*Node, @ptrCast(@alignCast(pi)));
 }
 pub fn processInstructionCopy(pi: *ProcessingInstruction) !*ProcessingInstruction {
@@ -1592,7 +1592,7 @@ pub fn attributeGetOwnerElement(a: *Attribute) !?*Element {
 // attributeToNode is an helper to convert an attribute to a node.
 pub inline fn attributeToNode(a: *Attribute) *Node {
-    return @as(*Node, @alignCast(@ptrCast(a)));
+    return @as(*Node, @ptrCast(@alignCast(a)));
 }
 // Element
@@ -1754,7 +1754,7 @@ pub fn elementHasClass(elem: *Element, class: []const u8) !bool {
 // elementToNode is an helper to convert an element to a node.
 pub inline fn elementToNode(e: *Element) *Node {
-    return @as(*Node, @alignCast(@ptrCast(e)));
+    return @as(*Node, @ptrCast(@alignCast(e)));
 }
 // TokenList
@@ -1823,14 +1823,14 @@ fn elementHTMLVtable(elem_html: *ElementHTML) c.dom_html_element_vtable {
 // scriptToElt is an helper to convert an script to an element.
 pub inline fn scriptToElt(s: *Script) *Element {
-    return @as(*Element, @alignCast(@ptrCast(s)));
+    return @as(*Element, @ptrCast(@alignCast(s)));
 }
 // HTMLAnchorElement
 // anchorToNode is an helper to convert an anchor to a node.
 pub inline fn anchorToNode(a: *Anchor) *Node {
-    return @as(*Node, @alignCast(@ptrCast(a)));
+    return @as(*Node, @ptrCast(@alignCast(a)));
 }
 pub fn anchorGetTarget(a: *Anchor) ![]const u8 {
@@ -1990,7 +1990,7 @@ pub const OptionCollection = c.dom_html_options_collection;
 pub const DocumentFragment = c.dom_document_fragment;
 pub inline fn documentFragmentToNode(doc: *DocumentFragment) *Node {
-    return @as(*Node, @alignCast(@ptrCast(doc)));
+    return @as(*Node, @ptrCast(@alignCast(doc)));
 }
 pub fn documentFragmentGetHost(frag: *DocumentFragment) ?*Node {
@@ -2097,7 +2097,7 @@ pub inline fn domImplementationCreateHTMLDocument(title: ?[]const u8) !*Document
     if (title) |t| {
         const htitle = try documentCreateElement(doc, "title");
         const txt = try documentCreateTextNode(doc, t);
-        _ = try nodeAppendChild(elementToNode(htitle), @as(*Node, @alignCast(@ptrCast(txt))));
+        _ = try nodeAppendChild(elementToNode(htitle), @as(*Node, @ptrCast(@alignCast(txt))));
         _ = try nodeAppendChild(elementToNode(head), elementToNode(htitle));
     }
@@ -2115,7 +2115,7 @@ fn documentVtable(doc: *Document) c.dom_document_vtable {
 }
 pub inline fn documentToNode(doc: *Document) *Node {
-    return @as(*Node, @alignCast(@ptrCast(doc)));
+    return @as(*Node, @ptrCast(@alignCast(doc)));
 }
 pub inline fn documentGetElementById(doc: *Document, id: []const u8) !?*Element {
@@ -2284,7 +2284,7 @@ pub inline fn documentImportNode(doc: *Document, node: *Node, deep: bool) !*Node
     const nodeext = toNodeExternal(Node, node);
     const err = documentVtable(doc).dom_document_import_node.?(doc, nodeext, deep, &res);
     try DOMErr(err);
-    return @as(*Node, @alignCast(@ptrCast(res)));
+    return @as(*Node, @ptrCast(@alignCast(res)));
 }
 pub inline fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
@@ -2292,7 +2292,7 @@ pub inline fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
     const nodeext = toNodeExternal(Node, node);
     const err = documentVtable(doc).dom_document_adopt_node.?(doc, nodeext, &res);
     try DOMErr(err);
-    return @as(*Node, @alignCast(@ptrCast(res)));
+    return @as(*Node, @ptrCast(@alignCast(res)));
 }
 pub inline fn documentCreateAttribute(doc: *Document, name: []const u8) !*Attribute {
@@ -2327,7 +2327,7 @@ pub const DocumentHTML = c.dom_html_document;
 // documentHTMLToNode is an helper to convert a documentHTML to an node.
 pub inline fn documentHTMLToNode(doc: *DocumentHTML) *Node {
-    return @as(*Node, @alignCast(@ptrCast(doc)));
+    return @as(*Node, @ptrCast(@alignCast(doc)));
 }
 fn documentHTMLVtable(doc_html: *DocumentHTML) c.dom_html_document_vtable {
@@ -2489,7 +2489,7 @@ pub inline fn documentHTMLBody(doc_html: *DocumentHTML) !?*Body {
 }
 pub inline fn bodyToElement(body: *Body) *Element {
-    return @as(*Element, @alignCast(@ptrCast(body)));
+    return @as(*Element, @ptrCast(@alignCast(body)));
 }
 pub inline fn documentHTMLSetBody(doc_html: *DocumentHTML, elt: ?*ElementHTML) !void {
@@ -2520,7 +2520,7 @@ pub inline fn documentHTMLSetTitle(doc: *DocumentHTML, v: []const u8) !void {
 pub fn documentHTMLSetCurrentScript(doc: *DocumentHTML, script: ?*Script) !void {
     var s: ?*ElementHTML = null;
-    if (script != null) s = @alignCast(@ptrCast(script.?));
+    if (script != null) s = @ptrCast(@alignCast(script.?));
     const err = documentHTMLVtable(doc).set_current_script.?(doc, s);
     try DOMErr(err);
 }
@@ -2999,7 +2999,7 @@ pub fn inputSetType(input: *Input, type_: []const u8) !void {
         }
     }
     const new_type = if (found) type_ else "text";
-    try elementSetAttribute(@alignCast(@ptrCast(input)), "type", new_type);
+    try elementSetAttribute(@ptrCast(@alignCast(input)), "type", new_type);
 }
 pub fn inputGetValue(input: *Input) ![]const u8 {

View File

@@ -173,13 +173,13 @@ pub const Page = struct {
     }
     fn runMicrotasks(ctx: *anyopaque) ?u32 {
-        const self: *Page = @alignCast(@ptrCast(ctx));
+        const self: *Page = @ptrCast(@alignCast(ctx));
         self.session.browser.runMicrotasks();
         return 5;
     }
     fn runMessageLoop(ctx: *anyopaque) ?u32 {
-        const self: *Page = @alignCast(@ptrCast(ctx));
+        const self: *Page = @ptrCast(@alignCast(ctx));
         self.session.browser.runMessageLoop();
         return 100;
     }
@@ -192,7 +192,7 @@ pub const Page = struct {
     };
     // dump writes the page content into the given file.
-    pub fn dump(self: *const Page, opts: DumpOpts, out: std.fs.File) !void {
+    pub fn dump(self: *const Page, opts: DumpOpts, out: *std.Io.Writer) !void {
         switch (self.mode) {
             .pre => return error.PageNotLoaded,
             .raw => |buf| {
@@ -347,7 +347,7 @@ pub const Page = struct {
                 // overflow.
                 const _ms: u64 = @intCast(ms);
-                std.time.sleep(std.time.ns_per_ms * _ms);
+                std.Thread.sleep(std.time.ns_per_ms * _ms);
                 break :SW;
             }
@@ -469,9 +469,9 @@ pub const Page = struct {
     }
     pub fn origin(self: *const Page, arena: Allocator) ![]const u8 {
-        var arr: std.ArrayListUnmanaged(u8) = .{};
-        try self.url.origin(arr.writer(arena));
-        return arr.items;
+        var aw = std.Io.Writer.Allocating.init(arena);
+        try self.url.origin(&aw.writer);
+        return aw.written();
     }
     const RequestCookieOpts = struct {
@@ -610,7 +610,7 @@ pub const Page = struct {
     }
     fn pageHeaderDoneCallback(transfer: *Http.Transfer) !void {
-        var self: *Page = @alignCast(@ptrCast(transfer.ctx));
+        var self: *Page = @ptrCast(@alignCast(transfer.ctx));
         // would be different than self.url in the case of a redirect
         const header = &transfer.response_header.?;
@@ -625,7 +625,7 @@ pub const Page = struct {
     }
     fn pageDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
-        var self: *Page = @alignCast(@ptrCast(transfer.ctx));
+        var self: *Page = @ptrCast(@alignCast(transfer.ctx));
         if (self.mode == .pre) {
             // we lazily do this, because we might need the first chunk of data
@@ -686,7 +686,7 @@ pub const Page = struct {
     fn pageDoneCallback(ctx: *anyopaque) !void {
         log.debug(.http, "navigate done", .{});
-        var self: *Page = @alignCast(@ptrCast(ctx));
+        var self: *Page = @ptrCast(@alignCast(ctx));
         self.clearTransferArena();
         switch (self.mode) {
@@ -772,7 +772,7 @@ pub const Page = struct {
     fn pageErrorCallback(ctx: *anyopaque, err: anyerror) void {
         log.err(.http, "navigate failed", .{ .err = err });
-        var self: *Page = @alignCast(@ptrCast(ctx));
+        var self: *Page = @ptrCast(@alignCast(ctx));
         self.clearTransferArena();
         switch (self.mode) {
@@ -1015,7 +1015,7 @@ pub const Page = struct {
     pub fn getNodeState(_: *const Page, node: *parser.Node) ?*State {
         if (parser.nodeGetEmbedderData(node)) |state| {
-            return @alignCast(@ptrCast(state));
+            return @ptrCast(@alignCast(state));
         }
         return null;
     }
@@ -1026,13 +1026,13 @@ pub const Page = struct {
         const transfer_arena = self.session.transfer_arena;
         var form_data = try FormData.fromForm(form, submitter, self);
-        const encoding = try parser.elementGetAttribute(@alignCast(@ptrCast(form)), "enctype");
+        const encoding = try parser.elementGetAttribute(@ptrCast(@alignCast(form)), "enctype");
         var buf: std.ArrayListUnmanaged(u8) = .empty;
         try form_data.write(encoding, buf.writer(transfer_arena));
-        const method = try parser.elementGetAttribute(@alignCast(@ptrCast(form)), "method") orelse "";
-        var action = try parser.elementGetAttribute(@alignCast(@ptrCast(form)), "action") orelse self.url.raw;
+        const method = try parser.elementGetAttribute(@ptrCast(@alignCast(form)), "method") orelse "";
+        var action = try parser.elementGetAttribute(@ptrCast(@alignCast(form)), "action") orelse self.url.raw;
         var opts = NavigateOpts{
             .reason = .form,
@@ -1113,7 +1113,7 @@ fn timestamp() u32 {
// after the document is loaded, it's ok to execute any async and defer scripts // after the document is loaded, it's ok to execute any async and defer scripts
// immediately. // immediately.
pub export fn scriptAddedCallback(ctx: ?*anyopaque, element: ?*parser.Element) callconv(.c) void { pub export fn scriptAddedCallback(ctx: ?*anyopaque, element: ?*parser.Element) callconv(.c) void {
const self: *Page = @alignCast(@ptrCast(ctx.?)); const self: *Page = @ptrCast(@alignCast(ctx.?));
if (self.delayed_navigation) { if (self.delayed_navigation) {
// if we're planning on navigating to another page, don't run this script // if we're planning on navigating to another page, don't run this script
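For context, the repeated @alignCast(@ptrCast(...)) → @ptrCast(@alignCast(...)) changes in this commit follow the 0.15-era idiom of casting inner-to-outer: restore alignment first, then change the pointer type. A minimal, self-contained sketch (the Ctx type is illustrative, not from this commit):

const std = @import("std");

const Ctx = struct { id: u32 };

test "recover a typed pointer from *anyopaque" {
    var ctx = Ctx{ .id = 7 };
    const erased: *anyopaque = &ctx;
    // @alignCast restores the natural alignment, then @ptrCast changes the type
    const back: *Ctx = @ptrCast(@alignCast(erased));
    try std.testing.expectEqual(@as(u32, 7), back.id);
}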


@@ -358,14 +358,10 @@ pub const Cookie = struct {
if (domain.len > 0) { if (domain.len > 0) {
const no_leading_dot = if (domain[0] == '.') domain[1..] else domain; const no_leading_dot = if (domain[0] == '.') domain[1..] else domain;
var list: std.ArrayListUnmanaged(u8) = .empty; var aw = try std.Io.Writer.Allocating.initCapacity(arena, no_leading_dot.len + 1);
try list.ensureTotalCapacity(arena, no_leading_dot.len + 1); // Expect no percents needed try aw.writer.writeByte('.');
list.appendAssumeCapacity('.'); try std.Uri.Component.percentEncode(&aw.writer, no_leading_dot, isHostChar);
try std.Uri.Component.percentEncode(list.writer( const owned_domain = toLower(aw.written());
arena,
), no_leading_dot, isHostChar);
var owned_domain: []u8 = list.items; // @memory retains memory used before growing
_ = toLower(owned_domain);
if (std.mem.indexOfScalarPos(u8, owned_domain, 1, '.') == null and std.mem.eql(u8, "localhost", owned_domain[1..]) == false) { if (std.mem.indexOfScalarPos(u8, owned_domain, 1, '.') == null and std.mem.eql(u8, "localhost", owned_domain[1..]) == false) {
// can't set a cookie for a TLD // can't set a cookie for a TLD
@@ -387,10 +383,9 @@ pub const Cookie = struct {
pub fn percentEncode(arena: Allocator, component: std.Uri.Component, comptime isValidChar: fn (u8) bool) ![]u8 { pub fn percentEncode(arena: Allocator, component: std.Uri.Component, comptime isValidChar: fn (u8) bool) ![]u8 {
switch (component) { switch (component) {
.raw => |str| { .raw => |str| {
var list: std.ArrayListUnmanaged(u8) = .empty; var aw = try std.Io.Writer.Allocating.initCapacity(arena, str.len);
try list.ensureTotalCapacity(arena, str.len); // Expect no percents needed try std.Uri.Component.percentEncode(&aw.writer, str, isValidChar);
try std.Uri.Component.percentEncode(list.writer(arena), str, isValidChar); return aw.written(); // @memory retains memory used before growing
return list.items; // @memory retains memory used before growing
}, },
.percent_encoded => |str| { .percent_encoded => |str| {
return try arena.dupe(u8, str); return try arena.dupe(u8, str);


@@ -114,16 +114,16 @@ pub const URL = struct {
} }
pub fn get_origin(self: *URL, page: *Page) ![]const u8 { pub fn get_origin(self: *URL, page: *Page) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty; var aw = std.Io.Writer.Allocating.init(page.arena);
try self.uri.writeToStream(.{ try self.uri.writeToStream(&aw.writer, .{
.scheme = true, .scheme = true,
.authentication = false, .authentication = false,
.authority = true, .authority = true,
.path = false, .path = false,
.query = false, .query = false,
.fragment = false, .fragment = false,
}, buf.writer(page.arena)); });
return buf.items; return aw.written();
} }
// get_href returns the URL by writing all its components. // get_href returns the URL by writing all its components.
@@ -137,28 +137,28 @@ pub const URL = struct {
// format the url with all its components. // format the url with all its components.
pub fn toString(self: *const URL, arena: Allocator) ![]const u8 { pub fn toString(self: *const URL, arena: Allocator) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty; var aw = std.Io.Writer.Allocating.init(arena);
try self.uri.writeToStream(.{ try self.uri.writeToStream(&aw.writer, .{
.scheme = true, .scheme = true,
.authentication = true, .authentication = true,
.authority = true, .authority = true,
.path = uriComponentNullStr(self.uri.path).len > 0, .path = uriComponentNullStr(self.uri.path).len > 0,
}, buf.writer(arena)); });
if (self.search_params.get_size() > 0) { if (self.search_params.get_size() > 0) {
try buf.append(arena, '?'); try aw.writer.writeByte('?');
try self.search_params.write(buf.writer(arena)); try self.search_params.write(&aw.writer);
} }
{ {
const fragment = uriComponentNullStr(self.uri.fragment); const fragment = uriComponentNullStr(self.uri.fragment);
if (fragment.len > 0) { if (fragment.len > 0) {
try buf.append(arena, '#'); try aw.writer.writeByte('#');
try buf.appendSlice(arena, fragment); try aw.writer.writeAll(fragment);
} }
} }
return buf.items; return aw.written();
} }
pub fn get_protocol(self: *URL, page: *Page) ![]const u8 { pub fn get_protocol(self: *URL, page: *Page) ![]const u8 {
@@ -174,17 +174,16 @@ pub const URL = struct {
} }
pub fn get_host(self: *URL, page: *Page) ![]const u8 { pub fn get_host(self: *URL, page: *Page) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty; var aw = std.Io.Writer.Allocating.init(page.arena);
try self.uri.writeToStream(&aw.writer, .{
try self.uri.writeToStream(.{
.scheme = false, .scheme = false,
.authentication = false, .authentication = false,
.authority = true, .authority = true,
.path = false, .path = false,
.query = false, .query = false,
.fragment = false, .fragment = false,
}, buf.writer(page.arena)); });
return buf.items; return aw.written();
} }
pub fn get_hostname(self: *URL) []const u8 { pub fn get_hostname(self: *URL) []const u8 {
@@ -195,9 +194,9 @@ pub const URL = struct {
const arena = page.arena; const arena = page.arena;
if (self.uri.port == null) return try arena.dupe(u8, ""); if (self.uri.port == null) return try arena.dupe(u8, "");
var buf: std.ArrayListUnmanaged(u8) = .empty; var aw = std.Io.Writer.Allocating.init(arena);
try std.fmt.formatInt(self.uri.port.?, 10, .lower, .{}, buf.writer(arena)); try aw.writer.printInt(self.uri.port.?, 10, .lower, .{});
return buf.items; return aw.written();
} }
pub fn get_pathname(self: *URL) []const u8 { pub fn get_pathname(self: *URL) []const u8 {
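The std.ArrayListUnmanaged(u8) + writer(arena) pattern used by these URL getters is replaced throughout by std.Io.Writer.Allocating. A minimal sketch of the new pattern, using the testing allocator here (the code above writes into an arena and returns aw.written() directly, so it never frees):

const std = @import("std");

test "build a string with an Allocating writer" {
    var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
    defer aw.deinit();

    try aw.writer.writeAll("https://example.com");
    try aw.writer.writeByte('/');
    // written() returns everything accumulated so far
    try std.testing.expectEqualStrings("https://example.com/", aw.written());
}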


@@ -123,7 +123,7 @@ fn collectForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, page: *Page
// probably want to implement (like disabled fieldsets), so we might want // probably want to implement (like disabled fieldsets), so we might want
// to stick with our own walker even if fix libdom to properly support // to stick with our own walker even if fix libdom to properly support
// dynamically added elements. // dynamically added elements.
const node_list = try @import("../dom/css.zig").querySelectorAll(arena, @alignCast(@ptrCast(form)), "input,select,button,textarea"); const node_list = try @import("../dom/css.zig").querySelectorAll(arena, @ptrCast(@alignCast(form)), "input,select,button,textarea");
const nodes = node_list.nodes.items; const nodes = node_list.nodes.items;
var entries: kv.List = .{}; var entries: kv.List = .{};
@@ -220,7 +220,7 @@ fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u
if (is_multiple == false) { if (is_multiple == false) {
const option = try parser.optionCollectionItem(options, @intCast(selected_index)); const option = try parser.optionCollectionItem(options, @intCast(selected_index));
if (try parser.elementGetAttribute(@alignCast(@ptrCast(option)), "disabled") != null) { if (try parser.elementGetAttribute(@ptrCast(@alignCast(option)), "disabled") != null) {
return; return;
} }
const value = try parser.optionGetValue(option); const value = try parser.optionGetValue(option);
@@ -232,7 +232,7 @@ fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u
// we can go directly to the first one // we can go directly to the first one
for (@intCast(selected_index)..len) |i| { for (@intCast(selected_index)..len) |i| {
const option = try parser.optionCollectionItem(options, @intCast(i)); const option = try parser.optionCollectionItem(options, @intCast(i));
if (try parser.elementGetAttribute(@alignCast(@ptrCast(option)), "disabled") != null) { if (try parser.elementGetAttribute(@ptrCast(@alignCast(option)), "disabled") != null) {
continue; continue;
} }


@@ -352,7 +352,7 @@ pub const XMLHttpRequest = struct {
return self.headers.append( return self.headers.append(
self.arena, self.arena,
try std.fmt.allocPrintZ(self.arena, "{s}: {s}", .{ name, value }), try std.fmt.allocPrintSentinel(self.arena, "{s}: {s}", .{ name, value }, 0),
); );
} }
@@ -393,19 +393,19 @@ pub const XMLHttpRequest = struct {
} }
fn httpStartCallback(transfer: *Http.Transfer) !void { fn httpStartCallback(transfer: *Http.Transfer) !void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(transfer.ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
log.debug(.http, "request start", .{ .method = self.method, .url = self.url, .source = "xhr" }); log.debug(.http, "request start", .{ .method = self.method, .url = self.url, .source = "xhr" });
self.transfer = transfer; self.transfer = transfer;
} }
fn httpHeaderCallback(transfer: *Http.Transfer, header: Http.Header) !void { fn httpHeaderCallback(transfer: *Http.Transfer, header: Http.Header) !void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(transfer.ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
const joined = try std.fmt.allocPrint(self.arena, "{s}: {s}", .{ header.name, header.value }); const joined = try std.fmt.allocPrint(self.arena, "{s}: {s}", .{ header.name, header.value });
try self.response_headers.append(self.arena, joined); try self.response_headers.append(self.arena, joined);
} }
fn httpHeaderDoneCallback(transfer: *Http.Transfer) !void { fn httpHeaderDoneCallback(transfer: *Http.Transfer) !void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(transfer.ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
const header = &transfer.response_header.?; const header = &transfer.response_header.?;
@@ -441,7 +441,7 @@ pub const XMLHttpRequest = struct {
} }
fn httpDataCallback(transfer: *Http.Transfer, data: []const u8) !void { fn httpDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(transfer.ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(transfer.ctx));
try self.response_bytes.appendSlice(self.arena, data); try self.response_bytes.appendSlice(self.arena, data);
const now = std.time.milliTimestamp(); const now = std.time.milliTimestamp();
@@ -459,7 +459,7 @@ pub const XMLHttpRequest = struct {
} }
fn httpDoneCallback(ctx: *anyopaque) !void { fn httpDoneCallback(ctx: *anyopaque) !void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(ctx));
log.info(.http, "request complete", .{ log.info(.http, "request complete", .{
.source = "xhr", .source = "xhr",
@@ -484,7 +484,7 @@ pub const XMLHttpRequest = struct {
} }
fn httpErrorCallback(ctx: *anyopaque, err: anyerror) void { fn httpErrorCallback(ctx: *anyopaque, err: anyerror) void {
const self: *XMLHttpRequest = @alignCast(@ptrCast(ctx)); const self: *XMLHttpRequest = @ptrCast(@alignCast(ctx));
// http client will close it after an error, it isn't safe to keep around // http client will close it after an error, it isn't safe to keep around
self.transfer = null; self.transfer = null;
self.onErr(err); self.onErr(err);
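std.fmt.allocPrintZ no longer exists in 0.15; allocPrintSentinel takes the terminator as an explicit trailing argument, which is why a 0 is appended to these calls. A small sketch (the header name and value are made up):

const std = @import("std");

test "allocPrintSentinel replaces allocPrintZ" {
    const allocator = std.testing.allocator;
    // the trailing 0 is the sentinel, so the result is a [:0]u8
    const header = try std.fmt.allocPrintSentinel(allocator, "{s}: {s}", .{ "Accept", "text/html" }, 0);
    defer allocator.free(header);
    try std.testing.expectEqualStrings("Accept: text/html", header);
}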


@@ -34,13 +34,13 @@ pub const XMLSerializer = struct {
} }
pub fn _serializeToString(_: *const XMLSerializer, root: *parser.Node, page: *Page) ![]const u8 { pub fn _serializeToString(_: *const XMLSerializer, root: *parser.Node, page: *Page) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty; var aw = std.Io.Writer.Allocating.init(page.call_arena);
switch (try parser.nodeType(root)) { switch (try parser.nodeType(root)) {
.document => try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), .{}, buf.writer(page.call_arena)), .document => try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), .{}, &aw.writer),
.document_type => try dump.writeDocType(@as(*parser.DocumentType, @ptrCast(root)), buf.writer(page.call_arena)), .document_type => try dump.writeDocType(@as(*parser.DocumentType, @ptrCast(root)), &aw.writer),
else => try dump.writeNode(root, .{}, buf.writer(page.call_arena)), else => try dump.writeNode(root, .{}, &aw.writer),
} }
return buf.items; return aw.written();
} }
}; };


@@ -211,11 +211,11 @@ pub const Writer = struct {
exclude_root: bool = false, exclude_root: bool = false,
}; };
pub fn jsonStringify(self: *const Writer, w: anytype) !void { pub fn jsonStringify(self: *const Writer, w: anytype) error{WriteFailed}!void {
if (self.exclude_root) { if (self.exclude_root) {
_ = self.writeChildren(self.root, 1, w) catch |err| { _ = self.writeChildren(self.root, 1, w) catch |err| {
log.err(.cdp, "node writeChildren", .{ .err = err }); log.err(.cdp, "node writeChildren", .{ .err = err });
return error.OutOfMemory; return error.WriteFailed;
}; };
} else { } else {
self.toJSON(self.root, 0, w) catch |err| { self.toJSON(self.root, 0, w) catch |err| {
@@ -223,7 +223,7 @@ pub const Writer = struct {
// @TypeOf(w).Error. In other words, our code can't return its own // @TypeOf(w).Error. In other words, our code can't return its own
// error, we can only return a writer error. Kinda sucks. // error, we can only return a writer error. Kinda sucks.
log.err(.cdp, "node toJSON stringify", .{ .err = err }); log.err(.cdp, "node toJSON stringify", .{ .err = err });
return error.OutOfMemory; return error.WriteFailed;
}; };
} }
} }
@@ -425,7 +425,7 @@ test "cdp Node: Writer" {
{ {
const node = try registry.register(doc.asNode()); const node = try registry.register(doc.asNode());
const json = try std.json.stringifyAlloc(testing.allocator, Writer{ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
.root = node, .root = node,
.depth = 0, .depth = 0,
.exclude_root = false, .exclude_root = false,
@@ -465,7 +465,7 @@ test "cdp Node: Writer" {
{ {
const node = registry.lookup_by_id.get(1).?; const node = registry.lookup_by_id.get(1).?;
const json = try std.json.stringifyAlloc(testing.allocator, Writer{ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
.root = node, .root = node,
.depth = 1, .depth = 1,
.exclude_root = false, .exclude_root = false,
@@ -520,7 +520,7 @@ test "cdp Node: Writer" {
{ {
const node = registry.lookup_by_id.get(1).?; const node = registry.lookup_by_id.get(1).?;
const json = try std.json.stringifyAlloc(testing.allocator, Writer{ const json = try std.json.Stringify.valueAlloc(testing.allocator, Writer{
.root = node, .root = node,
.depth = -1, .depth = -1,
.exclude_root = true, .exclude_root = true,
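std.json.stringifyAlloc becomes std.json.Stringify.valueAlloc, and custom jsonStringify implementations now surface failures as error.WriteFailed instead of overloading OutOfMemory. A minimal sketch of the new allocation-based call (the Point type is illustrative):

const std = @import("std");

const Point = struct { x: i32, y: i32 };

test "Stringify.valueAlloc replaces stringifyAlloc" {
    const json = try std.json.Stringify.valueAlloc(std.testing.allocator, Point{ .x = 1, .y = 2 }, .{});
    defer std.testing.allocator.free(json);
    try std.testing.expectEqualStrings("{\"x\":1,\"y\":2}", json);
}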


@@ -487,58 +487,58 @@ pub fn BrowserContext(comptime CDP_T: type) type {
} }
pub fn onPageRemove(ctx: *anyopaque, _: Notification.PageRemove) !void { pub fn onPageRemove(ctx: *anyopaque, _: Notification.PageRemove) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
try @import("domains/page.zig").pageRemove(self); try @import("domains/page.zig").pageRemove(self);
} }
pub fn onPageCreated(ctx: *anyopaque, page: *Page) !void { pub fn onPageCreated(ctx: *anyopaque, page: *Page) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
return @import("domains/page.zig").pageCreated(self, page); return @import("domains/page.zig").pageCreated(self, page);
} }
pub fn onPageNavigate(ctx: *anyopaque, msg: *const Notification.PageNavigate) !void { pub fn onPageNavigate(ctx: *anyopaque, msg: *const Notification.PageNavigate) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
return @import("domains/page.zig").pageNavigate(self.notification_arena, self, msg); return @import("domains/page.zig").pageNavigate(self.notification_arena, self, msg);
} }
pub fn onPageNavigated(ctx: *anyopaque, msg: *const Notification.PageNavigated) !void { pub fn onPageNavigated(ctx: *anyopaque, msg: *const Notification.PageNavigated) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
return @import("domains/page.zig").pageNavigated(self, msg); return @import("domains/page.zig").pageNavigated(self, msg);
} }
pub fn onHttpRequestStart(ctx: *anyopaque, msg: *const Notification.RequestStart) !void { pub fn onHttpRequestStart(ctx: *anyopaque, msg: *const Notification.RequestStart) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
try @import("domains/network.zig").httpRequestStart(self.notification_arena, self, msg); try @import("domains/network.zig").httpRequestStart(self.notification_arena, self, msg);
} }
pub fn onHttpRequestIntercept(ctx: *anyopaque, msg: *const Notification.RequestIntercept) !void { pub fn onHttpRequestIntercept(ctx: *anyopaque, msg: *const Notification.RequestIntercept) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
try @import("domains/fetch.zig").requestIntercept(self.notification_arena, self, msg); try @import("domains/fetch.zig").requestIntercept(self.notification_arena, self, msg);
} }
pub fn onHttpRequestFail(ctx: *anyopaque, msg: *const Notification.RequestFail) !void { pub fn onHttpRequestFail(ctx: *anyopaque, msg: *const Notification.RequestFail) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
return @import("domains/network.zig").httpRequestFail(self.notification_arena, self, msg); return @import("domains/network.zig").httpRequestFail(self.notification_arena, self, msg);
} }
pub fn onHttpResponseHeadersDone(ctx: *anyopaque, msg: *const Notification.ResponseHeaderDone) !void { pub fn onHttpResponseHeadersDone(ctx: *anyopaque, msg: *const Notification.ResponseHeaderDone) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
return @import("domains/network.zig").httpResponseHeaderDone(self.notification_arena, self, msg); return @import("domains/network.zig").httpResponseHeaderDone(self.notification_arena, self, msg);
} }
pub fn onHttpRequestDone(ctx: *anyopaque, msg: *const Notification.RequestDone) !void { pub fn onHttpRequestDone(ctx: *anyopaque, msg: *const Notification.RequestDone) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
return @import("domains/network.zig").httpRequestDone(self.notification_arena, self, msg); return @import("domains/network.zig").httpRequestDone(self.notification_arena, self, msg);
} }
pub fn onHttpResponseData(ctx: *anyopaque, msg: *const Notification.ResponseData) !void { pub fn onHttpResponseData(ctx: *anyopaque, msg: *const Notification.ResponseData) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
const arena = self.arena; const arena = self.arena;
const id = msg.transfer.id; const id = msg.transfer.id;
@@ -550,7 +550,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
} }
pub fn onHttpRequestAuthRequired(ctx: *anyopaque, data: *const Notification.RequestAuthRequired) !void { pub fn onHttpRequestAuthRequired(ctx: *anyopaque, data: *const Notification.RequestAuthRequired) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
defer self.resetNotificationArena(); defer self.resetNotificationArena();
try @import("domains/fetch.zig").requestAuthRequired(self.notification_arena, self, data); try @import("domains/fetch.zig").requestAuthRequired(self.notification_arena, self, data);
} }
@@ -566,7 +566,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
} }
pub fn onInspectorResponse(ctx: *anyopaque, _: u32, msg: []const u8) void { pub fn onInspectorResponse(ctx: *anyopaque, _: u32, msg: []const u8) void {
sendInspectorMessage(@alignCast(@ptrCast(ctx)), msg) catch |err| { sendInspectorMessage(@ptrCast(@alignCast(ctx)), msg) catch |err| {
log.err(.cdp, "send inspector response", .{ .err = err }); log.err(.cdp, "send inspector response", .{ .err = err });
}; };
} }
@@ -583,7 +583,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
log.debug(.cdp, "inspector event", .{ .method = method }); log.debug(.cdp, "inspector event", .{ .method = method });
} }
sendInspectorMessage(@alignCast(@ptrCast(ctx)), msg) catch |err| { sendInspectorMessage(@ptrCast(@alignCast(ctx)), msg) catch |err| {
log.err(.cdp, "send inspector event", .{ .err = err }); log.err(.cdp, "send inspector event", .{ .err = err });
}; };
} }


@@ -372,7 +372,7 @@ fn getNode(arena: Allocator, browser_context: anytype, node_id: ?Node.Id, backen
if (object_id) |object_id_| { if (object_id) |object_id_| {
// Retrieve the object from which ever context it is in. // Retrieve the object from which ever context it is in.
const parser_node = try browser_context.inspector.getNodePtr(arena, object_id_); const parser_node = try browser_context.inspector.getNodePtr(arena, object_id_);
return try browser_context.node_registry.register(@alignCast(@ptrCast(parser_node))); return try browser_context.node_registry.register(@ptrCast(@alignCast(parser_node)));
} }
return error.MissingParams; return error.MissingParams;
} }


@@ -315,10 +315,10 @@ fn continueWithAuth(cmd: anytype) !void {
// restart the request with the provided credentials. // restart the request with the provided credentials.
const arena = transfer.arena.allocator(); const arena = transfer.arena.allocator();
transfer.updateCredentials( transfer.updateCredentials(
try std.fmt.allocPrintZ(arena, "{s}:{s}", .{ try std.fmt.allocPrintSentinel(arena, "{s}:{s}", .{
params.authChallengeResponse.username, params.authChallengeResponse.username,
params.authChallengeResponse.password, params.authChallengeResponse.password,
}), }, 0),
); );
transfer.reset(); transfer.reset();


@@ -81,7 +81,7 @@ fn setExtraHTTPHeaders(cmd: anytype) !void {
try extra_headers.ensureTotalCapacity(arena, params.headers.map.count()); try extra_headers.ensureTotalCapacity(arena, params.headers.map.count());
var it = params.headers.map.iterator(); var it = params.headers.map.iterator();
while (it.next()) |header| { while (it.next()) |header| {
const header_string = try std.fmt.allocPrintZ(arena, "{s}: {s}", .{ header.key_ptr.*, header.value_ptr.* }); const header_string = try std.fmt.allocPrintSentinel(arena, "{s}: {s}", .{ header.key_ptr.*, header.value_ptr.* }, 0);
extra_headers.appendAssumeCapacity(header_string); extra_headers.appendAssumeCapacity(header_string);
} }
@@ -296,58 +296,61 @@ pub const TransferAsRequestWriter = struct {
}; };
} }
pub fn jsonStringify(self: *const TransferAsRequestWriter, writer: anytype) !void { pub fn jsonStringify(self: *const TransferAsRequestWriter, jws: anytype) !void {
const stream = writer.stream; self._jsonStringify(jws) catch return error.WriteFailed;
}
fn _jsonStringify(self: *const TransferAsRequestWriter, jws: anytype) !void {
const writer = jws.writer;
const transfer = self.transfer; const transfer = self.transfer;
try writer.beginObject(); try jws.beginObject();
{ {
try writer.objectField("url"); try jws.objectField("url");
try writer.beginWriteRaw(); try jws.beginWriteRaw();
try stream.writeByte('\"'); try writer.writeByte('\"');
try transfer.uri.writeToStream(.{ try transfer.uri.writeToStream(writer, .{
.scheme = true, .scheme = true,
.authentication = true, .authentication = true,
.authority = true, .authority = true,
.path = true, .path = true,
.query = true, .query = true,
}, stream); });
try stream.writeByte('\"'); try writer.writeByte('\"');
writer.endWriteRaw(); jws.endWriteRaw();
} }
{ {
if (transfer.uri.fragment) |frag| { if (transfer.uri.fragment) |frag| {
try writer.objectField("urlFragment"); try jws.objectField("urlFragment");
try writer.beginWriteRaw(); try jws.beginWriteRaw();
try stream.writeAll("\"#"); try writer.writeAll("\"#");
try stream.writeAll(frag.percent_encoded); try writer.writeAll(frag.percent_encoded);
try stream.writeByte('\"'); try writer.writeByte('\"');
writer.endWriteRaw(); jws.endWriteRaw();
} }
} }
{ {
try writer.objectField("method"); try jws.objectField("method");
try writer.write(@tagName(transfer.req.method)); try jws.write(@tagName(transfer.req.method));
} }
{ {
try writer.objectField("hasPostData"); try jws.objectField("hasPostData");
try writer.write(transfer.req.body != null); try jws.write(transfer.req.body != null);
} }
{ {
try writer.objectField("headers"); try jws.objectField("headers");
try writer.beginObject(); try jws.beginObject();
var it = transfer.req.headers.iterator(); var it = transfer.req.headers.iterator();
while (it.next()) |hdr| { while (it.next()) |hdr| {
try writer.objectField(hdr.name); try jws.objectField(hdr.name);
try writer.write(hdr.value); try jws.write(hdr.value);
} }
try writer.endObject(); try jws.endObject();
} }
try writer.endObject(); try jws.endObject();
} }
}; };
@@ -362,35 +365,39 @@ const TransferAsResponseWriter = struct {
}; };
} }
pub fn jsonStringify(self: *const TransferAsResponseWriter, writer: anytype) !void { pub fn jsonStringify(self: *const TransferAsResponseWriter, jws: anytype) !void {
const stream = writer.stream; self._jsonStringify(jws) catch return error.WriteFailed;
}
fn _jsonStringify(self: *const TransferAsResponseWriter, jws: anytype) !void {
const writer = jws.writer;
const transfer = self.transfer; const transfer = self.transfer;
try writer.beginObject(); try jws.beginObject();
{ {
try writer.objectField("url"); try jws.objectField("url");
try writer.beginWriteRaw(); try jws.beginWriteRaw();
try stream.writeByte('\"'); try writer.writeByte('\"');
try transfer.uri.writeToStream(.{ try transfer.uri.writeToStream(writer, .{
.scheme = true, .scheme = true,
.authentication = true, .authentication = true,
.authority = true, .authority = true,
.path = true, .path = true,
.query = true, .query = true,
}, stream); });
try stream.writeByte('\"'); try writer.writeByte('\"');
writer.endWriteRaw(); jws.endWriteRaw();
} }
if (transfer.response_header) |*rh| { if (transfer.response_header) |*rh| {
// it should not be possible for this to be false, but I'm not // it should not be possible for this to be false, but I'm not
// feeling brave today. // feeling brave today.
const status = rh.status; const status = rh.status;
try writer.objectField("status"); try jws.objectField("status");
try writer.write(status); try jws.write(status);
try writer.objectField("statusText"); try jws.objectField("statusText");
try writer.write(@as(std.http.Status, @enumFromInt(status)).phrase() orelse "Unknown"); try jws.write(@as(std.http.Status, @enumFromInt(status)).phrase() orelse "Unknown");
} }
{ {
@@ -410,10 +417,10 @@ const TransferAsResponseWriter = struct {
} }
} }
try writer.objectField("headers"); try jws.objectField("headers");
try writer.write(std.json.ArrayHashMap([]const u8){ .map = map }); try jws.write(std.json.ArrayHashMap([]const u8){ .map = map });
} }
try writer.endObject(); try jws.endObject();
} }
}; };
@@ -426,20 +433,23 @@ const DocumentUrlWriter = struct {
}; };
} }
pub fn jsonStringify(self: *const DocumentUrlWriter, writer: anytype) !void { pub fn jsonStringify(self: *const DocumentUrlWriter, jws: anytype) !void {
const stream = writer.stream; self._jsonStringify(jws) catch return error.WriteFailed;
}
fn _jsonStringify(self: *const DocumentUrlWriter, jws: anytype) !void {
const writer = jws.writer;
try writer.beginWriteRaw(); try jws.beginWriteRaw();
try stream.writeByte('\"'); try writer.writeByte('\"');
try self.uri.writeToStream(.{ try self.uri.writeToStream(writer, .{
.scheme = true, .scheme = true,
.authentication = true, .authentication = true,
.authority = true, .authority = true,
.path = true, .path = true,
.query = true, .query = true,
}, stream); });
try stream.writeByte('\"'); try writer.writeByte('\"');
writer.endWriteRaw(); jws.endWriteRaw();
} }
}; };
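std.Uri.writeToStream now takes the destination *std.Io.Writer as its first argument, with the component flags second, which is what the URL, request and response writers above switch to. A small sketch under that assumption, checking only a prefix to keep it robust:

const std = @import("std");

test "Uri.writeToStream takes the writer first" {
    var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
    defer aw.deinit();

    const uri = try std.Uri.parse("https://example.com/search?q=zig");
    try uri.writeToStream(&aw.writer, .{ .scheme = true, .authority = true, .path = true, .query = true });
    try std.testing.expect(std.mem.startsWith(u8, aw.written(), "https://example.com"));
}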


@@ -170,7 +170,7 @@ pub const CookieWriter = struct {
self.writeCookies(w) catch |err| { self.writeCookies(w) catch |err| {
// The only error our jsonStringify method can return is @TypeOf(w).Error. // The only error our jsonStringify method can return is @TypeOf(w).Error.
log.err(.cdp, "json stringify", .{ .err = err }); log.err(.cdp, "json stringify", .{ .err = err });
return error.OutOfMemory; return error.WriteFailed;
}; };
} }


@@ -304,19 +304,17 @@ fn sendMessageToTarget(cmd: anytype) !void {
} }
const Capture = struct { const Capture = struct {
allocator: std.mem.Allocator, aw: std.Io.Writer.Allocating,
buf: std.ArrayListUnmanaged(u8),
pub fn sendJSON(self: *@This(), message: anytype) !void { pub fn sendJSON(self: *@This(), message: anytype) !void {
return std.json.stringify(message, .{ return std.json.Stringify.value(message, .{
.emit_null_optional_fields = false, .emit_null_optional_fields = false,
}, self.buf.writer(self.allocator)); }, &self.aw.writer);
} }
}; };
var capture = Capture{ var capture = Capture{
.buf = .{}, .aw = .init(cmd.arena),
.allocator = cmd.arena,
}; };
cmd.cdp.dispatch(cmd.arena, &capture, params.message) catch |err| { cmd.cdp.dispatch(cmd.arena, &capture, params.message) catch |err| {
@@ -325,7 +323,7 @@ fn sendMessageToTarget(cmd: anytype) !void {
}; };
try cmd.sendEvent("Target.receivedMessageFromTarget", .{ try cmd.sendEvent("Target.receivedMessageFromTarget", .{
.message = capture.buf.items, .message = capture.aw.written(),
.sessionId = params.sessionId, .sessionId = params.sessionId,
}, .{}); }, .{});
} }
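The Capture change above pairs std.json.Stringify.value with an Allocating writer to keep the old capture-to-buffer behaviour. A minimal sketch of that combination (the payload is illustrative):

const std = @import("std");

test "Stringify.value into an Allocating writer" {
    var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
    defer aw.deinit();

    try std.json.Stringify.value(.{ .ok = true }, .{ .emit_null_optional_fields = false }, &aw.writer);
    try std.testing.expectEqualStrings("{\"ok\":true}", aw.written());
}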


@@ -50,10 +50,10 @@ const Client = struct {
}; };
} }
pub fn sendJSON(self: *Client, message: anytype, opts: json.StringifyOptions) !void { pub fn sendJSON(self: *Client, message: anytype, opts: json.Stringify.Options) !void {
var opts_copy = opts; var opts_copy = opts;
opts_copy.whitespace = .indent_2; opts_copy.whitespace = .indent_2;
const serialized = try json.stringifyAlloc(self.allocator, message, opts_copy); const serialized = try json.Stringify.valueAlloc(self.allocator, message, opts_copy);
try self.serialized.append(self.allocator, serialized); try self.serialized.append(self.allocator, serialized);
const value = try json.parseFromSliceLeaky(json.Value, self.allocator, serialized, .{}); const value = try json.parseFromSliceLeaky(json.Value, self.allocator, serialized, .{});
@@ -131,7 +131,7 @@ const TestContext = struct {
pub fn processMessage(self: *TestContext, msg: anytype) !void { pub fn processMessage(self: *TestContext, msg: anytype) !void {
var json_message: []const u8 = undefined; var json_message: []const u8 = undefined;
if (@typeInfo(@TypeOf(msg)) != .pointer) { if (@typeInfo(@TypeOf(msg)) != .pointer) {
json_message = try std.json.stringifyAlloc(self.arena.allocator(), msg, .{}); json_message = try std.json.Stringify.valueAlloc(self.arena.allocator(), msg, .{});
} else { } else {
// assume this is a string we want to send as-is, if it isn't, we'll // assume this is a string we want to send as-is, if it isn't, we'll
// get a compile error, so no big deal. // get a compile error, so no big deal.
@@ -189,7 +189,7 @@ const TestContext = struct {
index: ?usize = null, index: ?usize = null,
}; };
pub fn expectSent(self: *TestContext, expected: anytype, opts: SentOpts) !void { pub fn expectSent(self: *TestContext, expected: anytype, opts: SentOpts) !void {
const serialized = try json.stringifyAlloc(self.arena.allocator(), expected, .{ const serialized = try json.Stringify.valueAlloc(self.arena.allocator(), expected, .{
.whitespace = .indent_2, .whitespace = .indent_2,
.emit_null_optional_fields = false, .emit_null_optional_fields = false,
}); });


@@ -67,10 +67,10 @@ pub const Date = struct {
return std.math.order(a.day, b.day); return std.math.order(a.day, b.day);
} }
pub fn format(self: Date, comptime _: []const u8, _: std.fmt.FormatOptions, out: anytype) !void { pub fn format(self: Date, writer: *std.Io.Writer) !void {
var buf: [11]u8 = undefined; var buf: [11]u8 = undefined;
const n = writeDate(&buf, self); const n = writeDate(&buf, self);
try out.writeAll(buf[0..n]); try writer.writeAll(buf[0..n]);
} }
pub fn jsonStringify(self: Date, out: anytype) !void { pub fn jsonStringify(self: Date, out: anytype) !void {
@@ -167,10 +167,10 @@ pub const Time = struct {
return std.math.order(a.micros, b.micros); return std.math.order(a.micros, b.micros);
} }
pub fn format(self: Time, comptime _: []const u8, _: std.fmt.FormatOptions, out: anytype) !void { pub fn format(self: Time, writer: *std.Io.Writer) !void {
var buf: [15]u8 = undefined; var buf: [15]u8 = undefined;
const n = writeTime(&buf, self); const n = writeTime(&buf, self);
try out.writeAll(buf[0..n]); try writer.writeAll(buf[0..n]);
} }
pub fn jsonStringify(self: Time, out: anytype) !void { pub fn jsonStringify(self: Time, out: anytype) !void {
@@ -464,10 +464,10 @@ pub const DateTime = struct {
return std.math.order(a.micros, b.micros); return std.math.order(a.micros, b.micros);
} }
pub fn format(self: DateTime, comptime _: []const u8, _: std.fmt.FormatOptions, out: anytype) !void { pub fn format(self: DateTime, writer: *std.Io.Writer) !void {
var buf: [28]u8 = undefined; var buf: [28]u8 = undefined;
const n = self.bufWrite(&buf); const n = self.bufWrite(&buf);
try out.writeAll(buf[0..n]); try writer.writeAll(buf[0..n]);
} }
pub fn jsonStringify(self: DateTime, out: anytype) !void { pub fn jsonStringify(self: DateTime, out: anytype) !void {
@@ -510,11 +510,11 @@ fn writeDate(into: []u8, date: Date) u8 {
// the padding (we need to do it ourselfs) // the padding (we need to do it ourselfs)
const year = date.year; const year = date.year;
if (year < 0) { if (year < 0) {
_ = std.fmt.formatIntBuf(into[1..], @as(u16, @intCast(year * -1)), 10, .lower, .{ .width = 4, .fill = '0' }); _ = std.fmt.printInt(into[1..], @as(u16, @intCast(year * -1)), 10, .lower, .{ .width = 4, .fill = '0' });
into[0] = '-'; into[0] = '-';
buf = into[5..]; buf = into[5..];
} else { } else {
_ = std.fmt.formatIntBuf(into, @as(u16, @intCast(year)), 10, .lower, .{ .width = 4, .fill = '0' }); _ = std.fmt.printInt(into, @as(u16, @intCast(year)), 10, .lower, .{ .width = 4, .fill = '0' });
buf = into[4..]; buf = into[4..];
} }
@@ -541,12 +541,12 @@ fn writeTime(into: []u8, time: Time) u8 {
if (@rem(micros, 1000) == 0) { if (@rem(micros, 1000) == 0) {
into[8] = '.'; into[8] = '.';
_ = std.fmt.formatIntBuf(into[9..12], micros / 1000, 10, .lower, .{ .width = 3, .fill = '0' }); _ = std.fmt.printInt(into[9..12], micros / 1000, 10, .lower, .{ .width = 3, .fill = '0' });
return 12; return 12;
} }
into[8] = '.'; into[8] = '.';
_ = std.fmt.formatIntBuf(into[9..15], micros, 10, .lower, .{ .width = 6, .fill = '0' }); _ = std.fmt.printInt(into[9..15], micros, 10, .lower, .{ .width = 6, .fill = '0' });
return 15; return 15;
} }
@@ -730,7 +730,7 @@ test "Date: json" {
{ {
// date, positive year // date, positive year
const date = Date{ .year = 2023, .month = 9, .day = 22 }; const date = Date{ .year = 2023, .month = 9, .day = 22 };
const out = try std.json.stringifyAlloc(testing.allocator, date, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, date, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"2023-09-22\"", out); try testing.expectString("\"2023-09-22\"", out);
} }
@@ -738,7 +738,7 @@ test "Date: json" {
{ {
// date, negative year // date, negative year
const date = Date{ .year = -4, .month = 12, .day = 3 }; const date = Date{ .year = -4, .month = 12, .day = 3 };
const out = try std.json.stringifyAlloc(testing.allocator, date, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, date, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"-0004-12-03\"", out); try testing.expectString("\"-0004-12-03\"", out);
} }
@@ -754,13 +754,13 @@ test "Date: json" {
test "Date: format" { test "Date: format" {
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Date{ .year = 2023, .month = 5, .day = 22 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Date{ .year = 2023, .month = 5, .day = 22 }});
try testing.expectString("2023-05-22", out); try testing.expectString("2023-05-22", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Date{ .year = -102, .month = 12, .day = 9 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Date{ .year = -102, .month = 12, .day = 9 }});
try testing.expectString("-0102-12-09", out); try testing.expectString("-0102-12-09", out);
} }
} }
@@ -913,7 +913,7 @@ test "Time: json" {
{ {
// time no fraction // time no fraction
const time = Time{ .hour = 23, .min = 59, .sec = 2, .micros = 0 }; const time = Time{ .hour = 23, .min = 59, .sec = 2, .micros = 0 };
const out = try std.json.stringifyAlloc(testing.allocator, time, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, time, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"23:59:02\"", out); try testing.expectString("\"23:59:02\"", out);
} }
@@ -921,7 +921,7 @@ test "Time: json" {
{ {
// time, milliseconds only // time, milliseconds only
const time = Time{ .hour = 7, .min = 9, .sec = 32, .micros = 202000 }; const time = Time{ .hour = 7, .min = 9, .sec = 32, .micros = 202000 };
const out = try std.json.stringifyAlloc(testing.allocator, time, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, time, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"07:09:32.202\"", out); try testing.expectString("\"07:09:32.202\"", out);
} }
@@ -929,7 +929,7 @@ test "Time: json" {
{ {
// time, micros // time, micros
const time = Time{ .hour = 1, .min = 2, .sec = 3, .micros = 123456 }; const time = Time{ .hour = 1, .min = 2, .sec = 3, .micros = 123456 };
const out = try std.json.stringifyAlloc(testing.allocator, time, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, time, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"01:02:03.123456\"", out); try testing.expectString("\"01:02:03.123456\"", out);
} }
@@ -945,37 +945,37 @@ test "Time: json" {
test "Time: format" { test "Time: format" {
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 23, .min = 59, .sec = 59, .micros = 0 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 23, .min = 59, .sec = 59, .micros = 0 }});
try testing.expectString("23:59:59", out); try testing.expectString("23:59:59", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 12 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 12 }});
try testing.expectString("08:09:10.000012", out); try testing.expectString("08:09:10.000012", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 123 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 123 }});
try testing.expectString("08:09:10.000123", out); try testing.expectString("08:09:10.000123", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 1234 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 1234 }});
try testing.expectString("08:09:10.001234", out); try testing.expectString("08:09:10.001234", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 12345 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 12345 }});
try testing.expectString("08:09:10.012345", out); try testing.expectString("08:09:10.012345", out);
} }
{ {
var buf: [20]u8 = undefined; var buf: [20]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 123456 }}); const out = try std.fmt.bufPrint(&buf, "{f}", .{Time{ .hour = 8, .min = 9, .sec = 10, .micros = 123456 }});
try testing.expectString("08:09:10.123456", out); try testing.expectString("08:09:10.123456", out);
} }
} }
@@ -1625,7 +1625,7 @@ test "DateTime: json" {
{ {
// DateTime, time no fraction // DateTime, time no fraction
const dt = try DateTime.parse("2023-09-22T23:59:02Z", .rfc3339); const dt = try DateTime.parse("2023-09-22T23:59:02Z", .rfc3339);
const out = try std.json.stringifyAlloc(testing.allocator, dt, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, dt, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"2023-09-22T23:59:02Z\"", out); try testing.expectString("\"2023-09-22T23:59:02Z\"", out);
} }
@@ -1633,7 +1633,7 @@ test "DateTime: json" {
{ {
// time, milliseconds only // time, milliseconds only
const dt = try DateTime.parse("2023-09-22T07:09:32.202Z", .rfc3339); const dt = try DateTime.parse("2023-09-22T07:09:32.202Z", .rfc3339);
const out = try std.json.stringifyAlloc(testing.allocator, dt, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, dt, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"2023-09-22T07:09:32.202Z\"", out); try testing.expectString("\"2023-09-22T07:09:32.202Z\"", out);
} }
@@ -1641,7 +1641,7 @@ test "DateTime: json" {
{ {
// time, micros // time, micros
const dt = try DateTime.parse("-0004-12-03T01:02:03.123456Z", .rfc3339); const dt = try DateTime.parse("-0004-12-03T01:02:03.123456Z", .rfc3339);
const out = try std.json.stringifyAlloc(testing.allocator, dt, .{}); const out = try std.json.Stringify.valueAlloc(testing.allocator, dt, .{});
defer testing.allocator.free(out); defer testing.allocator.free(out);
try testing.expectString("\"-0004-12-03T01:02:03.123456Z\"", out); try testing.expectString("\"-0004-12-03T01:02:03.123456Z\"", out);
} }
@@ -1657,37 +1657,37 @@ test "DateTime: json" {
test "DateTime: format" { test "DateTime: format" {
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(2023, 5, 22, 23, 59, 59, 0)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(2023, 5, 22, 23, 59, 59, 0)});
try testing.expectString("2023-05-22T23:59:59Z", out); try testing.expectString("2023-05-22T23:59:59Z", out);
} }
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 12)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 12)});
try testing.expectString("2023-05-22T08:09:10.000012Z", out); try testing.expectString("2023-05-22T08:09:10.000012Z", out);
} }
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 123)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 123)});
try testing.expectString("2023-05-22T08:09:10.000123Z", out); try testing.expectString("2023-05-22T08:09:10.000123Z", out);
} }
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 1234)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(2023, 5, 22, 8, 9, 10, 1234)});
try testing.expectString("2023-05-22T08:09:10.001234Z", out); try testing.expectString("2023-05-22T08:09:10.001234Z", out);
} }
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(-102, 12, 9, 8, 9, 10, 12345)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(-102, 12, 9, 8, 9, 10, 12345)});
try testing.expectString("-0102-12-09T08:09:10.012345Z", out); try testing.expectString("-0102-12-09T08:09:10.012345Z", out);
} }
{ {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const out = try std.fmt.bufPrint(&buf, "{s}", .{try DateTime.initUTC(-102, 12, 9, 8, 9, 10, 123456)}); const out = try std.fmt.bufPrint(&buf, "{f}", .{try DateTime.initUTC(-102, 12, 9, 8, 9, 10, 123456)});
try testing.expectString("-0102-12-09T08:09:10.123456Z", out); try testing.expectString("-0102-12-09T08:09:10.123456Z", out);
} }
} }
@@ -2075,7 +2075,7 @@ test "DateTime: sub" {
fn expectDateTime(expected: []const u8, dt: DateTime) !void { fn expectDateTime(expected: []const u8, dt: DateTime) !void {
var buf: [30]u8 = undefined; var buf: [30]u8 = undefined;
const actual = try std.fmt.bufPrint(&buf, "{s}", .{dt}); const actual = try std.fmt.bufPrint(&buf, "{f}", .{dt});
try testing.expectString(expected, actual); try testing.expectString(expected, actual);
} }
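The format methods above drop the old (comptime fmt, options, out) signature: in 0.15 a formatter receives only a *std.Io.Writer and is selected with {f} rather than {s}. A minimal sketch (the Vec2 type is illustrative, not from this commit):

const std = @import("std");

const Vec2 = struct {
    x: i32,
    y: i32,

    pub fn format(self: Vec2, writer: *std.Io.Writer) !void {
        try writer.print("({d},{d})", .{ self.x, self.y });
    }
};

test "the new single-argument format method" {
    var buf: [32]u8 = undefined;
    const out = try std.fmt.bufPrint(&buf, "{f}", .{Vec2{ .x = 3, .y = 4 }});
    try std.testing.expectEqualStrings("(3,4)", out);
}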


@@ -69,9 +69,6 @@ next_request_id: u64 = 0,
// When handles has no more available easys, requests get queued. // When handles has no more available easys, requests get queued.
queue: TransferQueue, queue: TransferQueue,
// Memory pool for Queue nodes.
queue_node_pool: std.heap.MemoryPool(TransferQueue.Node),
// The main app allocator // The main app allocator
allocator: Allocator, allocator: Allocator,
@@ -90,15 +87,12 @@ notification: ?*Notification = null,
// restoring, this originally-configured value is what it goes to. // restoring, this originally-configured value is what it goes to.
http_proxy: ?[:0]const u8 = null, http_proxy: ?[:0]const u8 = null,
const TransferQueue = std.DoublyLinkedList(*Transfer); const TransferQueue = std.DoublyLinkedList;
pub fn init(allocator: Allocator, ca_blob: ?c.curl_blob, opts: Http.Opts) !*Client { pub fn init(allocator: Allocator, ca_blob: ?c.curl_blob, opts: Http.Opts) !*Client {
var transfer_pool = std.heap.MemoryPool(Transfer).init(allocator); var transfer_pool = std.heap.MemoryPool(Transfer).init(allocator);
errdefer transfer_pool.deinit(); errdefer transfer_pool.deinit();
var queue_node_pool = std.heap.MemoryPool(TransferQueue.Node).init(allocator);
errdefer queue_node_pool.deinit();
const client = try allocator.create(Client); const client = try allocator.create(Client);
errdefer allocator.destroy(client); errdefer allocator.destroy(client);
@@ -122,7 +116,6 @@ pub fn init(allocator: Allocator, ca_blob: ?c.curl_blob, opts: Http.Opts) !*Clie
.allocator = allocator, .allocator = allocator,
.http_proxy = opts.http_proxy, .http_proxy = opts.http_proxy,
.transfer_pool = transfer_pool, .transfer_pool = transfer_pool,
.queue_node_pool = queue_node_pool,
}; };
return client; return client;
@@ -136,13 +129,13 @@ pub fn deinit(self: *Client) void {
_ = c.curl_multi_cleanup(self.multi); _ = c.curl_multi_cleanup(self.multi);
self.transfer_pool.deinit(); self.transfer_pool.deinit();
self.queue_node_pool.deinit();
self.allocator.destroy(self); self.allocator.destroy(self);
} }
pub fn abort(self: *Client) void { pub fn abort(self: *Client) void {
while (self.handles.in_use.first) |node| { while (self.handles.in_use.first) |node| {
var transfer = Transfer.fromEasy(node.data.conn.easy) catch |err| { const handle: *Handle = @fieldParentPtr("node", node);
var transfer = Transfer.fromEasy(handle.conn.easy) catch |err| {
log.err(.http, "get private info", .{ .err = err, .source = "abort" }); log.err(.http, "get private info", .{ .err = err, .source = "abort" });
continue; continue;
}; };
@@ -152,15 +145,16 @@ pub fn abort(self: *Client) void {
var n = self.queue.first; var n = self.queue.first;
while (n) |node| { while (n) |node| {
const transfer: *Transfer = @fieldParentPtr("_node", node);
self.transfer_pool.destroy(transfer);
n = node.next; n = node.next;
self.queue_node_pool.destroy(node);
} }
self.queue = .{}; self.queue = .{};
// Maybe a bit of overkill // Maybe a bit of overkill
// We can remove some (all?) of these once we're confident its right. // We can remove some (all?) of these once we're confident its right.
std.debug.assert(self.handles.in_use.first == null); std.debug.assert(self.handles.in_use.first == null);
std.debug.assert(self.handles.available.len == self.handles.handles.len); std.debug.assert(self.handles.available.len() == self.handles.handles.len);
if (builtin.mode == .Debug) { if (builtin.mode == .Debug) {
var running: c_int = undefined; var running: c_int = undefined;
std.debug.assert(c.curl_multi_perform(self.multi, &running) == c.CURLE_OK); std.debug.assert(c.curl_multi_perform(self.multi, &running) == c.CURLE_OK);
@@ -178,12 +172,11 @@ pub fn tick(self: *Client, opts: TickOpts) !bool {
break; break;
} }
const queue_node = self.queue.popFirst() orelse break; const queue_node = self.queue.popFirst() orelse break;
const req = queue_node.data; const transfer: *Transfer = @fieldParentPtr("_node", queue_node);
self.queue_node_pool.destroy(queue_node);
// we know this exists, because we checked isEmpty() above // we know this exists, because we checked isEmpty() above
const handle = self.handles.getFreeHandle().?; const handle = self.handles.getFreeHandle().?;
try self.makeRequest(handle, req); try self.makeRequest(handle, transfer);
} }
return self.perform(opts.timeout_ms, opts.poll_socket); return self.perform(opts.timeout_ms, opts.poll_socket);
} }
@@ -213,9 +206,7 @@ pub fn process(self: *Client, transfer: *Transfer) !void {
return self.makeRequest(handle, transfer); return self.makeRequest(handle, transfer);
} }
const node = try self.queue_node_pool.create(); self.queue.append(&transfer._node);
node.data = transfer;
self.queue.append(node);
} }
// See ScriptManager.blockingGet // See ScriptManager.blockingGet
@@ -442,7 +433,7 @@ fn endTransfer(self: *Client, transfer: *Transfer) void {
log.fatal(.http, "Failed to remove handle", .{ .err = err }); log.fatal(.http, "Failed to remove handle", .{ .err = err });
}; };
self.handles.release(handle); self.handles.release(self, handle);
transfer._handle = null; transfer._handle = null;
self.active -= 1; self.active -= 1;
} }
@@ -458,7 +449,7 @@ const Handles = struct {
in_use: HandleList, in_use: HandleList,
available: HandleList, available: HandleList,
const HandleList = std.DoublyLinkedList(*Handle); const HandleList = std.DoublyLinkedList;
// pointer to opts is not stable, don't hold a reference to it! // pointer to opts is not stable, don't hold a reference to it!
fn init(allocator: Allocator, client: *Client, ca_blob: ?c.curl_blob, opts: *const Http.Opts) !Handles { fn init(allocator: Allocator, client: *Client, ca_blob: ?c.curl_blob, opts: *const Http.Opts) !Handles {
@@ -470,8 +461,7 @@ const Handles = struct {
var available: HandleList = .{}; var available: HandleList = .{};
for (0..count) |i| { for (0..count) |i| {
handles[i] = try Handle.init(client, ca_blob, opts); handles[i] = try Handle.init(client, ca_blob, opts);
handles[i].node = .{ .data = &handles[i] }; available.append(&handles[i].node);
available.append(&handles[i].node.?);
} }
return .{ return .{
@@ -497,16 +487,19 @@ const Handles = struct {
node.prev = null; node.prev = null;
node.next = null; node.next = null;
self.in_use.append(node); self.in_use.append(node);
return node.data; return @as(*Handle, @fieldParentPtr("node", node));
} }
return null; return null;
} }
fn release(self: *Handles, handle: *Handle) void { fn release(self: *Handles, client: *Client, handle: *Handle) void {
// client.blocking is a handle without a node, it doesn't exist in if (handle == &client.blocking) {
// either the in_use or available lists. // the handle we've reserved for blocking request doesn't participate
const node = &(handle.node orelse return); // in the in_use/available pools
return;
}
var node = &handle.node;
self.in_use.remove(node); self.in_use.remove(node);
node.prev = null; node.prev = null;
node.next = null; node.next = null;
@@ -518,7 +511,7 @@ const Handles = struct {
const Handle = struct { const Handle = struct {
client: *Client, client: *Client,
conn: Http.Connection, conn: Http.Connection,
node: ?Handles.HandleList.Node, node: Handles.HandleList.Node,
// pointer to opts is not stable, don't hold a reference to it! // pointer to opts is not stable, don't hold a reference to it!
fn init(client: *Client, ca_blob: ?c.curl_blob, opts: *const Http.Opts) !Handle { fn init(client: *Client, ca_blob: ?c.curl_blob, opts: *const Http.Opts) !Handle {
@@ -534,8 +527,8 @@ const Handle = struct {
try errorCheck(c.curl_easy_setopt(easy, c.CURLOPT_WRITEFUNCTION, Transfer.dataCallback)); try errorCheck(c.curl_easy_setopt(easy, c.CURLOPT_WRITEFUNCTION, Transfer.dataCallback));
return .{ return .{
.node = .{},
.conn = conn, .conn = conn,
.node = null,
.client = client, .client = client,
}; };
} }
@@ -664,6 +657,9 @@ pub const Transfer = struct {
// incremented by reset func. // incremented by reset func.
_tries: u8 = 0, _tries: u8 = 0,
// for when a Transfer is queued in the client.queue
_node: std.DoublyLinkedList.Node = .{},
pub fn reset(self: *Transfer) void { pub fn reset(self: *Transfer) void {
self._redirecting = false; self._redirecting = false;
self._auth_challenge = null; self._auth_challenge = null;
@@ -678,7 +674,7 @@ pub const Transfer = struct {
fn deinit(self: *Transfer) void { fn deinit(self: *Transfer) void {
self.req.headers.deinit(); self.req.headers.deinit();
if (self._handle) |handle| { if (self._handle) |handle| {
self.client.handles.release(handle); self.client.handles.release(self.client, handle);
} }
self.arena.deinit(); self.arena.deinit();
self.client.transfer_pool.destroy(self); self.client.transfer_pool.destroy(self);
@@ -711,7 +707,7 @@ pub const Transfer = struct {
} }
} }
pub fn format(self: *const Transfer, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { pub fn format(self: *Transfer, writer: *std.Io.Writer) !void {
const req = self.req; const req = self.req;
return writer.print("{s} {s}", .{ @tagName(req.method), req.url }); return writer.print("{s} {s}", .{ @tagName(req.method), req.url });
} }
@@ -850,7 +846,7 @@ pub const Transfer = struct {
// libcurl should only ever emit 1 header at a time // libcurl should only ever emit 1 header at a time
std.debug.assert(header_count == 1); std.debug.assert(header_count == 1);
const easy: *c.CURL = @alignCast(@ptrCast(data)); const easy: *c.CURL = @ptrCast(@alignCast(data));
var transfer = fromEasy(easy) catch |err| { var transfer = fromEasy(easy) catch |err| {
log.err(.http, "get private info", .{ .err = err, .source = "header callback" }); log.err(.http, "get private info", .{ .err = err, .source = "header callback" });
return 0; return 0;
@@ -948,7 +944,7 @@ pub const Transfer = struct {
// libcurl should only ever emit 1 chunk at a time // libcurl should only ever emit 1 chunk at a time
std.debug.assert(chunk_count == 1); std.debug.assert(chunk_count == 1);
const easy: *c.CURL = @alignCast(@ptrCast(data)); const easy: *c.CURL = @ptrCast(@alignCast(data));
var transfer = fromEasy(easy) catch |err| { var transfer = fromEasy(easy) catch |err| {
log.err(.http, "get private info", .{ .err = err, .source = "body callback" }); log.err(.http, "get private info", .{ .err = err, .source = "body callback" });
return c.CURL_WRITEFUNC_ERROR; return c.CURL_WRITEFUNC_ERROR;
@@ -999,7 +995,7 @@ pub const Transfer = struct {
pub fn fromEasy(easy: *c.CURL) !*Transfer { pub fn fromEasy(easy: *c.CURL) !*Transfer {
var private: *anyopaque = undefined; var private: *anyopaque = undefined;
try errorCheck(c.curl_easy_getinfo(easy, c.CURLINFO_PRIVATE, &private)); try errorCheck(c.curl_easy_getinfo(easy, c.CURLINFO_PRIVATE, &private));
return @alignCast(@ptrCast(private)); return @ptrCast(@alignCast(private));
} }
pub fn fulfill(transfer: *Transfer, status: u16, headers: []const Http.Header, body: ?[]const u8) !void { pub fn fulfill(transfer: *Transfer, status: u16, headers: []const Http.Header, body: ?[]const u8) !void {
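std.DoublyLinkedList is no longer generic over a payload: nodes are embedded in the owning struct and the owner is recovered with @fieldParentPtr, which is what the new Transfer._node and Handle.node fields rely on. A minimal sketch (the Job type is illustrative):

const std = @import("std");

const Job = struct {
    id: u32,
    node: std.DoublyLinkedList.Node = .{},
};

test "intrusive DoublyLinkedList with @fieldParentPtr" {
    var list: std.DoublyLinkedList = .{};
    var a = Job{ .id = 1 };
    var b = Job{ .id = 2 };
    list.append(&a.node);
    list.append(&b.node);

    // the list stores bare nodes; recover the owner from the embedded field
    const first: *Job = @fieldParentPtr("node", list.first.?);
    try std.testing.expectEqual(@as(u32, 1), first.id);
    try std.testing.expectEqual(@as(usize, 2), list.len());
}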


@@ -58,11 +58,7 @@ pub fn init(allocator: Allocator, opts: Opts) !Http {
var adjusted_opts = opts; var adjusted_opts = opts;
if (opts.proxy_bearer_token) |bt| { if (opts.proxy_bearer_token) |bt| {
adjusted_opts.proxy_bearer_token = try std.fmt.allocPrintZ( adjusted_opts.proxy_bearer_token = try std.fmt.allocPrintSentinel(arena.allocator(), "Proxy-Authorization: Bearer {s}", .{bt}, 0);
arena.allocator(),
"Proxy-Authorization: Bearer {s}",
.{bt},
);
} }
var ca_blob: ?c.curl_blob = null; var ca_blob: ?c.curl_blob = null;
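Note: std.fmt.allocPrintZ is gone in 0.15.1; the replacement used above is allocPrintSentinel with an explicit sentinel. A quick sketch of the call (test values are made up):

const std = @import("std");

test "null-terminated formatting" {
    const gpa = std.testing.allocator;
    // Last argument is the sentinel; 0 gives the old allocPrintZ behaviour ([:0]u8).
    const header = try std.fmt.allocPrintSentinel(gpa, "Proxy-Authorization: Bearer {s}", .{"tok"}, 0);
    defer gpa.free(header);
    try std.testing.expectEqualStrings("Proxy-Authorization: Bearer tok", header);
}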

View File

@@ -45,7 +45,7 @@ pub fn Incrementing(comptime T: type, comptime prefix: []const u8) type {
const n = counter +% 1; const n = counter +% 1;
defer self.counter = n; defer self.counter = n;
const size = std.fmt.formatIntBuf(self.buffer[NUMERIC_START..], n, 10, .lower, .{}); const size = std.fmt.printInt(self.buffer[NUMERIC_START..], n, 10, .lower, .{});
return self.buffer[0 .. NUMERIC_START + size]; return self.buffer[0 .. NUMERIC_START + size];
} }
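Note: std.fmt.formatIntBuf was renamed; the 0.15.1 call is std.fmt.printInt, which still returns the number of bytes written. A tiny sketch:

const std = @import("std");

test "printInt into a fixed buffer" {
    var buf: [16]u8 = undefined;
    const size = std.fmt.printInt(&buf, @as(u32, 42), 10, .lower, .{});
    try std.testing.expectEqualStrings("42", buf[0..size]);
}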

View File

@@ -121,12 +121,16 @@ pub fn log(comptime scope: Scope, level: Level, comptime msg: []const u8, data:
std.debug.lockStdErr(); std.debug.lockStdErr();
defer std.debug.unlockStdErr(); defer std.debug.unlockStdErr();
logTo(scope, level, msg, data, std.io.getStdErr().writer()) catch |log_err| { var buf: [4096]u8 = undefined;
var stderr = std.fs.File.stderr();
var writer = stderr.writer(&buf);
logTo(scope, level, msg, data, &writer.interface) catch |log_err| {
std.debug.print("$time={d} $level=fatal $scope={s} $msg=\"log err\" err={s} log_msg=\"{s}\"", .{ timestamp(), @errorName(log_err), @tagName(scope), msg }); std.debug.print("$time={d} $level=fatal $scope={s} $msg=\"log err\" err={s} log_msg=\"{s}\"", .{ timestamp(), @errorName(log_err), @tagName(scope), msg });
}; };
} }
fn logTo(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, out: anytype) !void { fn logTo(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, out: *std.Io.Writer) !void {
comptime { comptime {
if (msg.len > 30) { if (msg.len > 30) {
@compileError("log msg cannot be more than 30 characters: '" ++ msg ++ "'"); @compileError("log msg cannot be more than 30 characters: '" ++ msg ++ "'");
@@ -139,12 +143,11 @@ fn logTo(comptime scope: Scope, level: Level, comptime msg: []const u8, data: an
} }
} }
var bw = std.io.bufferedWriter(out);
switch (opts.format) { switch (opts.format) {
.logfmt => try logLogfmt(scope, level, msg, data, bw.writer()), .logfmt => try logLogfmt(scope, level, msg, data, out),
.pretty => try logPretty(scope, level, msg, data, bw.writer()), .pretty => try logPretty(scope, level, msg, data, out),
} }
bw.flush() catch return; out.flush() catch return;
} }
fn logLogfmt(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, writer: anytype) !void { fn logLogfmt(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, writer: anytype) !void {
@@ -223,6 +226,10 @@ fn logPrettyPrefix(comptime scope: Scope, level: Level, comptime msg: []const u8
pub fn writeValue(comptime format: Format, value: anytype, writer: anytype) !void { pub fn writeValue(comptime format: Format, value: anytype, writer: anytype) !void {
const T = @TypeOf(value); const T = @TypeOf(value);
if (std.meta.hasMethod(T, "format")) {
return writer.print("{f}", .{value});
}
switch (@typeInfo(T)) { switch (@typeInfo(T)) {
.optional => { .optional => {
if (value) |v| { if (value) |v| {
@@ -248,7 +255,7 @@ pub fn writeValue(comptime format: Format, value: anytype, writer: anytype) !voi
.array => |arr| if (arr.child == u8) { .array => |arr| if (arr.child == u8) {
return writeString(format, value, writer); return writeString(format, value, writer);
}, },
else => return writer.print("{}", .{value}), else => return writer.print("{f}", .{value}),
}, },
else => {}, else => {},
}, },
@@ -341,16 +348,16 @@ fn elapsed() struct { time: f64, unit: []const u8 } {
const testing = @import("testing.zig"); const testing = @import("testing.zig");
test "log: data" { test "log: data" {
var buf: std.ArrayListUnmanaged(u8) = .{}; var aw = std.Io.Writer.Allocating.init(testing.allocator);
defer buf.deinit(testing.allocator); defer aw.deinit();
{ {
try logTo(.browser, .err, "nope", .{}, buf.writer(testing.allocator)); try logTo(.browser, .err, "nope", .{}, &aw.writer);
try testing.expectEqual("$time=1739795092929 $scope=browser $level=error $msg=nope\n", buf.items); try testing.expectEqual("$time=1739795092929 $scope=browser $level=error $msg=nope\n", aw.written());
} }
{ {
buf.clearRetainingCapacity(); aw.clearRetainingCapacity();
const string = try testing.allocator.dupe(u8, "spice_must_flow"); const string = try testing.allocator.dupe(u8, "spice_must_flow");
defer testing.allocator.free(string); defer testing.allocator.free(string);
@@ -367,28 +374,28 @@ test "log: data" {
.slice = string, .slice = string,
.err = error.Nope, .err = error.Nope,
.level = Level.warn, .level = Level.warn,
}, buf.writer(testing.allocator)); }, &aw.writer);
try testing.expectEqual("$time=1739795092929 $scope=http $level=warn $msg=\"a msg\" " ++ try testing.expectEqual("$time=1739795092929 $scope=http $level=warn $msg=\"a msg\" " ++
"cint=5 cfloat=3.43 int=-49 float=0.0003232 bt=true bf=false " ++ "cint=5 cfloat=3.43 int=-49 float=0.0003232 bt=true bf=false " ++
"nn=33 n=null lit=over9000! slice=spice_must_flow " ++ "nn=33 n=null lit=over9000! slice=spice_must_flow " ++
"err=Nope level=warn\n", buf.items); "err=Nope level=warn\n", aw.written());
} }
} }
test "log: string escape" { test "log: string escape" {
var buf: std.ArrayListUnmanaged(u8) = .{}; var aw = std.Io.Writer.Allocating.init(testing.allocator);
defer buf.deinit(testing.allocator); defer aw.deinit();
const prefix = "$time=1739795092929 $scope=app $level=error $msg=test "; const prefix = "$time=1739795092929 $scope=app $level=error $msg=test ";
{ {
try logTo(.app, .err, "test", .{ .string = "hello world" }, buf.writer(testing.allocator)); try logTo(.app, .err, "test", .{ .string = "hello world" }, &aw.writer);
try testing.expectEqual(prefix ++ "string=\"hello world\"\n", buf.items); try testing.expectEqual(prefix ++ "string=\"hello world\"\n", aw.written());
} }
{ {
buf.clearRetainingCapacity(); aw.clearRetainingCapacity();
try logTo(.app, .err, "test", .{ .string = "\n \thi \" \" " }, buf.writer(testing.allocator)); try logTo(.app, .err, "test", .{ .string = "\n \thi \" \" " }, &aw.writer);
try testing.expectEqual(prefix ++ "string=\"\\n \thi \\\" \\\" \"\n", buf.items); try testing.expectEqual(prefix ++ "string=\"\\n \thi \\\" \\\" \"\n", aw.written());
} }
} }
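Note: the std.io.getStdErr()/bufferedWriter pattern is replaced in 0.15.1 by handing a buffer to File.writer and writing through the generic .interface (*std.Io.Writer), flushing explicitly. A minimal sketch of the pattern the logger now uses (function name and message are illustrative only):

const std = @import("std");

fn logLine(msg: []const u8) !void {
    var buf: [4096]u8 = undefined;
    var stderr = std.fs.File.stderr();
    var file_writer = stderr.writer(&buf); // File.Writer buffered by `buf`
    const out: *std.Io.Writer = &file_writer.interface;
    try out.print("$msg=\"{s}\"\n", .{msg});
    try out.flush(); // nothing reaches the fd until flushed
}

An empty buffer (&.{}) can be passed instead when buffering isn't wanted, as the dump path in main.zig below does.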

View File

@@ -170,11 +170,14 @@ fn run(alloc: Allocator) !void {
// dump // dump
if (opts.dump) { if (opts.dump) {
var stdout = std.fs.File.stdout();
var writer = stdout.writer(&.{});
try page.dump(.{ try page.dump(.{
.page = page, .page = page,
.with_base = opts.withbase, .with_base = opts.withbase,
.exclude_scripts = opts.noscript, .exclude_scripts = opts.noscript,
}, std.io.getStdOut()); }, &writer.interface);
try writer.interface.flush();
} }
}, },
else => unreachable, else => unreachable,
@@ -756,11 +759,14 @@ fn serveHTTP(wg: *std.Thread.WaitGroup) !void {
wg.finish(); wg.finish();
var read_buffer: [1024]u8 = undefined; var buf: [1024]u8 = undefined;
while (true) { while (true) {
var conn = try listener.accept(); var conn = try listener.accept();
defer conn.stream.close(); defer conn.stream.close();
var http_server = std.http.Server.init(conn, &read_buffer); var conn_reader = conn.stream.reader(&buf);
var conn_writer = conn.stream.writer(&buf);
var http_server = std.http.Server.init(conn_reader.interface(), &conn_writer.interface);
var request = http_server.receiveHead() catch |err| switch (err) { var request = http_server.receiveHead() catch |err| switch (err) {
error.HttpConnectionClosing => continue, error.HttpConnectionClosing => continue,

View File

@@ -192,7 +192,7 @@ const Writer = struct {
fail_count: usize = 0, fail_count: usize = 0,
case_pass_count: usize = 0, case_pass_count: usize = 0,
case_fail_count: usize = 0, case_fail_count: usize = 0,
out: std.fs.File.Writer, writer: std.fs.File.Writer,
cases: std.ArrayListUnmanaged(Case) = .{}, cases: std.ArrayListUnmanaged(Case) = .{},
const Format = enum { const Format = enum {
@@ -202,28 +202,31 @@ const Writer = struct {
}; };
fn init(arena: Allocator, format: Format) !Writer { fn init(arena: Allocator, format: Format) !Writer {
const out = std.io.getStdOut().writer(); const out = std.fs.File.stdout();
var writer = out.writer(&.{});
if (format == .json) { if (format == .json) {
try out.writeByte('['); try writer.interface.writeByte('[');
} }
return .{ return .{
.out = out,
.arena = arena, .arena = arena,
.format = format, .format = format,
.writer = writer,
}; };
} }
fn finalize(self: *Writer) !void { fn finalize(self: *Writer) !void {
var writer = &self.writer.interface;
if (self.format == .json) { if (self.format == .json) {
// When we write a test output, we add a trailing comma to act as // When we write a test output, we add a trailing comma to act as
// a separator for the next test. We need to add this dummy entry // a separator for the next test. We need to add this dummy entry
// to make it valid json. // to make it valid json.
// Better option could be to change the formatter to work on JSONL: // Better option could be to change the formatter to work on JSONL:
// https://github.com/lightpanda-io/perf-fmt/blob/main/wpt/wpt.go // https://github.com/lightpanda-io/perf-fmt/blob/main/wpt/wpt.go
try self.out.writeAll("{\"name\":\"empty\",\"pass\": true, \"cases\": []}]"); try writer.writeAll("{\"name\":\"empty\",\"pass\": true, \"cases\": []}]");
} else { } else {
try self.out.print("\n==Summary==\nTests: {d}/{d}\nCases: {d}/{d}\n", .{ try writer.print("\n==Summary==\nTests: {d}/{d}\nCases: {d}/{d}\n", .{
self.pass_count, self.pass_count,
self.pass_count + self.fail_count, self.pass_count + self.fail_count,
self.case_pass_count, self.case_pass_count,
@@ -233,18 +236,19 @@ const Writer = struct {
} }
fn process(self: *Writer, test_file: []const u8, result_: ?[]const u8, err_: ?[]const u8) !void { fn process(self: *Writer, test_file: []const u8, result_: ?[]const u8, err_: ?[]const u8) !void {
var writer = &self.writer.interface;
if (err_) |err| { if (err_) |err| {
self.fail_count += 1; self.fail_count += 1;
switch (self.format) { switch (self.format) {
.text => return self.out.print("Fail\t{s}\n\t{s}\n", .{ test_file, err }), .text => return writer.print("Fail\t{s}\n\t{s}\n", .{ test_file, err }),
.summary => return self.out.print("Fail 0/0\t{s}\n", .{test_file}), .summary => return writer.print("Fail 0/0\t{s}\n", .{test_file}),
.json => { .json => {
try std.json.stringify(Test{ try std.json.Stringify.value(Test{
.pass = false, .pass = false,
.name = test_file, .name = test_file,
.cases = &.{}, .cases = &.{},
}, .{ .whitespace = .indent_2 }, self.out); }, .{ .whitespace = .indent_2 }, writer);
return self.out.writeByte(','); return writer.writeByte(',');
}, },
} }
// just make sure we didn't fall through by mistake // just make sure we didn't fall through by mistake
@@ -316,24 +320,24 @@ const Writer = struct {
self.case_fail_count += case_fail_count; self.case_fail_count += case_fail_count;
switch (self.format) { switch (self.format) {
.summary => try self.out.print("{s} {d}/{d}\t{s}\n", .{ statusText(pass), case_pass_count, case_pass_count + case_fail_count, test_file }), .summary => try writer.print("{s} {d}/{d}\t{s}\n", .{ statusText(pass), case_pass_count, case_pass_count + case_fail_count, test_file }),
.text => { .text => {
try self.out.print("{s}\t{s}\n", .{ statusText(pass), test_file }); try writer.print("{s}\t{s}\n", .{ statusText(pass), test_file });
for (cases.items) |c| { for (cases.items) |c| {
try self.out.print("\t{s}\t{s}\n", .{ statusText(c.pass), c.name }); try writer.print("\t{s}\t{s}\n", .{ statusText(c.pass), c.name });
if (c.message) |msg| { if (c.message) |msg| {
try self.out.print("\t\t{s}\n", .{msg}); try writer.print("\t\t{s}\n", .{msg});
} }
} }
}, },
.json => { .json => {
try std.json.stringify(Test{ try std.json.Stringify.value(Test{
.pass = pass, .pass = pass,
.name = test_file, .name = test_file,
.cases = cases.items, .cases = cases.items,
}, .{ .whitespace = .indent_2 }, self.out); }, .{ .whitespace = .indent_2 }, writer);
// separator, see `finalize` for the hack we use to terminate this // separator, see `finalize` for the hack we use to terminate this
try self.out.writeByte(','); try writer.writeByte(',');
}, },
} }
} }
@@ -362,14 +366,14 @@ fn parseArgs(arena: Allocator) !Command {
var args = try std.process.argsWithAllocator(arena); var args = try std.process.argsWithAllocator(arena);
// get the exec name. // get the exec name.
const execname = args.next().?; const exec_name = args.next().?;
var format = Writer.Format.text; var format = Writer.Format.text;
var filters: std.ArrayListUnmanaged([]const u8) = .{}; var filters: std.ArrayListUnmanaged([]const u8) = .{};
while (args.next()) |arg| { while (args.next()) |arg| {
if (std.mem.eql(u8, "-h", arg) or std.mem.eql(u8, "--help", arg)) { if (std.mem.eql(u8, "-h", arg) or std.mem.eql(u8, "--help", arg)) {
try std.io.getStdErr().writer().print(usage, .{execname}); std.debug.print(usage, .{exec_name});
std.posix.exit(0); std.posix.exit(0);
} }

View File

@@ -8,8 +8,7 @@ const Transfer = @import("http/Client.zig").Transfer;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const List = std.DoublyLinkedList(Listener); const List = std.DoublyLinkedList;
const Node = List.Node;
// Allows code to register for and emit events. // Allows code to register for and emit events.
// Keeps two lists // Keeps two lists
@@ -48,12 +47,12 @@ pub const Notification = struct {
event_listeners: EventListeners, event_listeners: EventListeners,
// list of listeners for a specified receiver // list of listeners for a specified receiver
// @intFromPtr(listener) -> [@intFromPtr(listener1), @intFromPtr(listener2, ...] // @intFromPtr(receiver) -> [listener1, listener2, ...]
// Used when `unregisterAll` is called. // Used when `unregisterAll` is called.
listeners: std.AutoHashMapUnmanaged(usize, std.ArrayListUnmanaged(*Node)), listeners: std.AutoHashMapUnmanaged(usize, std.ArrayListUnmanaged(*Listener)),
allocator: Allocator, allocator: Allocator,
node_pool: std.heap.MemoryPool(Node), mem_pool: std.heap.MemoryPool(Listener),
const EventListeners = struct { const EventListeners = struct {
page_remove: List = .{}, page_remove: List = .{},
@@ -143,7 +142,7 @@ pub const Notification = struct {
.listeners = .{}, .listeners = .{},
.event_listeners = .{}, .event_listeners = .{},
.allocator = allocator, .allocator = allocator,
.node_pool = std.heap.MemoryPool(Node).init(allocator), .mem_pool = std.heap.MemoryPool(Listener).init(allocator),
}; };
if (parent) |pn| { if (parent) |pn| {
@@ -161,21 +160,22 @@ pub const Notification = struct {
listener.deinit(allocator); listener.deinit(allocator);
} }
self.listeners.deinit(allocator); self.listeners.deinit(allocator);
self.node_pool.deinit(); self.mem_pool.deinit();
allocator.destroy(self); allocator.destroy(self);
} }
pub fn register(self: *Notification, comptime event: EventType, receiver: anytype, func: EventFunc(event)) !void { pub fn register(self: *Notification, comptime event: EventType, receiver: anytype, func: EventFunc(event)) !void {
var list = &@field(self.event_listeners, @tagName(event)); var list = &@field(self.event_listeners, @tagName(event));
var node = try self.node_pool.create(); var listener = try self.mem_pool.create();
errdefer self.node_pool.destroy(node); errdefer self.mem_pool.destroy(listener);
node.data = .{ listener.* = .{
.node = .{},
.list = list, .list = list,
.func = @ptrCast(func),
.receiver = receiver, .receiver = receiver,
.event = event, .event = event,
.func = @ptrCast(func),
.struct_name = @typeName(@typeInfo(@TypeOf(receiver)).pointer.child), .struct_name = @typeName(@typeInfo(@TypeOf(receiver)).pointer.child),
}; };
@@ -184,44 +184,40 @@ pub const Notification = struct {
if (gop.found_existing == false) { if (gop.found_existing == false) {
gop.value_ptr.* = .{}; gop.value_ptr.* = .{};
} }
try gop.value_ptr.append(allocator, node); try gop.value_ptr.append(allocator, listener);
// we don't add this until we've successfully added the entry to // we don't add this until we've successfully added the entry to
// self.listeners // self.listeners
list.append(node); list.append(&listener.node);
} }
pub fn unregister(self: *Notification, comptime event: EventType, receiver: anytype) void { pub fn unregister(self: *Notification, comptime event: EventType, receiver: anytype) void {
var nodes = self.listeners.getPtr(@intFromPtr(receiver)) orelse return; var listeners = self.listeners.getPtr(@intFromPtr(receiver)) orelse return;
const node_pool = &self.node_pool;
var i: usize = 0; var i: usize = 0;
while (i < nodes.items.len) { while (i < listeners.items.len) {
const node = nodes.items[i]; const listener = listeners.items[i];
if (node.data.event != event) { if (listener.event != event) {
i += 1; i += 1;
continue; continue;
} }
node.data.list.remove(node); listener.list.remove(&listener.node);
node_pool.destroy(node); self.mem_pool.destroy(listener);
_ = nodes.swapRemove(i); _ = listeners.swapRemove(i);
} }
if (nodes.items.len == 0) { if (listeners.items.len == 0) {
nodes.deinit(self.allocator); listeners.deinit(self.allocator);
const removed = self.listeners.remove(@intFromPtr(receiver)); const removed = self.listeners.remove(@intFromPtr(receiver));
std.debug.assert(removed == true); std.debug.assert(removed == true);
} }
} }
pub fn unregisterAll(self: *Notification, receiver: *anyopaque) void { pub fn unregisterAll(self: *Notification, receiver: *anyopaque) void {
const node_pool = &self.node_pool;
var kv = self.listeners.fetchRemove(@intFromPtr(receiver)) orelse return; var kv = self.listeners.fetchRemove(@intFromPtr(receiver)) orelse return;
for (kv.value.items) |node| { for (kv.value.items) |listener| {
node.data.list.remove(node); listener.list.remove(&listener.node);
node_pool.destroy(node); self.mem_pool.destroy(listener);
} }
kv.value.deinit(self.allocator); kv.value.deinit(self.allocator);
} }
@@ -231,8 +227,8 @@ pub const Notification = struct {
var node = list.first; var node = list.first;
while (node) |n| { while (node) |n| {
const listener = n.data; const listener: *Listener = @fieldParentPtr("node", n);
const func: EventFunc(event) = @alignCast(@ptrCast(listener.func)); const func: EventFunc(event) = @ptrCast(@alignCast(listener.func));
func(listener.receiver, data) catch |err| { func(listener.receiver, data) catch |err| {
log.err(.app, "dispatch error", .{ log.err(.app, "dispatch error", .{
.err = err, .err = err,
@@ -275,6 +271,9 @@ const Listener = struct {
event: Notification.EventType, event: Notification.EventType,
// intrusive linked list node
node: List.Node,
// The event list this listener belongs to. // The event list this listener belongs to.
// We need this in order to be able to remove the node from the list // We need this in order to be able to remove the node from the list
list: *List, list: *List,
@@ -366,12 +365,12 @@ const TestClient = struct {
page_navigated: u32 = 0, page_navigated: u32 = 0,
fn pageNavigate(ptr: *anyopaque, data: *const Notification.PageNavigate) !void { fn pageNavigate(ptr: *anyopaque, data: *const Notification.PageNavigate) !void {
const self: *TestClient = @alignCast(@ptrCast(ptr)); const self: *TestClient = @ptrCast(@alignCast(ptr));
self.page_navigate += data.timestamp; self.page_navigate += data.timestamp;
} }
fn pageNavigated(ptr: *anyopaque, data: *const Notification.PageNavigated) !void { fn pageNavigated(ptr: *anyopaque, data: *const Notification.PageNavigated) !void {
const self: *TestClient = @alignCast(@ptrCast(ptr)); const self: *TestClient = @ptrCast(@alignCast(ptr));
self.page_navigated += data.timestamp; self.page_navigated += data.timestamp;
} }
}; };
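Note: std.DoublyLinkedList is no longer generic in 0.15.1: the list is intrusive, so each element embeds a Node and the owner is recovered with @fieldParentPtr, which is exactly what the Listener rewrite above does. A minimal sketch of the pattern — the Item type is made up:

const std = @import("std");

const List = std.DoublyLinkedList;

const Item = struct {
    value: u32,
    node: List.Node = .{}, // intrusive node lives inside the element
};

test "intrusive doubly linked list" {
    var list: List = .{};

    var a = Item{ .value = 1 };
    var b = Item{ .value = 2 };
    list.append(&a.node);
    list.append(&b.node);

    var sum: u32 = 0;
    var node = list.first;
    while (node) |n| : (node = n.next) {
        const item: *Item = @fieldParentPtr("node", n);
        sum += item.value;
    }
    try std.testing.expectEqual(@as(u32, 3), sum);

    list.remove(&a.node); // remove takes the embedded node, not the element
    try std.testing.expectEqual(&b.node, list.first.?);
}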

View File

@@ -202,7 +202,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
errdefer isolate.exit(); errdefer isolate.exit();
isolate.setHostInitializeImportMetaObjectCallback(struct { isolate.setHostInitializeImportMetaObjectCallback(struct {
fn callback(c_context: ?*v8.C_Context, c_module: ?*v8.C_Module, c_meta: ?*v8.C_Value) callconv(.C) void { fn callback(c_context: ?*v8.C_Context, c_module: ?*v8.C_Module, c_meta: ?*v8.C_Value) callconv(.c) void {
const v8_context = v8.Context{ .handle = c_context.? }; const v8_context = v8.Context{ .handle = c_context.? };
const js_context: *JsContext = @ptrFromInt(v8_context.getEmbedderData(1).castTo(v8.BigInt).getUint64()); const js_context: *JsContext = @ptrFromInt(v8_context.getEmbedderData(1).castTo(v8.BigInt).getUint64());
js_context.initializeImportMeta(v8.Module{ .handle = c_module.? }, v8.Object{ .handle = c_meta.? }) catch |err| { js_context.initializeImportMeta(v8.Module{ .handle = c_module.? }, v8.Object{ .handle = c_meta.? }) catch |err| {
@@ -382,7 +382,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
}; };
// If necessary, turn a void context into something we can safely ptrCast // If necessary, turn a void context into something we can safely ptrCast
const safe_module_loader: *anyopaque = if (ModuleLoader == ErrorModuleLoader) @constCast(@ptrCast(&{})) else module_loader; const safe_module_loader: *anyopaque = if (ModuleLoader == ErrorModuleLoader) @ptrCast(@constCast(&{})) else module_loader;
const env = self.env; const env = self.env;
const isolate = env.isolate; const isolate = env.isolate;
@@ -1008,7 +1008,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
if (ptr.sentinel() == null) { if (ptr.sentinel() == null) {
if (force_u8 or js_value.isUint8Array() or js_value.isUint8ClampedArray()) { if (force_u8 or js_value.isUint8Array() or js_value.isUint8ClampedArray()) {
if (byte_len == 0) return &[_]u8{}; if (byte_len == 0) return &[_]u8{};
const arr_ptr = @as([*]u8, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]u8, @ptrCast(@alignCast(data)));
return arr_ptr[0..byte_len]; return arr_ptr[0..byte_len];
} }
} }
@@ -1016,49 +1016,49 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
i8 => { i8 => {
if (js_value.isInt8Array()) { if (js_value.isInt8Array()) {
if (byte_len == 0) return &[_]i8{}; if (byte_len == 0) return &[_]i8{};
const arr_ptr = @as([*]i8, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]i8, @ptrCast(@alignCast(data)));
return arr_ptr[0..byte_len]; return arr_ptr[0..byte_len];
} }
}, },
u16 => { u16 => {
if (js_value.isUint16Array()) { if (js_value.isUint16Array()) {
if (byte_len == 0) return &[_]u16{}; if (byte_len == 0) return &[_]u16{};
const arr_ptr = @as([*]u16, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]u16, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 2]; return arr_ptr[0 .. byte_len / 2];
} }
}, },
i16 => { i16 => {
if (js_value.isInt16Array()) { if (js_value.isInt16Array()) {
if (byte_len == 0) return &[_]i16{}; if (byte_len == 0) return &[_]i16{};
const arr_ptr = @as([*]i16, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]i16, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 2]; return arr_ptr[0 .. byte_len / 2];
} }
}, },
u32 => { u32 => {
if (js_value.isUint32Array()) { if (js_value.isUint32Array()) {
if (byte_len == 0) return &[_]u32{}; if (byte_len == 0) return &[_]u32{};
const arr_ptr = @as([*]u32, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]u32, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 4]; return arr_ptr[0 .. byte_len / 4];
} }
}, },
i32 => { i32 => {
if (js_value.isInt32Array()) { if (js_value.isInt32Array()) {
if (byte_len == 0) return &[_]i32{}; if (byte_len == 0) return &[_]i32{};
const arr_ptr = @as([*]i32, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]i32, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 4]; return arr_ptr[0 .. byte_len / 4];
} }
}, },
u64 => { u64 => {
if (js_value.isBigUint64Array()) { if (js_value.isBigUint64Array()) {
if (byte_len == 0) return &[_]u64{}; if (byte_len == 0) return &[_]u64{};
const arr_ptr = @as([*]u64, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]u64, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 8]; return arr_ptr[0 .. byte_len / 8];
} }
}, },
i64 => { i64 => {
if (js_value.isBigInt64Array()) { if (js_value.isBigInt64Array()) {
if (byte_len == 0) return &[_]i64{}; if (byte_len == 0) return &[_]i64{};
const arr_ptr = @as([*]i64, @alignCast(@ptrCast(data))); const arr_ptr = @as([*]i64, @ptrCast(@alignCast(data)));
return arr_ptr[0 .. byte_len / 8]; return arr_ptr[0 .. byte_len / 8];
} }
}, },
@@ -1418,7 +1418,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
c_specifier: ?*const v8.C_String, c_specifier: ?*const v8.C_String,
import_attributes: ?*const v8.C_FixedArray, import_attributes: ?*const v8.C_FixedArray,
c_referrer: ?*const v8.C_Module, c_referrer: ?*const v8.C_Module,
) callconv(.C) ?*const v8.C_Module { ) callconv(.c) ?*const v8.C_Module {
_ = import_attributes; _ = import_attributes;
const v8_context = v8.Context{ .handle = c_context.? }; const v8_context = v8.Context{ .handle = c_context.? };
@@ -1516,12 +1516,12 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
} }
const op = js_obj.getInternalField(0).castTo(v8.External).get(); const op = js_obj.getInternalField(0).castTo(v8.External).get();
const toa: *TaggedAnyOpaque = @alignCast(@ptrCast(op)); const toa: *TaggedAnyOpaque = @ptrCast(@alignCast(op));
const expected_type_index = @field(TYPE_LOOKUP, type_name); const expected_type_index = @field(TYPE_LOOKUP, type_name);
var type_index = toa.index; var type_index = toa.index;
if (type_index == expected_type_index) { if (type_index == expected_type_index) {
return @alignCast(@ptrCast(toa.ptr)); return @ptrCast(@alignCast(toa.ptr));
} }
const meta_lookup = self.meta_lookup; const meta_lookup = self.meta_lookup;
@@ -1882,8 +1882,9 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
_: u29 = 0, _: u29 = 0,
}; };
pub fn setIndex(self: JsObject, index: u32, value: anytype, opts: SetOpts) !void { pub fn setIndex(self: JsObject, index: u32, value: anytype, opts: SetOpts) !void {
@setEvalBranchQuota(10000);
const key = switch (index) { const key = switch (index) {
inline 0...50 => |i| std.fmt.comptimePrint("{d}", .{i}), inline 0...20 => |i| std.fmt.comptimePrint("{d}", .{i}),
else => try std.fmt.allocPrint(self.js_context.context_arena, "{d}", .{index}), else => try std.fmt.allocPrint(self.js_context.context_arena, "{d}", .{index}),
}; };
return self.set(key, value, opts); return self.set(key, value, opts);
@@ -2157,7 +2158,7 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
}; };
// If necessary, turn a void context into something we can safely ptrCast // If necessary, turn a void context into something we can safely ptrCast
const safe_context: *anyopaque = if (ContextT == void) @constCast(@ptrCast(&{})) else ctx; const safe_context: *anyopaque = if (ContextT == void) @ptrCast(@constCast(&{})) else ctx;
const channel = v8.InspectorChannel.init(safe_context, InspectorContainer.onInspectorResponse, InspectorContainer.onInspectorEvent, isolate); const channel = v8.InspectorChannel.init(safe_context, InspectorContainer.onInspectorResponse, InspectorContainer.onInspectorEvent, isolate);
@@ -2981,7 +2982,7 @@ fn Caller(comptime JsContext: type, comptime State: type) type {
// Therefore, we keep a call_depth, and only reset the call_arena // Therefore, we keep a call_depth, and only reset the call_arena
// when a top-level (call_depth == 0) function ends. // when a top-level (call_depth == 0) function ends.
if (call_depth == 0) { if (call_depth == 0) {
const arena: *ArenaAllocator = @alignCast(@ptrCast(js_context.call_arena.ptr)); const arena: *ArenaAllocator = @ptrCast(@alignCast(js_context.call_arena.ptr));
_ = arena.reset(.{ .retain_with_limit = CALL_ARENA_RETAIN }); _ = arena.reset(.{ .retain_with_limit = CALL_ARENA_RETAIN });
} }
@@ -3539,7 +3540,7 @@ fn simpleZigValueToJs(isolate: v8.Isolate, value: anytype, comptime fail: bool)
} else { } else {
const buffer_len = len * bits / 8; const buffer_len = len * bits / 8;
const backing_store = v8.BackingStore.init(isolate, buffer_len); const backing_store = v8.BackingStore.init(isolate, buffer_len);
const data: [*]u8 = @alignCast(@ptrCast(backing_store.getData())); const data: [*]u8 = @ptrCast(@alignCast(backing_store.getData()));
@memcpy(data[0..buffer_len], @as([]const u8, @ptrCast(values))[0..buffer_len]); @memcpy(data[0..buffer_len], @as([]const u8, @ptrCast(values))[0..buffer_len]);
array_buffer = v8.ArrayBuffer.initWithBackingStore(isolate, &backing_store.toSharedPtr()); array_buffer = v8.ArrayBuffer.initWithBackingStore(isolate, &backing_store.toSharedPtr());
} }
@@ -3944,7 +3945,7 @@ fn serializeFunctionArgs(arena: Allocator, isolate: v8.Isolate, context: v8.Cont
pub export fn v8_inspector__Client__IMPL__valueSubtype( pub export fn v8_inspector__Client__IMPL__valueSubtype(
_: *v8.c.InspectorClientImpl, _: *v8.c.InspectorClientImpl,
c_value: *const v8.C_Value, c_value: *const v8.C_Value,
) callconv(.C) [*c]const u8 { ) callconv(.c) [*c]const u8 {
const external_entry = getTaggedAnyOpaque(.{ .handle = c_value }) orelse return null; const external_entry = getTaggedAnyOpaque(.{ .handle = c_value }) orelse return null;
return if (external_entry.subtype) |st| @tagName(st) else null; return if (external_entry.subtype) |st| @tagName(st) else null;
} }
@@ -3957,7 +3958,7 @@ pub export fn v8_inspector__Client__IMPL__descriptionForValueSubtype(
_: *v8.c.InspectorClientImpl, _: *v8.c.InspectorClientImpl,
v8_context: *const v8.C_Context, v8_context: *const v8.C_Context,
c_value: *const v8.C_Value, c_value: *const v8.C_Value,
) callconv(.C) [*c]const u8 { ) callconv(.c) [*c]const u8 {
_ = v8_context; _ = v8_context;
// We _must_ include a non-null description in order for the subtype value // We _must_ include a non-null description in order for the subtype value
@@ -3976,7 +3977,7 @@ fn getTaggedAnyOpaque(value: v8.Value) ?*TaggedAnyOpaque {
} }
const external_data = obj.getInternalField(0).castTo(v8.External).get().?; const external_data = obj.getInternalField(0).castTo(v8.External).get().?;
return @alignCast(@ptrCast(external_data)); return @ptrCast(@alignCast(external_data));
} }
test { test {

View File

@@ -83,7 +83,7 @@ pub const Server = struct {
while (true) { while (true) {
const socket = posix.accept(listener, null, null, posix.SOCK.NONBLOCK) catch |err| { const socket = posix.accept(listener, null, null, posix.SOCK.NONBLOCK) catch |err| {
log.err(.app, "CDP accept", .{ .err = err }); log.err(.app, "CDP accept", .{ .err = err });
std.time.sleep(std.time.ns_per_s); std.Thread.sleep(std.time.ns_per_s);
continue; continue;
}; };
@@ -456,17 +456,15 @@ pub const Client = struct {
// writev, so we need to get creative. We'll JSON serialize to a // writev, so we need to get creative. We'll JSON serialize to a
// buffer, where the first 10 bytes are reserved. We can then backfill // buffer, where the first 10 bytes are reserved. We can then backfill
// the header and send the slice. // the header and send the slice.
pub fn sendJSON(self: *Client, message: anytype, opts: std.json.StringifyOptions) !void { pub fn sendJSON(self: *Client, message: anytype, opts: std.json.Stringify.Options) !void {
const allocator = self.send_arena.allocator(); const allocator = self.send_arena.allocator();
var buf: std.ArrayListUnmanaged(u8) = .{}; var aw = try std.Io.Writer.Allocating.initCapacity(allocator, 512);
try buf.ensureTotalCapacity(allocator, 512);
// reserve space for the maximum possible header // reserve space for the maximum possible header
buf.appendSliceAssumeCapacity(&.{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }); try aw.writer.writeAll(&.{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
try std.json.Stringify.value(message, opts, &aw.writer);
try std.json.stringify(message, opts, buf.writer(allocator)); const framed = fillWebsocketHeader(aw.toArrayList());
const framed = fillWebsocketHeader(buf);
return self.send(framed); return self.send(framed);
} }
@@ -844,7 +842,7 @@ fn buildJSONVersionResponse(
allocator: Allocator, allocator: Allocator,
address: net.Address, address: net.Address,
) ![]const u8 { ) ![]const u8 {
const body_format = "{{\"webSocketDebuggerUrl\": \"ws://{}/\"}}"; const body_format = "{{\"webSocketDebuggerUrl\": \"ws://{f}/\"}}";
const body_len = std.fmt.count(body_format, .{address}); const body_len = std.fmt.count(body_format, .{address});
// We send a Connection: Close (and actually close the connection) // We send a Connection: Close (and actually close the connection)

View File

@@ -79,7 +79,7 @@ fn TelemetryT(comptime P: type) type {
} }
fn onPageNavigate(ctx: *anyopaque, data: *const Notification.PageNavigate) !void { fn onPageNavigate(ctx: *anyopaque, data: *const Notification.PageNavigate) !void {
const self: *Self = @alignCast(@ptrCast(ctx)); const self: *Self = @ptrCast(@alignCast(ctx));
self.record(.{ .navigate = .{ self.record(.{ .navigate = .{
.proxy = false, .proxy = false,
.tls = std.ascii.startsWithIgnoreCase(data.url, "https://"), .tls = std.ascii.startsWithIgnoreCase(data.url, "https://"),

View File

@@ -67,13 +67,12 @@ pub fn main() !void {
var skip: usize = 0; var skip: usize = 0;
var leak: usize = 0; var leak: usize = 0;
const printer = Printer.init(); Printer.fmt("\r\x1b[0K", .{}); // beginning of line and clear to end of line
printer.fmt("\r\x1b[0K", .{}); // beginning of line and clear to end of line
for (builtin.test_functions) |t| { for (builtin.test_functions) |t| {
if (isSetup(t)) { if (isSetup(t)) {
t.func() catch |err| { t.func() catch |err| {
printer.status(.fail, "\nsetup \"{s}\" failed: {}\n", .{ t.name, err }); Printer.status(.fail, "\nsetup \"{s}\" failed: {}\n", .{ t.name, err });
return err; return err;
}; };
} }
@@ -115,7 +114,7 @@ pub fn main() !void {
if (std.testing.allocator_instance.deinit() == .leak) { if (std.testing.allocator_instance.deinit() == .leak) {
leak += 1; leak += 1;
printer.status(.fail, "\n{s}\n\"{s}\" - Memory Leak\n{s}\n", .{ BORDER, friendly_name, BORDER }); Printer.status(.fail, "\n{s}\n\"{s}\" - Memory Leak\n{s}\n", .{ BORDER, friendly_name, BORDER });
} }
if (result) |_| { if (result) |_| {
@@ -130,7 +129,7 @@ pub fn main() !void {
else => { else => {
status = .fail; status = .fail;
fail += 1; fail += 1;
printer.status(.fail, "\n{s}\n\"{s}\" - {s}\n{s}\n", .{ BORDER, friendly_name, @errorName(err), BORDER }); Printer.status(.fail, "\n{s}\n\"{s}\" - {s}\n{s}\n", .{ BORDER, friendly_name, @errorName(err), BORDER });
if (@errorReturnTrace()) |trace| { if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*); std.debug.dumpStackTrace(trace.*);
} }
@@ -143,9 +142,9 @@ pub fn main() !void {
if (is_unnamed_test == false) { if (is_unnamed_test == false) {
if (env.verbose) { if (env.verbose) {
const ms = @as(f64, @floatFromInt(ns_taken)) / 1_000_000.0; const ms = @as(f64, @floatFromInt(ns_taken)) / 1_000_000.0;
printer.status(status, "{s} ({d:.2}ms)\n", .{ friendly_name, ms }); Printer.status(status, "{s} ({d:.2}ms)\n", .{ friendly_name, ms });
} else { } else {
printer.status(status, ".", .{}); Printer.status(status, ".", .{});
} }
} }
} }
@@ -153,7 +152,7 @@ pub fn main() !void {
for (builtin.test_functions) |t| { for (builtin.test_functions) |t| {
if (isTeardown(t)) { if (isTeardown(t)) {
t.func() catch |err| { t.func() catch |err| {
printer.status(.fail, "\nteardown \"{s}\" failed: {}\n", .{ t.name, err }); Printer.status(.fail, "\nteardown \"{s}\" failed: {}\n", .{ t.name, err });
return err; return err;
}; };
} }
@@ -161,21 +160,23 @@ pub fn main() !void {
const total_tests = pass + fail; const total_tests = pass + fail;
const status = if (fail == 0) Status.pass else Status.fail; const status = if (fail == 0) Status.pass else Status.fail;
printer.status(status, "\n{d} of {d} test{s} passed\n", .{ pass, total_tests, if (total_tests != 1) "s" else "" }); Printer.status(status, "\n{d} of {d} test{s} passed\n", .{ pass, total_tests, if (total_tests != 1) "s" else "" });
if (skip > 0) { if (skip > 0) {
printer.status(.skip, "{d} test{s} skipped\n", .{ skip, if (skip != 1) "s" else "" }); Printer.status(.skip, "{d} test{s} skipped\n", .{ skip, if (skip != 1) "s" else "" });
} }
if (leak > 0) { if (leak > 0) {
printer.status(.fail, "{d} test{s} leaked\n", .{ leak, if (leak != 1) "s" else "" }); Printer.status(.fail, "{d} test{s} leaked\n", .{ leak, if (leak != 1) "s" else "" });
} }
printer.fmt("\n", .{}); Printer.fmt("\n", .{});
try slowest.display(printer); try slowest.display();
printer.fmt("\n", .{}); Printer.fmt("\n", .{});
// TODO: at the very least, `browser` should return real stats // TODO: at the very least, `browser` should return real stats
if (json_stats) { if (json_stats) {
var stdout = std.fs.File.stdout();
var writer = stdout.writer(&.{});
const stats = tracking_allocator.stats(); const stats = tracking_allocator.stats();
try std.json.stringify(&.{ try std.json.Stringify.value(&.{
.{ .name = "browser", .bench = .{ .{ .name = "browser", .bench = .{
.duration = js_runner_duration, .duration = js_runner_duration,
.alloc_nb = stats.allocation_count, .alloc_nb = stats.allocation_count,
@@ -200,36 +201,25 @@ pub fn main() !void {
.realloc_nb = 0, .realloc_nb = 0,
.alloc_size = 0, .alloc_size = 0,
} }, } },
}, .{ .whitespace = .indent_2 }, std.io.getStdOut().writer()); }, .{ .whitespace = .indent_2 }, &writer.interface);
} }
std.posix.exit(if (fail == 0) 0 else 1); std.posix.exit(if (fail == 0) 0 else 1);
} }
const Printer = struct { const Printer = struct {
out: std.fs.File.Writer, fn fmt(comptime format: []const u8, args: anytype) void {
std.debug.print(format, args);
fn init() Printer {
return .{
.out = std.io.getStdErr().writer(),
};
} }
fn fmt(self: Printer, comptime format: []const u8, args: anytype) void { fn status(s: Status, comptime format: []const u8, args: anytype) void {
std.fmt.format(self.out, format, args) catch unreachable; switch (s) {
} .pass => std.debug.print("\x1b[32m", .{}),
.fail => std.debug.print("\x1b[31m", .{}),
fn status(self: Printer, s: Status, comptime format: []const u8, args: anytype) void { .skip => std.debug.print("\x1b[33m", .{}),
const color = switch (s) { else => {},
.pass => "\x1b[32m", }
.fail => "\x1b[31m", std.debug.print(format ++ "\x1b[0m", args);
.skip => "\x1b[33m",
else => "",
};
const out = self.out;
out.writeAll(color) catch @panic("writeAll failed?!");
std.fmt.format(out, format, args) catch @panic("std.fmt.format failed?!");
self.fmt("\x1b[0m", .{});
} }
}; };
@@ -302,13 +292,13 @@ const SlowTracker = struct {
return ns; return ns;
} }
fn display(self: *SlowTracker, printer: Printer) !void { fn display(self: *SlowTracker) !void {
var slowest = self.slowest; var slowest = self.slowest;
const count = slowest.count(); const count = slowest.count();
printer.fmt("Slowest {d} test{s}: \n", .{ count, if (count != 1) "s" else "" }); Printer.fmt("Slowest {d} test{s}: \n", .{ count, if (count != 1) "s" else "" });
while (slowest.removeMinOrNull()) |info| { while (slowest.removeMinOrNull()) |info| {
const ms = @as(f64, @floatFromInt(info.ns)) / 1_000_000.0; const ms = @as(f64, @floatFromInt(info.ns)) / 1_000_000.0;
printer.fmt(" {d:.2}ms\t{s}\n", .{ ms, info.name }); Printer.fmt(" {d:.2}ms\t{s}\n", .{ ms, info.name });
} }
} }

View File

@@ -254,8 +254,8 @@ pub fn expectJson(a: anytype, b: anytype) !void {
const b_value = try convertToJson(aa, b); const b_value = try convertToJson(aa, b);
errdefer { errdefer {
const a_json = std.json.stringifyAlloc(aa, a_value, .{ .whitespace = .indent_2 }) catch unreachable; const a_json = std.json.Stringify.valueAlloc(aa, a_value, .{ .whitespace = .indent_2 }) catch unreachable;
const b_json = std.json.stringifyAlloc(aa, b_value, .{ .whitespace = .indent_2 }) catch unreachable; const b_json = std.json.Stringify.valueAlloc(aa, b_value, .{ .whitespace = .indent_2 }) catch unreachable;
std.debug.print("== Expected ==\n{s}\n\n== Actual ==\n{s}", .{ a_json, b_json }); std.debug.print("== Expected ==\n{s}\n\n== Actual ==\n{s}", .{ a_json, b_json });
} }
@@ -282,7 +282,7 @@ fn convertToJson(arena: Allocator, value: anytype) !std.json.Value {
if (T == []u8 or T == []const u8 or comptime isStringArray(T)) { if (T == []u8 or T == []const u8 or comptime isStringArray(T)) {
str = value; str = value;
} else { } else {
str = try std.json.stringifyAlloc(arena, value, .{}); str = try std.json.Stringify.valueAlloc(arena, value, .{});
} }
return std.json.parseFromSliceLeaky(std.json.Value, arena, str, .{}); return std.json.parseFromSliceLeaky(std.json.Value, arena, str, .{});
} }

View File

@@ -67,17 +67,11 @@ pub const URL = struct {
return self.uri.scheme; return self.uri.scheme;
} }
pub fn origin(self: *const URL, writer: anytype) !void { pub fn origin(self: *const URL, writer: *std.Io.Writer) !void {
return self.uri.writeToStream(.{ .scheme = true, .authority = true }, writer); return self.uri.writeToStream(writer, .{ .scheme = true, .authority = true });
} }
pub fn resolve(self: *const URL, arena: Allocator, url: []const u8) !URL { pub fn format(self: *const URL, writer: *std.Io.Writer) !void {
var buf = try arena.alloc(u8, 4096);
const new_uri = try self.uri.resolve_inplace(url, &buf);
return fromURI(arena, &new_uri);
}
pub fn format(self: *const URL, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
return writer.writeAll(self.raw); return writer.writeAll(self.raw);
} }
@@ -111,7 +105,7 @@ pub const URL = struct {
const protocol = base[0..index]; const protocol = base[0..index];
if (comptime opts.null_terminated) { if (comptime opts.null_terminated) {
return std.fmt.allocPrintZ(allocator, "{s}:{s}", .{ protocol, path }); return std.fmt.allocPrintSentinel(allocator, "{s}:{s}", .{ protocol, path }, 0);
} }
return std.fmt.allocPrint(allocator, "{s}:{s}", .{ protocol, path }); return std.fmt.allocPrint(allocator, "{s}:{s}", .{ protocol, path });
} }
@@ -125,7 +119,7 @@ pub const URL = struct {
if (path[0] == '/') { if (path[0] == '/') {
const pos = std.mem.indexOfScalarPos(u8, base, protocol_end, '/') orelse base.len; const pos = std.mem.indexOfScalarPos(u8, base, protocol_end, '/') orelse base.len;
if (comptime opts.null_terminated) { if (comptime opts.null_terminated) {
return std.fmt.allocPrintZ(allocator, "{s}{s}", .{ base[0..pos], path }); return std.fmt.allocPrintSentinel(allocator, "{s}{s}", .{ base[0..pos], path }, 0);
} }
return std.fmt.allocPrint(allocator, "{s}{s}", .{ base[0..pos], path }); return std.fmt.allocPrint(allocator, "{s}{s}", .{ base[0..pos], path });
} }
@@ -262,22 +256,6 @@ test "URL: isComleteHTTPUrl" {
try testing.expectEqual(false, isComleteHTTPUrl("//lightpanda.io/about")); try testing.expectEqual(false, isComleteHTTPUrl("//lightpanda.io/about"));
} }
test "URL: resolve size" {
const base = "https://www.lightpande.io";
const url = try URL.parse(base, null);
var url_string: [511]u8 = undefined; // Currently this is the largest url we support, it is however recommended to at least support 2000 characters
@memset(&url_string, 'a');
var buf: [8192]u8 = undefined; // This is approximately the required size to support the current largest supported URL
var fba = std.heap.FixedBufferAllocator.init(&buf);
const out_url = try url.resolve(fba.allocator(), &url_string);
try std.testing.expectEqualStrings(out_url.raw[0..25], base);
try std.testing.expectEqual(out_url.raw[25], '/');
try std.testing.expectEqualStrings(out_url.raw[26..], &url_string);
}
test "URL: stitch" { test "URL: stitch" {
defer testing.reset(); defer testing.reset();