add robotsShutdownCallback

This commit is contained in:
Muki Kiboigo
2026-02-09 05:51:42 -08:00
parent 46c73a05a9
commit 65c9b2a5f7

View File

@@ -314,11 +314,14 @@ fn fetchRobotsThenProcessRequest(self: *Client, robots_url: [:0]const u8, req: R
const entry = try self.pending_robots_queue.getOrPut(self.allocator, robots_url);
if (!entry.found_existing) {
errdefer self.allocator.free(robots_url);
// If we aren't already fetching this robots,
// we want to create a new queue for it and add this request into it.
entry.value_ptr.* = .empty;
const ctx = try self.allocator.create(RobotsRequestContext);
errdefer self.allocator.destroy(ctx);
ctx.* = .{ .client = self, .req = req, .robots_url = robots_url, .buffer = .empty };
const headers = try self.newHeaders();
@@ -336,6 +339,7 @@ fn fetchRobotsThenProcessRequest(self: *Client, robots_url: [:0]const u8, req: R
.data_callback = robotsDataCallback,
.done_callback = robotsDoneCallback,
.error_callback = robotsErrorCallback,
.shutdown_callback = robotsShutdownCallback,
});
} else {
// Not using our own robots URL, only using the one from the first request.
@@ -422,6 +426,18 @@ fn robotsErrorCallback(ctx_ptr: *anyopaque, err: anyerror) void {
}
}
/// Shutdown hook for an in-flight robots.txt transfer: releases the
/// request context and removes this robots URL's pending-request queue.
///
/// Invoked with the type-erased `*RobotsRequestContext` that was passed
/// when the transfer was created in `fetchRobotsThenProcessRequest`.
fn robotsShutdownCallback(ctx_ptr: *anyopaque) void {
    const ctx: *RobotsRequestContext = @ptrCast(@alignCast(ctx_ptr));
    // Release the context (and, presumably, the robots_url it owns —
    // NOTE(review): confirm ctx.deinit frees robots_url, since fetchRemove
    // below only unlinks the key from the map) regardless of queue state.
    defer ctx.deinit();

    log.debug(.http, "robots fetch shutdown", .{});

    // The queue entry was inserted before the transfer was started, so a
    // missing entry here indicates corrupted bookkeeping — fail loudly.
    // (Fixed: message previously named robotsErrorCallback, a copy-paste
    // from the error path that would mislead panic triage.)
    var queued = ctx.client.pending_robots_queue.fetchRemove(
        ctx.robots_url,
    ) orelse @panic("Client.robotsShutdownCallback empty queue");
    defer queued.value.deinit(ctx.client.allocator);
}
fn waitForInterceptedResponse(self: *Client, transfer: *Transfer) !bool { fn waitForInterceptedResponse(self: *Client, transfer: *Transfer) !bool {
// The request was intercepted and is blocking. This is messy, but our // The request was intercepted and is blocking. This is messy, but our
// callers, the ScriptManager -> Page, don't have a great way to stop the // callers, the ScriptManager -> Page, don't have a great way to stop the