Mirror of https://github.com/lightpanda-io/browser.git (synced 2025-10-29 15:13:28 +00:00)
flatten events, include aarch + os, remove eid
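With this change the provider no longer wraps events in an `event`/`props` envelope keyed by a per-run `eid`; every field of the payload now sits at the top level of a single JSON object, and the host OS and CPU architecture tags are added. Below is a minimal, hypothetical sketch of that flattened shape, assuming the same `std.json` API the commit itself uses; the `FlatEvent` name and the field values are illustrative, not the provider's actual code.

```zig
const std = @import("std");
const builtin = @import("builtin");

// Hypothetical mirror of the new LightPandaEvent: everything lives at the top
// level of the JSON object instead of being nested under "props".
const FlatEvent = struct {
    iid: ?[]const u8,
    mode: enum { serve, fetch },
    driver: ?[]const u8,
    os: std.Target.Os.Tag,
    arch: std.Target.Cpu.Arch,
    version: []const u8,
    event: []const u8,
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const ev = FlatEvent{
        .iid = "11111111-2222-3333-4444-555555555555", // illustrative install id
        .mode = .serve,
        .driver = null, // the provider only sets "cdp" for navigate events
        .os = builtin.os.tag,
        .arch = builtin.cpu.arch,
        .version = "dev", // the provider uses build_info.git_commit here
        .event = "run",
    };

    // .emit_null_optional_fields = false drops the null "driver", so the body is
    // one flat object, e.g.
    // {"iid":"...","mode":"serve","os":"linux","arch":"aarch64","version":"dev","event":"run"}
    const json = try std.json.stringifyAlloc(allocator, ev, .{ .emit_null_optional_fields = false });
    defer allocator.free(json);
    std.debug.print("{s}\n", .{json});
}
```

Compared to the old payload, the `eid` field is gone entirely, and the run mode is no longer nested inside a `Run` struct on the `Event` union but carried by the provider itself.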
@@ -4,6 +4,11 @@ const Loop = @import("jsruntime").Loop;
 const Allocator = std.mem.Allocator;
 const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
 
+pub const RunMode = enum {
+    serve,
+    fetch,
+};
+
 // Container for global state / objects that various parts of the system
 // might need.
 pub const App = struct {
@@ -11,14 +16,14 @@ pub const App = struct {
     allocator: Allocator,
     telemetry: Telemetry,
 
-    pub fn init(allocator: Allocator) !App {
+    pub fn init(allocator: Allocator, run_mode: RunMode) !App {
         const loop = try allocator.create(Loop);
         errdefer allocator.destroy(loop);
 
         loop.* = try Loop.init(allocator);
         errdefer loop.deinit();
 
-        const telemetry = Telemetry.init(allocator, loop);
+        const telemetry = Telemetry.init(allocator, loop, run_mode);
         errdefer telemetry.deinit();
 
         return .{
@@ -263,7 +263,7 @@ const TestContext = struct {
 
 pub fn context() TestContext {
     return .{
-        .app = App.init(std.testing.allocator) catch unreachable,
+        .app = App.init(std.testing.allocator, .serve) catch unreachable,
         .arena = std.heap.ArenaAllocator.init(std.testing.allocator),
     };
 }
@@ -70,9 +70,9 @@ pub fn main() !void {
                 return args.printUsageAndExit(false);
             };
 
-            var app = try @import("app.zig").App.init(alloc);
+            var app = try @import("app.zig").App.init(alloc, .serve);
             defer app.deinit();
-            app.telemetry.record(.{ .run = .{ .mode = .serve, .version = version } });
+            app.telemetry.record(.{ .run = {} });
 
             const timeout = std.time.ns_per_s * @as(u64, opts.timeout);
             server.run(&app, address, timeout) catch |err| {
@@ -83,9 +83,9 @@ pub fn main() !void {
         .fetch => |opts| {
             log.debug("Fetch mode: url {s}, dump {any}", .{ opts.url, opts.dump });
 
-            var app = try @import("app.zig").App.init(alloc);
+            var app = try @import("app.zig").App.init(alloc, .fetch);
             defer app.deinit();
-            app.telemetry.record(.{ .run = .{ .mode = .fetch, .version = version } });
+            app.telemetry.record(.{ .run = {} });
 
             // vm
             const vm = jsruntime.VM.init();
@@ -1,196 +1,127 @@
 const std = @import("std");
-const Allocator = std.mem.Allocator;
-const ArenAallocator = std.heap.ArenaAllocator;
+const builtin = @import("builtin");
+const build_info = @import("build_info");
 
-const Loop = @import("jsruntime").Loop;
-const Client = @import("asyncio").Client;
+const Thread = std.Thread;
+const Allocator = std.mem.Allocator;
+
+const telemetry = @import("telemetry.zig");
+const RunMode = @import("../app.zig").RunMode;
 
 const log = std.log.scoped(.telemetry);
-const URL = "https://telemetry.lightpanda.io/";
+const URL = "https://telemetry.lightpanda.io";
 
 pub const LightPanda = struct {
     uri: std.Uri,
-    io: Client.IO,
-    client: Client,
+    pending: List,
+    running: bool,
+    thread: ?std.Thread,
     allocator: Allocator,
-    sending_pool: std.heap.MemoryPool(Sending),
-    client_context_pool: std.heap.MemoryPool(Client.Ctx),
+    mutex: std.Thread.Mutex,
+    cond: Thread.Condition,
+    node_pool: std.heap.MemoryPool(List.Node),
 
-    pub fn init(allocator: Allocator, loop: *Loop) !LightPanda {
+    const List = std.DoublyLinkedList(LightPandaEvent);
+
+    pub fn init(allocator: Allocator) !LightPanda {
         return .{
+            .cond = .{},
+            .mutex = .{},
+            .pending = .{},
+            .thread = null,
+            .running = true,
             .allocator = allocator,
-            .io = Client.IO.init(loop),
-            .client = .{ .allocator = allocator },
             .uri = std.Uri.parse(URL) catch unreachable,
-            .sending_pool = std.heap.MemoryPool(Sending).init(allocator),
-            .client_context_pool = std.heap.MemoryPool(Client.Ctx).init(allocator),
+            .node_pool = std.heap.MemoryPool(List.Node).init(allocator),
         };
     }
 
     pub fn deinit(self: *LightPanda) void {
-        self.client.deinit();
-        self.sending_pool.deinit();
-        self.client_context_pool.deinit();
+        if (self.thread) |*thread| {
+            self.mutex.lock();
+            self.running = false;
+            self.mutex.unlock();
+            self.cond.signal();
+            thread.join();
+        }
+        self.node_pool.deinit();
     }
 
-    pub fn send(self: *LightPanda, iid: ?[]const u8, eid: []const u8, event: anytype) !void {
-        var arena = std.heap.ArenaAllocator.init(self.allocator);
-        errdefer arena.deinit();
-
-        const resp_header_buffer = try arena.allocator().alloc(u8, 4096);
-        const body = try std.json.stringifyAlloc(arena.allocator(), .{
-            .iid = iid,
-            .eid = eid,
-            .event = event,
-        }, .{});
-
-        const sending = try self.sending_pool.create();
-        errdefer self.sending_pool.destroy(sending);
-
-        sending.* = .{
-            .body = body,
-            .arena = arena,
-            .lightpanda = self,
-            .request = try self.client.create(.POST, self.uri, .{
-                .server_header_buffer = resp_header_buffer,
-            }),
-        };
-        errdefer sending.request.deinit();
-
-        const ctx = try self.client_context_pool.create();
-        errdefer self.client_context_pool.destroy(ctx);
-
-        ctx.* = try Client.Ctx.init(&self.io, &sending.request);
-        ctx.userData = sending;
-
-        try self.client.async_open(
-            .POST,
-            self.uri,
-            .{ .server_header_buffer = resp_header_buffer },
-            ctx,
-            onRequestConnect,
-        );
+    pub fn send(self: *LightPanda, iid: ?[]const u8, run_mode: RunMode, raw_event: telemetry.Event) !void {
+        const event = LightPandaEvent{
+            .iid = iid,
+            .driver = if (std.meta.activeTag(raw_event) == .navigate) "cdp" else null,
+            .mode = run_mode,
+            .os = builtin.os.tag,
+            .arch = builtin.cpu.arch,
+            .version = build_info.git_commit,
+            .event = @tagName(std.meta.activeTag(raw_event)),
+        };
+
+        self.mutex.lock();
+        defer self.mutex.unlock();
+        if (self.thread == null) {
+            self.thread = try std.Thread.spawn(.{}, run, .{self});
+        }
+
+        const node = try self.node_pool.create();
+        errdefer self.node_pool.destroy(node);
+        node.data = event;
+        self.pending.append(node);
+        self.cond.signal();
     }
 
-    fn handleError(sending: *Sending, ctx: *Client.Ctx, err: anyerror) anyerror!void {
-        const lightpanda = sending.lightpanda;
-
-        ctx.deinit();
-        lightpanda.client_context_pool.destroy(ctx);
-
-        sending.deinit();
-        lightpanda.sending_pool.destroy(sending);
-        log.info("request failure: {}", .{err});
-    }
-
-    fn onRequestConnect(ctx: *Client.Ctx, res: anyerror!void) anyerror!void {
-        const sending: *Sending = @ptrCast(@alignCast(ctx.userData));
-        res catch |err| return handleError(sending, ctx, err);
-
-        ctx.req.transfer_encoding = .{ .content_length = sending.body.len };
-        return ctx.req.async_send(ctx, onRequestSend) catch |err| {
-            return handleError(sending, ctx, err);
-        };
-    }
-
-    fn onRequestSend(ctx: *Client.Ctx, res: anyerror!void) anyerror!void {
-        const sending: *Sending = @ptrCast(@alignCast(ctx.userData));
-        res catch |err| return handleError(sending, ctx, err);
-
-        return ctx.req.async_writeAll(sending.body, ctx, onRequestWrite) catch |err| {
-            return handleError(sending, ctx, err);
-        };
-    }
-
-    fn onRequestWrite(ctx: *Client.Ctx, res: anyerror!void) anyerror!void {
-        const sending: *Sending = @ptrCast(@alignCast(ctx.userData));
-        res catch |err| return handleError(sending, ctx, err);
-        return ctx.req.async_finish(ctx, onRequestFinish) catch |err| {
-            return handleError(sending, ctx, err);
-        };
-    }
-
-    fn onRequestFinish(ctx: *Client.Ctx, res: anyerror!void) anyerror!void {
-        const sending: *Sending = @ptrCast(@alignCast(ctx.userData));
-        res catch |err| return handleError(sending, ctx, err);
-        return ctx.req.async_wait(ctx, onRequestWait) catch |err| {
-            return handleError(sending, ctx, err);
-        };
-    }
-
-    fn onRequestWait(ctx: *Client.Ctx, res: anyerror!void) anyerror!void {
-        const sending: *Sending = @ptrCast(@alignCast(ctx.userData));
-        res catch |err| return handleError(sending, ctx, err);
-
-        const lightpanda = sending.lightpanda;
-
-        defer {
-            ctx.deinit();
-            lightpanda.client_context_pool.destroy(ctx);
-
-            sending.deinit();
-            lightpanda.sending_pool.destroy(sending);
-        }
-
-        var buffer: [2048]u8 = undefined;
-        const reader = ctx.req.reader();
-        while (true) {
-            const n = reader.read(&buffer) catch 0;
-            if (n == 0) {
-                break;
-            }
-        }
-        if (ctx.req.response.status != .ok) {
-            log.info("invalid response: {d}", .{@intFromEnum(ctx.req.response.status)});
-        }
-    }
+    fn run(self: *LightPanda) void {
+        var arr: std.ArrayListUnmanaged(u8) = .{};
+        var client = std.http.Client{ .allocator = self.allocator };
+
+        defer {
+            arr.deinit(self.allocator);
+            client.deinit();
+        }
+
+        self.mutex.lock();
+        while (true) {
+            while (self.pending.popFirst()) |node| {
+                self.mutex.unlock();
+                self.postEvent(&node.data, &client, &arr) catch |err| {
+                    log.warn("Telementry reporting error: {}", .{err});
+                };
+                self.mutex.lock();
+                self.node_pool.destroy(node);
+            }
+            if (self.running == false) {
+                return;
+            }
+            self.cond.wait(&self.mutex);
+        }
+    }
+
+    fn postEvent(self: *const LightPanda, event: *const LightPandaEvent, client: *std.http.Client, arr: *std.ArrayListUnmanaged(u8)) !void {
+        defer arr.clearRetainingCapacity();
+        try std.json.stringify(event, .{ .emit_null_optional_fields = false }, arr.writer(self.allocator));
+
+        var response_header_buffer: [2048]u8 = undefined;
+
+        const result = try client.fetch(.{
+            .method = .POST,
+            .payload = arr.items,
+            .response_storage = .ignore,
+            .location = .{ .uri = self.uri },
+            .server_header_buffer = &response_header_buffer,
+        });
+        if (result.status != .ok) {
+            log.warn("server error status: {}", .{result.status});
+        }
+    }
 };
 
-const Sending = struct {
-    body: []const u8,
-    request: Client.Request,
-    lightpanda: *LightPanda,
-    arena: std.heap.ArenaAllocator,
-
-    pub fn deinit(self: *Sending) void {
-        self.arena.deinit();
-        self.request.deinit();
-    }
-};
-
-// // wraps a telemetry event so that we can serialize it to plausible's event endpoint
-// const EventWrap = struct {
-//     iid: ?[]const u8,
-//     eid: []const u8,
-//     event: *const Event,
-
-//     pub fn jsonStringify(self: *const EventWrap, jws: anytype) !void {
-//         try jws.beginObject();
-//         try jws.objectField("iid");
-//         try jws.write(self.iid);
-//         try jws.objectField("eid");
-//         try jws.write(self.eid);
-//         try jws.objectField("event");
-//         try jws.write(@tagName(self.event.*));
-//         try jws.objectField("props");
-//         switch (self.event) {
-//             inline else => |props| try jws.write(props),
-//         }
-//         try jws.endObject();
-//     }
-// };
-
-// const testing = std.testing;
-// test "telemetry: lightpanda json event" {
-//     const json = try std.json.stringifyAlloc(testing.allocator, EventWrap{
-//         .iid = "1234",
-//         .eid = "abc!",
-//         .event = .{ .run = .{ .mode = .serve, .version = "over 9000!" } }
-//     }, .{});
-//     defer testing.allocator.free(json);
-
-//     try testing.expectEqualStrings(
-//         \\{"event":"run","iid""1234","eid":"abc!","props":{"version":"over 9000!","mode":"serve"}}
-//     , json);
-// }
+const LightPandaEvent = struct {
+    iid: ?[]const u8,
+    mode: RunMode,
+    driver: ?[]const u8,
+    os: std.Target.Os.Tag,
+    arch: std.Target.Cpu.Arch,
+    version: []const u8,
+    event: []const u8,
+};
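The hunk above replaces the callback-chained async client with a small producer/consumer queue: `send` appends a node under a mutex and signals a condition variable, a single lazily spawned background thread drains the list and performs the blocking POST, and `deinit` flips `running`, signals, and joins. The following is a self-contained sketch of that pattern only, assuming the same std APIs the commit targets; the `Queue` type, its method names, and the `std.debug.print` stand-in for the HTTP call are illustrative, not the provider itself.

```zig
const std = @import("std");

// Illustrative queue-plus-worker sketch (not the provider): producers append under a
// mutex and signal a condition variable; one background thread drains the list and
// performs the blocking work; shutdown flips `running`, signals, and joins.
const Queue = struct {
    const List = std.DoublyLinkedList([]const u8);

    allocator: std.mem.Allocator,
    mutex: std.Thread.Mutex = .{},
    cond: std.Thread.Condition = .{},
    pending: List = .{},
    running: bool = true,
    thread: ?std.Thread = null,

    fn push(self: *Queue, msg: []const u8) !void {
        self.mutex.lock();
        defer self.mutex.unlock();
        if (self.thread == null) {
            // spawn the worker lazily, on the first event
            self.thread = try std.Thread.spawn(.{}, run, .{self});
        }
        const node = try self.allocator.create(List.Node);
        node.data = msg;
        self.pending.append(node);
        self.cond.signal();
    }

    fn run(self: *Queue) void {
        self.mutex.lock();
        defer self.mutex.unlock();
        while (true) {
            while (self.pending.popFirst()) |node| {
                // drop the lock while doing the slow part (the real code POSTs here)
                self.mutex.unlock();
                std.debug.print("posting: {s}\n", .{node.data});
                self.mutex.lock();
                self.allocator.destroy(node);
            }
            if (!self.running) return;
            self.cond.wait(&self.mutex);
        }
    }

    fn shutdown(self: *Queue) void {
        if (self.thread) |thread| {
            self.mutex.lock();
            self.running = false;
            self.mutex.unlock();
            self.cond.signal();
            thread.join();
        }
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    var queue = Queue{ .allocator = gpa.allocator() };
    try queue.push("run");
    try queue.push("navigate");
    queue.shutdown();
}
```

Keeping the network call outside the lock is the point of the unlock/relock dance in the inner loop: producers can keep enqueueing while the worker is mid-request.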
@@ -5,6 +5,7 @@ const Allocator = std.mem.Allocator;
 
 const Loop = @import("jsruntime").Loop;
 const uuidv4 = @import("../id.zig").uuidv4;
+const RunMode = @import("../app.zig").RunMode;
 
 const log = std.log.scoped(.telemetry);
 const ID_FILE = "lightpanda.id";
@@ -20,24 +21,21 @@ fn TelemetryT(comptime P: type) type {
         // null on IO error
         iid: ?[36]u8,
 
-        // a "execution" id is an id that represents this specific run
-        eid: [36]u8,
         provider: P,
 
         disabled: bool,
 
+        run_mode: RunMode,
+
         const Self = @This();
 
-        pub fn init(allocator: Allocator, loop: *Loop) Self {
+        pub fn init(allocator: Allocator, loop: *Loop, run_mode: RunMode) Self {
            const disabled = std.process.hasEnvVarConstant("LIGHTPANDA_DISABLE_TELEMETRY");
 
-            var eid: [36]u8 = undefined;
-            uuidv4(&eid);
-
            return .{
-                .iid = if (disabled) null else getOrCreateId(),
-                .eid = eid,
                .disabled = disabled,
+                .run_mode = run_mode,
+                .iid = if (disabled) null else getOrCreateId(),
                .provider = try P.init(allocator, loop),
            };
        }
@@ -51,7 +49,7 @@ fn TelemetryT(comptime P: type) type {
                return;
            }
            const iid: ?[]const u8 = if (self.iid) |*iid| iid else null;
-            self.provider.send(iid, &self.eid, &event) catch |err| {
+            self.provider.send(iid, self.run_mode, &event) catch |err| {
                log.warn("failed to record event: {}", .{err});
            };
        }
@@ -83,19 +81,9 @@ fn getOrCreateId() ?[36]u8 {
 }
 
 pub const Event = union(enum) {
-    run: Run,
+    run: void,
     navigate: void,
     flag: []const u8, // used for testing
-
-    const Run = struct {
-        version: []const u8,
-        mode: RunMode,
-
-        const RunMode = enum {
-            fetch,
-            serve,
-        };
-    };
 };
 
 const NoopProvider = struct {
@@ -103,11 +91,12 @@ const NoopProvider = struct {
        return .{};
    }
    fn deinit(_: NoopProvider) void {}
-    pub fn send(_: NoopProvider, _: ?[]const u8, _: []const u8, _: anytype) !void {}
+    pub fn send(_: NoopProvider, _: ?[]const u8, _: RunMode, _: *const Event) !void {}
 };
 
 extern fn setenv(name: [*:0]u8, value: [*:0]u8, override: c_int) c_int;
 extern fn unsetenv(name: [*:0]u8) c_int;
 
 const testing = std.testing;
 test "telemetry: disabled by environment" {
    _ = setenv(@constCast("LIGHTPANDA_DISABLE_TELEMETRY"), @constCast(""), 0);
@@ -118,14 +107,14 @@ test "telemetry: disabled by environment" {
            return .{};
        }
        fn deinit(_: @This()) void {}
-        pub fn send(_: @This(), _: ?[]const u8, _: []const u8, _: anytype) !void {
+        pub fn send(_: @This(), _: ?[]const u8, _: RunMode, _: *const Event) !void {
            unreachable;
        }
    };
 
-    var telemetry = TelemetryT(FailingProvider).init(testing.allocator, undefined);
+    var telemetry = TelemetryT(FailingProvider).init(testing.allocator, undefined, .serve);
    defer telemetry.deinit();
-    telemetry.record(.{ .run = .{ .mode = .serve, .version = "123" } });
+    telemetry.record(.{ .run = {} });
 }
 
 test "telemetry: getOrCreateId" {
@@ -146,7 +135,7 @@ test "telemetry: sends event to provider" {
    defer std.fs.cwd().deleteFile(ID_FILE) catch {};
    std.fs.cwd().deleteFile(ID_FILE) catch {};
 
-    var telemetry = TelemetryT(MockProvider).init(testing.allocator, undefined);
+    var telemetry = TelemetryT(MockProvider).init(testing.allocator, undefined, .serve);
    defer telemetry.deinit();
    const mock = &telemetry.provider;
 
@@ -162,14 +151,14 @@ test "telemetry: sends event to provider" {
 
 const MockProvider = struct {
    iid: ?[]const u8,
-    eid: ?[]const u8,
+    run_mode: ?RunMode,
    allocator: Allocator,
    events: std.ArrayListUnmanaged(Event),
 
    fn init(allocator: Allocator, _: *Loop) !@This() {
        return .{
            .iid = null,
-            .eid = null,
+            .run_mode = null,
            .events = .{},
            .allocator = allocator,
        };
@@ -177,14 +166,14 @@ const MockProvider = struct {
    fn deinit(self: *MockProvider) void {
        self.events.deinit(self.allocator);
    }
-    pub fn send(self: *MockProvider, iid: ?[]const u8, eid: []const u8, events: *const Event) !void {
+    pub fn send(self: *MockProvider, iid: ?[]const u8, run_mode: RunMode, events: *const Event) !void {
        if (self.iid == null) {
-            try testing.expectEqual(null, self.eid);
+            try testing.expectEqual(null, self.run_mode);
            self.iid = iid.?;
-            self.eid = eid;
+            self.run_mode = run_mode;
        } else {
            try testing.expectEqualStrings(self.iid.?, iid.?);
-            try testing.expectEqualStrings(self.eid.?, eid);
+            try testing.expectEqual(self.run_mode.?, run_mode);
        }
        try self.events.append(self.allocator, events.*);
    }
@@ -45,7 +45,7 @@ pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
 
-    var app = try App.init(allocator);
+    var app = try App.init(allocator, .serve);
    defer app.deinit();
 
    const env = Env.init(allocator);