Merge pull request #1102 from lightpanda-io/readable_stream_uint8array

Better support for Uint8Array in ReadableStream
Karl Seguin
2025-09-30 19:03:46 +08:00
committed by GitHub
7 changed files with 156 additions and 96 deletions
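
Illustration (not part of the diff): the user-visible goal is that a Uint8Array enqueued into a ReadableStream comes back from read() as a Uint8Array instead of being coerced into a string. A minimal JS sketch of that behavior:

    const bytes = new TextEncoder().encode('hello');
    const stream = new ReadableStream({
        start(controller) {
            controller.enqueue(bytes); // binary chunk
            controller.close();
        }
    });

    stream.getReader().read().then(({ value, done }) => {
        // expected: value is the enqueued Uint8Array, done is false
        console.log(value instanceof Uint8Array, done);
    });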

View File

@@ -181,7 +181,7 @@ pub fn constructor(input: RequestInput, _options: ?RequestInit, page: *Page) !Re
 pub fn get_body(self: *const Request, page: *Page) !?*ReadableStream {
     if (self.body) |body| {
         const stream = try ReadableStream.constructor(null, null, page);
-        try stream.queue.append(page.arena, body);
+        try stream.queue.append(page.arena, .{ .string = body });
         return stream;
     } else return null;
 }

View File

@@ -109,7 +109,7 @@ pub fn constructor(_input: ?ResponseBody, _options: ?ResponseOptions, page: *Pag
 pub fn get_body(self: *const Response, page: *Page) !*ReadableStream {
     const stream = try ReadableStream.constructor(null, null, page);
     if (self.body) |body| {
-        try stream.queue.append(page.arena, body);
+        try stream.queue.append(page.arena, .{ .string = body });
     }
     return stream;
 }
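
Illustration (not part of the diff): Request.get_body and Response.get_body now wrap the stored body explicitly as a .string chunk when building the stream. Assuming the Response constructor here accepts a string body, reading it back looks like:

    const response = new Response('over 9000');
    response.body.getReader().read().then(({ value, done }) => {
        // the first read yields the queued body chunk; done is false
        console.log(value, done);
    });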

View File

@@ -19,8 +19,9 @@
 const std = @import("std");
 const log = @import("../../log.zig");
-const Page = @import("../page.zig").Page;
+const Allocator = std.mem.Allocator;
 const Env = @import("../env.zig").Env;
+const Page = @import("../page.zig").Page;
 const ReadableStream = @This();
 const ReadableStreamDefaultReader = @import("ReadableStreamDefaultReader.zig");
@@ -45,16 +46,42 @@ cancel_fn: ?Env.Function = null,
 pull_fn: ?Env.Function = null,
 strategy: QueueingStrategy,
-queue: std.ArrayListUnmanaged([]const u8) = .empty,
+queue: std.ArrayListUnmanaged(Chunk) = .empty,
+
+pub const Chunk = union(enum) {
+    // the order matters, sorry.
+    uint8array: Env.TypedArray(u8),
+    string: []const u8,
+
+    pub fn dupe(self: Chunk, allocator: Allocator) !Chunk {
+        return switch (self) {
+            .string => |str| .{ .string = try allocator.dupe(u8, str) },
+            .uint8array => |arr| .{ .uint8array = try arr.dupe(allocator) },
+        };
+    }
+};
 
 pub const ReadableStreamReadResult = struct {
-    const ValueUnion =
-        union(enum) { data: []const u8, empty: void };
-
-    value: ValueUnion,
     done: bool,
+    value: Value = .empty,
 
-    pub fn get_value(self: *const ReadableStreamReadResult) ValueUnion {
+    const Value = union(enum) {
+        empty,
+        data: Chunk,
+    };
+
+    pub fn init(chunk: Chunk, done: bool) ReadableStreamReadResult {
+        if (done) {
+            return .{ .done = true, .value = .empty };
+        }
+        return .{
+            .done = false,
+            .value = .{ .data = chunk },
+        };
+    }
+
+    pub fn get_value(self: *const ReadableStreamReadResult) Value {
         return self.value;
     }
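
Illustration (not part of the diff): ReadableStreamReadResult is what read() resolves with on the JS side: done plus a value that is either empty (when the stream is exhausted, presumably surfacing as undefined) or the queued chunk, now either a string or a Uint8Array. A small read loop covering both cases:

    const stream = new ReadableStream({
        start(controller) {
            controller.enqueue(new Uint8Array([1, 2, 3]));
            controller.enqueue('text chunk');
            controller.close();
        }
    });

    const reader = stream.getReader();
    (async () => {
        for (;;) {
            const { value, done } = await reader.read();
            if (done) break;    // maps to the .empty value variant
            console.log(value); // Uint8Array [1, 2, 3], then 'text chunk'
        }
    })();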

View File

@@ -51,17 +51,17 @@ pub fn _close(self: *ReadableStreamDefaultController, _reason: ?[]const u8, page
     // to discard, must use cancel.
 }
 
-pub fn _enqueue(self: *ReadableStreamDefaultController, chunk: []const u8, page: *Page) !void {
+pub fn _enqueue(self: *ReadableStreamDefaultController, chunk: ReadableStream.Chunk, page: *Page) !void {
     const stream = self.stream;
     if (stream.state != .readable) {
         return error.TypeError;
     }
 
-    const duped_chunk = try page.arena.dupe(u8, chunk);
+    const duped_chunk = try chunk.dupe(page.arena);
     if (self.stream.reader_resolver) |*rr| {
-        try rr.resolve(ReadableStreamReadResult{ .value = .{ .data = duped_chunk }, .done = false });
+        try rr.resolve(ReadableStreamReadResult.init(duped_chunk, false));
         self.stream.reader_resolver = null;
     }
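
Illustration (not part of the diff): _enqueue dupes the incoming chunk and, if a read() is already waiting, resolves that pending read directly through reader_resolver instead of going through the queue. A JS sketch of that path:

    let controller;
    const stream = new ReadableStream({
        start(c) { controller = c; }
    });

    // read() is issued while the queue is still empty, so it stays pending...
    stream.getReader().read().then(({ value, done }) => {
        console.log(value, done); // Uint8Array [7, 7], false
    });

    // ...and this enqueue resolves the pending read directly.
    controller.enqueue(new Uint8Array([7, 7]));
    controller.close();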

View File

@@ -49,7 +49,7 @@ pub fn _read(self: *const ReadableStreamDefaultReader, page: *Page) !Env.Promise
         const data = self.stream.queue.orderedRemove(0);
         const resolver = page.main_context.createPromiseResolver();
-        try resolver.resolve(ReadableStreamReadResult{ .value = .{ .data = data }, .done = false });
+        try resolver.resolve(ReadableStreamReadResult.init(data, false));
         try self.stream.pullIf();
         return resolver.promise();
     } else {
@@ -67,9 +67,9 @@ pub fn _read(self: *const ReadableStreamDefaultReader, page: *Page) !Env.Promise
     if (stream.queue.items.len > 0) {
         const data = self.stream.queue.orderedRemove(0);
-        try resolver.resolve(ReadableStreamReadResult{ .value = .{ .data = data }, .done = false });
+        try resolver.resolve(ReadableStreamReadResult.init(data, false));
     } else {
-        try resolver.resolve(ReadableStreamReadResult{ .value = .empty, .done = true });
+        try resolver.resolve(ReadableStreamReadResult{ .done = true });
     }
 
     return resolver.promise();
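
Illustration (not part of the diff): the reader's two branches above — a queued chunk resolves immediately via ReadableStreamReadResult.init, while an empty queue on a finished stream resolves with done: true and no value. From JS:

    const stream = new ReadableStream({
        start(controller) {
            controller.enqueue('queued before any read');
            controller.close();
        }
    });

    const reader = stream.getReader();
    reader.read().then((r) => console.log(r.value, r.done)); // 'queued before any read', false
    reader.read().then((r) => console.log(r.value, r.done)); // undefined, true (queue drained, stream closed)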

View File

@@ -1094,88 +1094,10 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
         }
     },
     .slice => {
-        var force_u8 = false;
-        var array_buffer: ?v8.ArrayBuffer = null;
-        if (js_value.isTypedArray()) {
-            const buffer_view = js_value.castTo(v8.ArrayBufferView);
-            array_buffer = buffer_view.getBuffer();
-        } else if (js_value.isArrayBufferView()) {
-            force_u8 = true;
-            const buffer_view = js_value.castTo(v8.ArrayBufferView);
-            array_buffer = buffer_view.getBuffer();
-        } else if (js_value.isArrayBuffer()) {
-            force_u8 = true;
-            array_buffer = js_value.castTo(v8.ArrayBuffer);
-        }
-        if (array_buffer) |buffer| {
-            const backing_store = v8.BackingStore.sharedPtrGet(&buffer.getBackingStore());
-            const data = backing_store.getData();
-            const byte_len = backing_store.getByteLength();
-            switch (ptr.child) {
-                u8 => {
-                    // need this sentinel check to keep the compiler happy
-                    if (ptr.sentinel() == null) {
-                        if (force_u8 or js_value.isUint8Array() or js_value.isUint8ClampedArray()) {
-                            if (byte_len == 0) return &[_]u8{};
-                            const arr_ptr = @as([*]u8, @ptrCast(@alignCast(data)));
-                            return arr_ptr[0..byte_len];
-                        }
-                    }
-                },
-                i8 => {
-                    if (js_value.isInt8Array()) {
-                        if (byte_len == 0) return &[_]i8{};
-                        const arr_ptr = @as([*]i8, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0..byte_len];
-                    }
-                },
-                u16 => {
-                    if (js_value.isUint16Array()) {
-                        if (byte_len == 0) return &[_]u16{};
-                        const arr_ptr = @as([*]u16, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 2];
-                    }
-                },
-                i16 => {
-                    if (js_value.isInt16Array()) {
-                        if (byte_len == 0) return &[_]i16{};
-                        const arr_ptr = @as([*]i16, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 2];
-                    }
-                },
-                u32 => {
-                    if (js_value.isUint32Array()) {
-                        if (byte_len == 0) return &[_]u32{};
-                        const arr_ptr = @as([*]u32, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 4];
-                    }
-                },
-                i32 => {
-                    if (js_value.isInt32Array()) {
-                        if (byte_len == 0) return &[_]i32{};
-                        const arr_ptr = @as([*]i32, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 4];
-                    }
-                },
-                u64 => {
-                    if (js_value.isBigUint64Array()) {
-                        if (byte_len == 0) return &[_]u64{};
-                        const arr_ptr = @as([*]u64, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 8];
-                    }
-                },
-                i64 => {
-                    if (js_value.isBigInt64Array()) {
-                        if (byte_len == 0) return &[_]i64{};
-                        const arr_ptr = @as([*]i64, @ptrCast(@alignCast(data)));
-                        return arr_ptr[0 .. byte_len / 8];
-                    }
-                },
-                else => {},
+        if (ptr.sentinel() == null) {
+            if (try self.jsValueToTypedArray(ptr.child, js_value)) |value| {
+                return value;
+            }
+            return error.InvalidArgument;
+        }
         if (ptr.child == u8) {
@@ -1282,6 +1204,12 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
         return try self.createFunction(js_value);
     }
 
+    if (@hasDecl(T, "_TYPED_ARRAY_ID_KLUDGE")) {
+        const VT = @typeInfo(std.meta.fieldInfo(T, .values).type).pointer.child;
+        const arr = (try self.jsValueToTypedArray(VT, js_value)) orelse return null;
+        return .{ .values = arr };
+    }
+
     if (T == String) {
         return .{ .string = try valueToString(self.context_arena, js_value, self.isolate, self.v8_context) };
     }
@@ -1320,6 +1248,90 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
         return value;
     }
 
+    fn jsValueToTypedArray(_: *JsContext, comptime T: type, js_value: v8.Value) !?[]T {
+        var force_u8 = false;
+        var array_buffer: ?v8.ArrayBuffer = null;
+        if (js_value.isTypedArray()) {
+            const buffer_view = js_value.castTo(v8.ArrayBufferView);
+            array_buffer = buffer_view.getBuffer();
+        } else if (js_value.isArrayBufferView()) {
+            force_u8 = true;
+            const buffer_view = js_value.castTo(v8.ArrayBufferView);
+            array_buffer = buffer_view.getBuffer();
+        } else if (js_value.isArrayBuffer()) {
+            force_u8 = true;
+            array_buffer = js_value.castTo(v8.ArrayBuffer);
+        }
+
+        const buffer = array_buffer orelse return null;
+        const backing_store = v8.BackingStore.sharedPtrGet(&buffer.getBackingStore());
+        const data = backing_store.getData();
+        const byte_len = backing_store.getByteLength();
+
+        switch (T) {
+            u8 => {
+                // need this sentinel check to keep the compiler happy
+                if (force_u8 or js_value.isUint8Array() or js_value.isUint8ClampedArray()) {
+                    if (byte_len == 0) return &[_]u8{};
+                    const arr_ptr = @as([*]u8, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0..byte_len];
+                }
+            },
+            i8 => {
+                if (js_value.isInt8Array()) {
+                    if (byte_len == 0) return &[_]i8{};
+                    const arr_ptr = @as([*]i8, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0..byte_len];
+                }
+            },
+            u16 => {
+                if (js_value.isUint16Array()) {
+                    if (byte_len == 0) return &[_]u16{};
+                    const arr_ptr = @as([*]u16, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 2];
+                }
+            },
+            i16 => {
+                if (js_value.isInt16Array()) {
+                    if (byte_len == 0) return &[_]i16{};
+                    const arr_ptr = @as([*]i16, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 2];
+                }
+            },
+            u32 => {
+                if (js_value.isUint32Array()) {
+                    if (byte_len == 0) return &[_]u32{};
+                    const arr_ptr = @as([*]u32, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 4];
+                }
+            },
+            i32 => {
+                if (js_value.isInt32Array()) {
+                    if (byte_len == 0) return &[_]i32{};
+                    const arr_ptr = @as([*]i32, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 4];
+                }
+            },
+            u64 => {
+                if (js_value.isBigUint64Array()) {
+                    if (byte_len == 0) return &[_]u64{};
+                    const arr_ptr = @as([*]u64, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 8];
+                }
+            },
+            i64 => {
+                if (js_value.isBigInt64Array()) {
+                    if (byte_len == 0) return &[_]i64{};
+                    const arr_ptr = @as([*]i64, @ptrCast(@alignCast(data)));
+                    return arr_ptr[0 .. byte_len / 8];
+                }
+            },
+            else => {},
+        }
+
+        return error.InvalidArgument;
+    }
+
     fn createFunction(self: *JsContext, js_value: v8.Value) !Function {
         // caller should have made sure this was a function
         std.debug.assert(js_value.isFunction());
@@ -2387,6 +2399,10 @@ pub fn Env(comptime State: type, comptime WebApis: type) type {
     const _TYPED_ARRAY_ID_KLUDGE = true;
     values: []const T,
 
+    pub fn dupe(self: TypedArray(T), allocator: Allocator) !TypedArray(T) {
+        return .{ .values = try allocator.dupe(T, self.values) };
+    }
 };
 }
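
Illustration (not part of the diff): the extracted jsValueToTypedArray maps each typed-array flavour to the matching slice type, with force_u8 treating a bare ArrayBuffer or any other ArrayBufferView (e.g. a DataView) as raw bytes. Through the TypedArray(u8) kludge above, enqueue should therefore tolerate several binary representations; a hedged sketch (the exact accepted set depends on how the Chunk union coerces arguments):

    const bytes = new Uint8Array([104, 105]); // "hi"
    const stream = new ReadableStream({
        start(controller) {
            controller.enqueue(bytes);                        // isUint8Array
            controller.enqueue(new Uint8ClampedArray(bytes)); // isUint8ClampedArray
            controller.enqueue(bytes.buffer);                 // bare ArrayBuffer: force_u8 path
            controller.enqueue(new DataView(bytes.buffer));   // other ArrayBufferView: force_u8 path
            controller.close();
        }
    });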

View File

@@ -1,3 +1,4 @@
+<!DOCTYPE html>
 <script src="../testing.js"></script>
 <script id=readable_stream>
@@ -17,6 +18,22 @@
     });
 </script>
 
+<script id=readable_stream_binary>
+    const input = new TextEncoder().encode('over 9000!');
+    const binStream = new ReadableStream({
+        start(controller) {
+            controller.enqueue(input);
+            controller.enqueue("world");
+            controller.close();
+        }
+    });
+
+    testing.async(binStream.getReader().read(), (data) => {
+        testing.expectEqual(input, data.value);
+        testing.expectEqual(false, data.done);
+    });
+</script>
+
 <script id=readable_stream_close>
     var closeResult;