85 Commits

Author SHA1 Message Date
Pierre Tachoire
e42cbe3336 ci: add a web bot auth signature test 2026-03-10 10:25:06 +01:00
Pierre Tachoire
1f2dd7e6e5 ci: add e2e tests w/ web bot auth 2026-03-10 10:24:58 +01:00
Muki Kiboigo
02f3b8899b add WebBotAuth unit tests 2026-03-05 21:38:16 -08:00
Muki Kiboigo
b18c0311d0 fix cli argument for WebBotAuth domain 2026-03-05 19:29:33 -08:00
Muki Kiboigo
9754c2830c simplify parsePemPrivateKey 2026-03-05 19:29:32 -08:00
Muki Kiboigo
e4b32a1a91 make pem private key buffers smaller with comments 2026-03-05 19:29:32 -08:00
Muki Kiboigo
6161c0d701 use transfer arena to sign webbotauth request 2026-03-05 19:29:32 -08:00
Muki Kiboigo
5107395917 auth challenge only on use_proxy 2026-03-05 19:29:32 -08:00
Muki Kiboigo
91254eb365 properly deinit web bot auth in app 2026-03-05 19:29:32 -08:00
Muki Kiboigo
79c6b1ed0a add support for WebBotAuth in Client 2026-03-05 19:29:32 -08:00
Muki Kiboigo
48b00634c6 add WebBotAuth and support for ed25519 to crypto 2026-03-05 19:29:32 -08:00
Muki Kiboigo
201e445ca8 add web bot auth args 2026-03-05 19:29:31 -08:00
Karl Seguin
7322f90af4 Merge pull request #1722 from lightpanda-io/fetch_wait_for_background
Some checks failed
e2e-test / zig build release (push) Has been cancelled
e2e-test / demo-scripts (push) Has been cancelled
e2e-test / cdp-and-hyperfine-bench (push) Has been cancelled
e2e-test / perf-fmt (push) Has been cancelled
e2e-test / browser fetch (push) Has been cancelled
zig-test / zig test using v8 in debug mode (push) Has been cancelled
zig-test / zig test (push) Has been cancelled
zig-test / perf-fmt (push) Has been cancelled
Run the message loop more!
2026-03-06 08:22:41 +08:00
Karl Seguin
e869df98c9 Merge pull request #1723 from lightpanda-io/cleanup-treewalker-helpers
TreeWalker: remove unused methods
2026-03-06 08:19:03 +08:00
Pierre Tachoire
e499d36126 Merge pull request #1724 from lightpanda-io/dockerfile-remove-submodules
Some checks failed
nightly build / build-linux-x86_64 (push) Has been cancelled
nightly build / build-linux-aarch64 (push) Has been cancelled
nightly build / build-macos-aarch64 (push) Has been cancelled
nightly build / build-macos-x86_64 (push) Has been cancelled
wpt / zig build release (push) Has been cancelled
wpt / build wpt runner (push) Has been cancelled
wpt / web platform tests json output (push) Has been cancelled
wpt / perf-fmt (push) Has been cancelled
e2e-integration-test / zig build release (push) Has been cancelled
e2e-integration-test / demo-integration-scripts (push) Has been cancelled
Dockerfile: remove git submodule initialization
2026-03-05 15:19:42 +01:00
Adrià Arrufat
cac66d7fad Dockerfile: remove git submodule initialization 2026-03-05 22:18:38 +09:00
Adrià Arrufat
320aaf0e33 TreeWalker: remove unused methods
They were introduced in:

- https://github.com/lightpanda-io/browser/pull/1718
2026-03-05 21:51:22 +09:00
Karl Seguin
178a175e99 Merge pull request #1698 from lightpanda-io/readablestream-pool-arena
Some checks failed
e2e-test / zig build release (push) Has been cancelled
e2e-test / demo-scripts (push) Has been cancelled
e2e-test / cdp-and-hyperfine-bench (push) Has been cancelled
e2e-test / perf-fmt (push) Has been cancelled
e2e-test / browser fetch (push) Has been cancelled
zig-test / zig test using v8 in debug mode (push) Has been cancelled
zig-test / zig test (push) Has been cancelled
zig-test / perf-fmt (push) Has been cancelled
use a pool arena with ReadableStream
2026-03-05 18:57:06 +08:00
Karl Seguin
5fdf1cb2d1 Run the message loop more!
In https://github.com/lightpanda-io/browser/pull/1651 we started to run the
message loop a lot more. One specific case we added for `fetch` was when there
were no scheduled tasks or HTTP, but background tasks, we'd wait for them to
complete.

One case we missed though is if WE do have a scheduled task, but it's too far
into the future. In that case, we would just exit. This now adds the same logic
for checking and waiting for any background tasks in that case.
2026-03-05 18:51:34 +08:00
Pierre Tachoire
c64500dd85 update ref counting for new ReadableStream usages 2026-03-05 11:47:48 +01:00
Pierre Tachoire
812ad3f49e add reference counting for ReadableStream 2026-03-05 11:47:48 +01:00
Pierre Tachoire
8e8a1a7541 use a pool arena with ReadableStream 2026-03-05 11:47:47 +01:00
Karl Seguin
4863b3df6e Merge pull request #1721 from lightpanda-io/fix_mcp_unintialized_memory
Ensure that mcp.Server is correctly initialized
2026-03-05 17:11:57 +08:00
Karl Seguin
3dea554e9e Ensure that mcp.Server is correctly initialized
It relies on default field values, e.g. for mutex: std.Thread.Mutex = .{}, but
doesn't initialize the structure, just the pointer on the heap resulting in a
crash.
2026-03-05 16:32:25 +08:00
Karl Seguin
16d4f6e4e1 Merge pull request #1718 from lightpanda-io/enhance-treewalker
Enhance TreeWalker
2026-03-05 15:28:04 +08:00
Adrià Arrufat
26db481d46 markdown: refactor content discovery to use TreeWalker 2026-03-05 14:36:15 +09:00
Adrià Arrufat
3256a57230 TreeWalker: add sibling navigation and skipChildren 2026-03-05 14:29:42 +09:00
Karl Seguin
cbc30587ff Merge pull request #1717 from lightpanda-io/improve-markdown-links
Improve markdown links
2026-03-05 13:09:16 +08:00
Adrià Arrufat
a27de38c03 markdown: encode resolved URLs in links and images 2026-03-05 13:57:42 +09:00
Adrià Arrufat
e2f1609116 markdown: use aria-label or title for empty links 2026-03-05 11:27:51 +09:00
Adrià Arrufat
ea66a91a95 markdown: resolve absolute URLs and skip empty links 2026-03-05 10:48:18 +09:00
Pierre Tachoire
0d87c352b2 Merge pull request #1716 from lightpanda-io/wpt-again
Some checks failed
e2e-test / zig build release (push) Has been cancelled
e2e-test / demo-scripts (push) Has been cancelled
e2e-test / cdp-and-hyperfine-bench (push) Has been cancelled
e2e-test / perf-fmt (push) Has been cancelled
e2e-test / browser fetch (push) Has been cancelled
zig-test / zig test using v8 in debug mode (push) Has been cancelled
zig-test / zig test (push) Has been cancelled
zig-test / perf-fmt (push) Has been cancelled
nightly build / build-linux-x86_64 (push) Has been cancelled
nightly build / build-linux-aarch64 (push) Has been cancelled
nightly build / build-macos-aarch64 (push) Has been cancelled
nightly build / build-macos-x86_64 (push) Has been cancelled
wpt / zig build release (push) Has been cancelled
wpt / build wpt runner (push) Has been cancelled
wpt / web platform tests json output (push) Has been cancelled
wpt / perf-fmt (push) Has been cancelled
e2e-integration-test / zig build release (push) Has been cancelled
e2e-integration-test / demo-integration-scripts (push) Has been cancelled
ci: for wpt run with --concurrency=3
2026-03-04 18:04:07 +01:00
Pierre Tachoire
918f6ce0e6 ci: for wpt run with --concurrency=3 2026-03-04 15:54:48 +01:00
Karl Seguin
6c5efe6ce0 Merge pull request #1715 from lightpanda-io/cdp-frame-navigate
cdp: don't dispatch executionContextsCleared on frame navigation
2026-03-04 22:02:30 +08:00
Karl Seguin
f0be6675e7 Merge pull request #1714 from lightpanda-io/fix-req-id
cdp: fix req id resolver, they are REQ- not RID-
2026-03-04 21:59:04 +08:00
Pierre Tachoire
6a8174a15c cdp: don't dispatch executionContextsCleared on frame navigation 2026-03-04 14:45:21 +01:00
Pierre Tachoire
40c3f1b618 cdp: fix req id resolver, they are REQ- not RID- 2026-03-04 13:00:16 +01:00
Pierre Tachoire
6dd2dac049 Merge pull request #1704 from lightpanda-io/non-ascii-css-key
Some checks failed
e2e-test / zig build release (push) Has been cancelled
e2e-test / demo-scripts (push) Has been cancelled
e2e-test / cdp-and-hyperfine-bench (push) Has been cancelled
e2e-test / perf-fmt (push) Has been cancelled
e2e-test / browser fetch (push) Has been cancelled
zig-test / zig test using v8 in debug mode (push) Has been cancelled
zig-test / zig test (push) Has been cancelled
zig-test / perf-fmt (push) Has been cancelled
css: fix crash in consumeName() on UTF-8 multibyte sequences
2026-03-04 12:35:14 +01:00
Karl Seguin
b39bbb557f Merge pull request #1713 from lightpanda-io/dynamic_module_instantiation
Force dynamic module instantiation if not already instantiated
2026-03-04 16:27:06 +08:00
Karl Seguin
f7682cba67 Force dynamic module instantiation if not already instantiated
I couldn't come up with a reproducible case where this was needed, but we're
seeing some crash reports indicate that this is happening.
2026-03-04 16:12:11 +08:00
Pierre Tachoire
f94c07160a Merge pull request #1712 from lightpanda-io/css-selector-quote
Handle commas inside quoted attributes
2026-03-04 09:00:01 +01:00
Karl Seguin
bbe6692580 Merge pull request #1711 from lightpanda-io/iframe_about_blank
iframe handling for src = "about:blank"
2026-03-04 15:56:26 +08:00
Karl Seguin
9266a1c4d9 Merge pull request #1709 from lightpanda-io/expand_event_dispatch_handle_scope
Use a single HandleScope for event dispatch
2026-03-04 15:56:13 +08:00
Pierre Tachoire
220d80f05f Handle commas inside quoted attributes
In CSS selector, commas inside quoted attribute are not selector separators, but part of
the attribute value.
2026-03-04 08:49:33 +01:00
Karl Seguin
9144c909dd Merge pull request #1710 from lightpanda-io/custom_element_clone
Support for clone custom elements that attach them self in their cons…
2026-03-04 15:47:39 +08:00
Karl Seguin
7981fcec84 iframe handling for src = "about:blank"
Don't try to resolve an iframe's source if it's about:blank

Extend the page's handling of about:blank to render an empty document
2026-03-04 15:43:07 +08:00
Pierre Tachoire
71264c56fc Merge pull request #1696 from lightpanda-io/textencoder-stream
Add TextEncoderStream and TextDecoderStream implementation
2026-03-04 07:58:56 +01:00
Karl Seguin
ca0f77bdee Support for cloning custom elements that attach themselves in their constructor
When we createElement, we assume the element is detached. This is usually true
except for Custom Elements where the constructor can do anything, including
connecting the element. This broken assumption results in cloneNode crashing.
2026-03-04 14:54:34 +08:00
Karl Seguin
fc8b1b8549 Use a single HandleScope for event dispatch
https://github.com/lightpanda-io/browser/pull/1690 narrowed the lifetime of
HandleScopes to once per listener. I think that was just an accident of
refactoring, and not some intentional choice.

The narrower HandleScope lifetime makes it so that when we do run the microtask
queue at the end of event dispatching, some locals in the queue may no longer
be valid.

HS1
  HS2
    queueMicrotask(func)
  runMicrotask

In the above flow, `func` is only valid while HS2 is alive, so when we run
the microtask queue in HS1, it is no longer valid.
2026-03-04 11:43:09 +08:00
Karl Seguin
bc8c44f62f Merge pull request #1707 from lightpanda-io/nikneym/details
Some checks failed
e2e-test / zig build release (push) Has been cancelled
e2e-test / demo-scripts (push) Has been cancelled
e2e-test / cdp-and-hyperfine-bench (push) Has been cancelled
e2e-test / perf-fmt (push) Has been cancelled
e2e-test / browser fetch (push) Has been cancelled
zig-test / zig test using v8 in debug mode (push) Has been cancelled
zig-test / zig test (push) Has been cancelled
zig-test / perf-fmt (push) Has been cancelled
e2e-integration-test / zig build release (push) Has been cancelled
e2e-integration-test / demo-integration-scripts (push) Has been cancelled
Add `HTMLDetailsElement`
2026-03-04 07:44:11 +08:00
Karl Seguin
01fab5c92a Merge pull request #1706 from lightpanda-io/cdp-attach-to-browser
cdp: fix send CDP raw command with Playwright
2026-03-04 07:40:05 +08:00
Karl Seguin
1c07d786a0 Merge pull request #1705 from lightpanda-io/nikneym/track
` Track`: implement kind and constants
2026-03-04 07:34:12 +08:00
Karl Seguin
6f0cd87d1c Merge pull request #1703 from lightpanda-io/client_and_script_manager
Fix a few issues in Client
2026-03-04 07:32:14 +08:00
Karl Seguin
e44308cba2 Merge pull request #1695 from lightpanda-io/iframe_src_nav
Iframe src nav
2026-03-04 07:27:23 +08:00
Karl Seguin
50245c5157 Merge pull request #1667 from lightpanda-io/terminate_isolate
On Client.stop, terminate the isolate
2026-03-04 07:27:10 +08:00
Pierre Tachoire
9ca5188e12 cdp: set consistent target's default
with about:blank for url and empty title.
2026-03-03 17:24:08 +01:00
Pierre Tachoire
56cc881ac0 cdp: fix attachToTarget and attachToBrowserTarget resp 2026-03-03 15:01:53 +01:00
Halil Durak
50896bdc9d HTMLDetailsElement: add tests 2026-03-03 15:12:12 +03:00
Halil Durak
8dd4567828 HTMLDetailsElement: implement HTMLDetailsElement 2026-03-03 15:12:02 +03:00
Pierre Tachoire
06ef6d3e6a cdp: attachToTarget must add the session id 2026-03-03 12:58:00 +01:00
Pierre Tachoire
14b58e8062 add target.attachToBrowserTarget 2026-03-03 12:58:00 +01:00
Pierre Tachoire
eee232c12c cdp: allow multiple calls to attachToTarget
Playwright, when creating a new CDPSession, sends an
attachToBrowserTarget followed by another attachToTarget to re-attach
itself to the existing target.

see playwright/axtree.js from demo/ repository.
2026-03-03 12:58:00 +01:00
Halil Durak
febe321aef Track: add tests 2026-03-03 14:41:05 +03:00
Halil Durak
28777ac717 Track: implement kind and constants 2026-03-03 14:40:53 +03:00
Pierre Tachoire
13b008b56c css: fix crash in consumeName() on UTF-8 multibyte sequences
advance() asserts that each byte it steps over is either an ASCII byte
or a UTF-8 sequence leader, never a continuation byte (0x80–0xBF).
consumeName() was calling advance(1) for all non-ASCII bytes
('\x80'...'\xFF'), processing multi-byte sequences one byte at a time.
For a two-byte sequence like é (0xC3 0xA9), the second iteration landed
on the continuation byte 0xA9 and triggered the assertion, crashing the
browser in Debug mode.

Fix: replace advance(1) with consumeChar() for all non-ASCII bytes.
consumeChar() reads the lead byte, derives the sequence length via
utf8ByteSequenceLength, and advances the full code point in one step,
so the position never rests on a continuation byte.

Observed on saintcyrlecole.caliceo.com, whose root element carries an
inline style with custom property names containing French accented
characters (--color-store-bulles-été-fg, etc.). The crash aborted JS
execution before the Angular app could render any dynamic content.
2026-03-03 11:13:30 +01:00
Karl Seguin
523efbd85a Fix a few issues in Client
Most significantly, if removing from the multi fails, the connection
is added to a "dirty" list for the removal to be retried later. Looking at
the curl source code, remove fails on a recursive call, and we've struggled with
recursive calls before, so I _think_ this might be happening (it fails in other
cases, but I suspect if it _is_ happening, it's for this reason). The retry
happens _after_ `perform`, so it cannot fail due to recursiveness. If it
fails at this point, we @panic. This is harsh, but it isn't easily recoverable
and before putting effort into it, I'd like to know that it's actually happening.

Fix potential use of undefined when a 401-407 request is received, but no
'WWW-Authenticate' or 'Proxy-Authenticate' header is received.

Don't call `curl_multi_remove_handle` on an easy that hasn't been added yet due
to an error. Specifically, if `makeRequest` fails during setup, transfer_conn is
nulled so that `transfer.deinit()` doesn't try to remove the connection. And the
conn is removed from the `in_use` queue and made `available` again.

On Abort, if getting the private fails (extremely unlikely), we now still try
to remove the connection from the multi.

Added a few more fields to the famous "ScriptManager.Header recall" assertion.
2026-03-03 18:02:06 +08:00
Pierre Tachoire
fcacc8bfc6 remove the isString type check into TransformStream write 2026-03-03 09:40:32 +01:00
Pierre Tachoire
252b3c3bf6 Ignore BOM only when the option is set on TextDecoderStream 2026-03-03 09:04:41 +01:00
Pierre Tachoire
24221748e1 Merge pull request #1699 from lightpanda-io/textencoder-stream-enhancements
Textencoder stream enhancements
2026-03-03 08:12:07 +01:00
Karl Seguin
141ae053db leverage JS bridge's type mapping 2026-03-03 11:43:13 +08:00
Karl Seguin
10ec4ff814 Create Zig wrapper generator for js.Function creation
This allows us to leverage the Caller.Function.call method, which does type
mapping, caching, etc... and allows the Zig function callback to be written like
any other Zig WebAPI function.
2026-03-03 11:41:00 +08:00
Pierre Tachoire
d2da0b7c0e remove useless _page field from WritableStream* 2026-03-02 18:09:46 +01:00
Pierre Tachoire
7d0548406e Move V8 pipe callback helpers into js/ layer
ReadableStream.zig was the only webapi file importing v8 directly.
Extract the repeated newFunctionWithData / callback boilerplate into
js/Local (newFunctionWithData) and js/Caller (initFromHandle,
FunctionCallbackInfo.getData), and update ReadableStream and Context
to use them.
2026-03-02 17:33:56 +01:00
Pierre Tachoire
c121dbbd67 Add desiredSize accessor to WritableStreamDefaultWriter
Returns 1 when writable (default high water mark), 0 when closed,
and null when errored, matching the spec behavior for streams
without a custom queuing strategy.
2026-03-02 14:41:03 +01:00
Pierre Tachoire
c1c0a7d494 Skip enqueue of empty chunks in TextDecoderStream
After BOM stripping or when receiving an empty Uint8Array, the
decoded input can be zero-length. Per spec, empty chunks should
produce no output rather than enqueuing an empty string.
2026-03-02 14:30:39 +01:00
Pierre Tachoire
0749f60702 Preserve chunk value types through ReadableStream enqueue/read
When JS called controller.enqueue(42), the value was coerced to the
string "42" because Chunk only had uint8array and string variants.
Add a js_value variant that persists the raw JS value handle, and
expose enqueueValue(js.Value) as the JS-facing enqueue method so
numbers, booleans, and objects round-trip with their original types.
2026-03-02 14:24:49 +01:00
Pierre Tachoire
ca0ef18bdf Implement async piping for ReadableStream.pipeThrough/pipeTo
Replace synchronous queue-draining approach with async promise-based
piping using V8's thenAndCatch callbacks. PipeState struct manages the
async read loop: reader.read() returns a Promise, onReadFulfilled
extracts {done, value}, writes chunks to the writable side, and
recurses via pumpRead() until the stream closes.
2026-03-02 12:17:17 +01:00
Pierre Tachoire
6ed011e2f8 Add pipeThrough and pipeTo to ReadableStream
Implement synchronous piping that drains queued chunks from a
ReadableStream into a WritableStream. pipeThrough accepts any
{readable, writable} pair (TransformStream, TextDecoderStream, etc.)
and returns the readable side. pipeTo writes all chunks to a
WritableStream and resolves when complete.
2026-03-02 12:06:18 +01:00
Pierre Tachoire
23d322452a Add TextDecoderStream to decode UTF-8 byte streams into strings
Mirrors TextEncoderStream: wraps a TransformStream with a Zig-level
transform that converts Uint8Array chunks to strings. Supports the
same constructor options as TextDecoder (label, fatal, ignoreBOM).
2026-03-02 11:49:01 +01:00
Pierre Tachoire
5d3b965d28 Implement WritableStream, TransformStream, and TextEncoderStream
Add the missing Streams API types needed for TextEncoderStream support:
- WritableStream with locked/getWriter, supporting both JS sink callbacks
and internal TransformStream routing
- WritableStreamDefaultWriter with write/close/releaseLock/closed/ready
- WritableStreamDefaultController with error()
- TransformStream with readable/writable accessors, JS transformer
callbacks (start/transform/flush), and Zig-level transform support
- TransformStreamDefaultController with enqueue/error/terminate
- TextEncoderStream that encodes string chunks to UTF-8 Uint8Array
via a Zig-level transform function
2026-03-02 11:49:01 +01:00
Karl Seguin
d9794d72c7 fix bad rebase 2026-03-02 18:39:02 +08:00
Karl Seguin
524b5be937 on iframe re-navigation, keep pending_loads in sync 2026-03-02 18:14:36 +08:00
Karl Seguin
ac2e276a6a try to make test more stable 2026-03-02 18:14:36 +08:00
Karl Seguin
4f4dbc0c22 Allow iframe.src to renavigate the page
Unlike a script, an iframe can be re-navigated simply by setting the src.
2026-03-02 18:14:34 +08:00
Karl Seguin
d56e63a91b On Client.stop, terminate the isolate
Shutdown on MacOS doesn't work properly. The process appears to shutdown, but
will continue to run in the background. It can run infinitely if it's stuck in
a JS loop. To help it along, Client.stop now force-terminates the isolate.

I also don't think shutdown is working as intended on Linux, but the problem
seems less serious there. On Linux, it appears to properly kill the process
(which is the important thing), but I don't think it necessarily does a clean
shutdown.
2026-02-27 08:20:31 +08:00
57 changed files with 2702 additions and 223 deletions

View File

@@ -117,6 +117,98 @@ jobs:
BASE_URL=https://demo-browser.lightpanda.io/ node playwright/proxy_auth.js
kill `cat LPD.pid` `cat PROXY.id`
# e2e tests w/ web-bot-auth configuration on.
wba-demo-scripts:
name: wba-demo-scripts
needs: zig-build-release
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- uses: actions/checkout@v4
with:
repository: 'lightpanda-io/demo'
fetch-depth: 0
- run: npm install
- name: download artifact
uses: actions/download-artifact@v4
with:
name: lightpanda-build-release
- run: chmod a+x ./lightpanda
- run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem
- name: run end to end tests
run: |
./lightpanda serve \
--web_bot_auth_key_file private_key.pem \
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
& echo $! > LPD.pid
go run runner/main.go
kill `cat LPD.pid`
- name: build proxy
run: |
cd proxy
go build
- name: run end to end tests through proxy
run: |
./proxy/proxy & echo $! > PROXY.id
./lightpanda serve \
--web_bot_auth_key_file private_key.pem \
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
--http_proxy 'http://127.0.0.1:3000' \
& echo $! > LPD.pid
go run runner/main.go
kill `cat LPD.pid` `cat PROXY.id`
- name: run request interception through proxy
run: |
export PROXY_USERNAME=username PROXY_PASSWORD=password
./proxy/proxy & echo $! > PROXY.id
./lightpanda serve & echo $! > LPD.pid
URL=https://demo-browser.lightpanda.io/campfire-commerce/ node puppeteer/proxy_auth.js
BASE_URL=https://demo-browser.lightpanda.io/ node playwright/proxy_auth.js
kill `cat LPD.pid` `cat PROXY.id`
wba-test:
name: wba-test
needs: zig-build-release
env:
LIGHTPANDA_DISABLE_TELEMETRY: true
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: download artifact
uses: actions/download-artifact@v4
with:
name: lightpanda-build-release
- run: chmod a+x ./lightpanda
- run: echo "${{ secrets.WBA_PRIVATE_KEY_PEM }}" > private_key.pem
- run: |
./lightpanda fetch https://crawltest.com/cdn-cgi/web-bot-auth \
--log_level error \
--web_bot_auth_key_file private_key.pem \
--web_bot_auth_keyid ${{ vars.WBA_KEY_ID }} \
--web_bot_auth_domain ${{ vars.WBA_DOMAIN }} \
--dump markdown \
| tee output.log
- run: cat output.log | grep -q "unknown public key or unknown verified bot ID for keyid"
cdp-and-hyperfine-bench:
name: cdp-and-hyperfine-bench
needs: zig-build-release

View File

@@ -107,7 +107,7 @@ jobs:
run: |
./wpt serve 2> /dev/null & echo $! > WPT.pid
sleep 10s
./wptrunner -lpd-path ./lightpanda -json -concurrency 1 > wpt.json
./wptrunner -lpd-path ./lightpanda -json -concurrency 3 > wpt.json
kill `cat WPT.pid`
- name: write commit

View File

@@ -36,10 +36,6 @@ RUN ZIG=$(grep '\.minimum_zig_version = "' "build.zig.zon" | cut -d'"' -f2) && \
mv zig-${ARCH}-linux-${ZIG} /usr/local/lib && \
ln -s /usr/local/lib/zig-${ARCH}-linux-${ZIG}/zig /usr/local/bin/zig
# install deps
RUN git submodule init && \
git submodule update --recursive
# download and install v8
RUN case $TARGETPLATFORM in \
"linux/arm64") ARCH="aarch64" ;; \

View File

@@ -26,6 +26,7 @@ const Snapshot = @import("browser/js/Snapshot.zig");
const Platform = @import("browser/js/Platform.zig");
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
const RobotStore = @import("browser/Robots.zig").RobotStore;
const WebBotAuth = @import("browser/WebBotAuth.zig");
pub const Http = @import("http/Http.zig");
pub const ArenaPool = @import("ArenaPool.zig");
@@ -40,6 +41,7 @@ telemetry: Telemetry,
allocator: Allocator,
arena_pool: ArenaPool,
robots: RobotStore,
web_bot_auth: ?WebBotAuth,
app_dir_path: ?[]const u8,
shutdown: bool = false,
@@ -52,7 +54,14 @@ pub fn init(allocator: Allocator, config: *const Config) !*App {
app.robots = RobotStore.init(allocator);
app.http = try Http.init(allocator, &app.robots, config);
if (config.webBotAuth()) |wba_cfg| {
app.web_bot_auth = try WebBotAuth.fromConfig(allocator, &wba_cfg);
} else {
app.web_bot_auth = null;
}
errdefer if (app.web_bot_auth) |wba| wba.deinit(allocator);
app.http = try Http.init(allocator, &app.robots, &app.web_bot_auth, config);
errdefer app.http.deinit();
app.platform = try Platform.init();
@@ -84,6 +93,9 @@ pub fn deinit(self: *App) void {
}
self.telemetry.deinit();
self.robots.deinit();
if (self.web_bot_auth) |wba| {
wba.deinit(allocator);
}
self.http.deinit();
self.snapshot.deinit();
self.platform.deinit();

View File

@@ -23,6 +23,8 @@ const Allocator = std.mem.Allocator;
const log = @import("log.zig");
const dump = @import("browser/dump.zig");
const WebBotAuthConfig = @import("browser/WebBotAuth.zig").Config;
pub const RunMode = enum {
help,
fetch,
@@ -153,6 +155,17 @@ pub fn userAgentSuffix(self: *const Config) ?[]const u8 {
};
}
pub fn webBotAuth(self: *const Config) ?WebBotAuthConfig {
return switch (self.mode) {
inline .serve, .fetch, .mcp => |opts| WebBotAuthConfig{
.key_file = opts.common.web_bot_auth_key_file orelse return null,
.keyid = opts.common.web_bot_auth_keyid orelse return null,
.domain = opts.common.web_bot_auth_domain orelse return null,
},
.help, .version => null,
};
}
pub fn maxConnections(self: *const Config) u16 {
return switch (self.mode) {
.serve => |opts| opts.cdp_max_connections,
@@ -217,6 +230,10 @@ pub const Common = struct {
log_format: ?log.Format = null,
log_filter_scopes: ?[]log.Scope = null,
user_agent_suffix: ?[]const u8 = null,
web_bot_auth_key_file: ?[]const u8 = null,
web_bot_auth_keyid: ?[]const u8 = null,
web_bot_auth_domain: ?[]const u8 = null,
};
/// Pre-formatted HTTP headers for reuse across Http and Client.
@@ -324,6 +341,14 @@ pub fn printUsageAndExit(self: *const Config, success: bool) void {
\\--user_agent_suffix
\\ Suffix to append to the Lightpanda/X.Y User-Agent
\\
\\--web_bot_auth_key_file
\\ Path to the Ed25519 private key PEM file.
\\
\\--web_bot_auth_keyid
\\ The JWK thumbprint of your public key.
\\
\\--web_bot_auth_domain
\\ Your domain e.g. yourdomain.com
;
// MAX_HELP_LEN|
@@ -845,5 +870,32 @@ fn parseCommonArg(
return true;
}
if (std.mem.eql(u8, "--web_bot_auth_key_file", opt)) {
const str = args.next() orelse {
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_key_file" });
return error.InvalidArgument;
};
common.web_bot_auth_key_file = try allocator.dupe(u8, str);
return true;
}
if (std.mem.eql(u8, "--web_bot_auth_keyid", opt)) {
const str = args.next() orelse {
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_keyid" });
return error.InvalidArgument;
};
common.web_bot_auth_keyid = try allocator.dupe(u8, str);
return true;
}
if (std.mem.eql(u8, "--web_bot_auth_domain", opt)) {
const str = args.next() orelse {
log.fatal(.app, "missing argument value", .{ .arg = "--web_bot_auth_domain" });
return error.InvalidArgument;
};
common.web_bot_auth_domain = try allocator.dupe(u8, str);
return true;
}
return false;
}

View File

@@ -174,16 +174,16 @@ const HeaderValue = struct {
pub const AuthChallenge = struct {
status: u16,
source: enum { server, proxy },
scheme: enum { basic, digest },
realm: []const u8,
source: ?enum { server, proxy },
scheme: ?enum { basic, digest },
realm: ?[]const u8,
pub fn parse(status: u16, header: []const u8) !AuthChallenge {
var ac: AuthChallenge = .{
.status = status,
.source = undefined,
.realm = "TODO", // TODO parser and set realm
.scheme = undefined,
.source = null,
.realm = null,
.scheme = null,
};
const sep = std.mem.indexOfPos(u8, header, 0, ": ") orelse return error.InvalidHeader;
@@ -471,6 +471,7 @@ pub const Connection = struct {
pub const Handles = struct {
connections: []Connection,
dirty: HandleList,
in_use: HandleList,
available: HandleList,
multi: *libcurl.CurlM,
@@ -501,6 +502,7 @@ pub const Handles = struct {
}
return .{
.dirty = .{},
.in_use = .{},
.connections = connections,
.available = available,
@@ -522,8 +524,6 @@ pub const Handles = struct {
pub fn get(self: *Handles) ?*Connection {
if (self.available.popFirst()) |node| {
node.prev = null;
node.next = null;
self.in_use.append(node);
return @as(*Connection, @fieldParentPtr("node", node));
}
@@ -535,21 +535,46 @@ pub const Handles = struct {
}
pub fn remove(self: *Handles, conn: *Connection) void {
libcurl.curl_multi_remove_handle(self.multi, conn.easy) catch |err| {
log.fatal(.http, "multi remove handle", .{ .err = err });
};
var node = &conn.node;
if (libcurl.curl_multi_remove_handle(self.multi, conn.easy)) {
self.isAvailable(conn);
} else |err| {
// can happen if we're in a perform() call, so we'll queue this
// for cleanup later.
const node = &conn.node;
self.in_use.remove(node);
self.dirty.append(node);
log.warn(.http, "multi remove handle", .{ .err = err });
}
}
pub fn isAvailable(self: *Handles, conn: *Connection) void {
const node = &conn.node;
self.in_use.remove(node);
node.prev = null;
node.next = null;
self.available.append(node);
}
pub fn perform(self: *Handles) !c_int {
var running: c_int = undefined;
self.performing = true;
defer self.performing = false;
const multi = self.multi;
var running: c_int = undefined;
try libcurl.curl_multi_perform(self.multi, &running);
{
const list = &self.dirty;
while (list.first) |node| {
list.remove(node);
const conn: *Connection = @fieldParentPtr("node", node);
if (libcurl.curl_multi_remove_handle(multi, conn.easy)) {
self.available.append(node);
} else |err| {
log.fatal(.http, "multi remove handle", .{ .err = err, .src = "perform" });
@panic("multi_remove_handle");
}
}
}
return running;
}

View File

@@ -296,6 +296,10 @@ pub const Client = struct {
}
fn stop(self: *Client) void {
switch (self.mode) {
.http => {},
.cdp => |*cdp| cdp.browser.env.terminate(),
}
self.ws.shutdown();
}

View File

@@ -17,6 +17,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const URL = @import("browser/URL.zig");
const TestHTTPServer = @This();
@@ -97,7 +98,10 @@ fn handleConnection(self: *TestHTTPServer, conn: std.net.Server.Connection) !voi
}
pub fn sendFile(req: *std.http.Server.Request, file_path: []const u8) !void {
var file = std.fs.cwd().openFile(file_path, .{}) catch |err| switch (err) {
var url_buf: [1024]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&url_buf);
const unescaped_file_path = try URL.unescape(fba.allocator(), file_path);
var file = std.fs.cwd().openFile(unescaped_file_path, .{}) catch |err| switch (err) {
error.FileNotFound => return req.respond("server error", .{ .status = .not_found }),
else => return err,
};

View File

@@ -377,12 +377,17 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
const page = self.page;
var was_handled = false;
defer if (was_handled) {
var ls: js.Local.Scope = undefined;
page.js.localScope(&ls);
defer ls.deinit();
ls.local.runMicrotasks();
};
// Create a single scope for all event handlers in this dispatch.
// This ensures function handles passed to queueMicrotask remain valid
// throughout the entire dispatch, preventing crashes when microtasks run.
var ls: js.Local.Scope = undefined;
page.js.localScope(&ls);
defer {
if (was_handled) {
ls.local.runMicrotasks();
}
ls.deinit();
}
const activation_state = ActivationState.create(event, target, page);
@@ -461,7 +466,7 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
.event_target = @intFromPtr(current_target),
.type_string = event._type_string,
})) |list| {
try self.dispatchPhase(list, current_target, event, &was_handled, comptime .init(true, opts));
try self.dispatchPhase(list, current_target, event, &was_handled, &ls.local, comptime .init(true, opts));
}
}
@@ -476,10 +481,6 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
was_handled = true;
event._current_target = target_et;
var ls: js.Local.Scope = undefined;
self.page.js.localScope(&ls);
defer ls.deinit();
try ls.toLocal(inline_handler).callWithThis(void, target_et, .{event});
if (event._stop_propagation) {
@@ -495,7 +496,7 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
.type_string = event._type_string,
.event_target = @intFromPtr(target_et),
})) |list| {
try self.dispatchPhase(list, target_et, event, &was_handled, comptime .init(null, opts));
try self.dispatchPhase(list, target_et, event, &was_handled, &ls.local, comptime .init(null, opts));
if (event._stop_propagation) {
return;
}
@@ -512,7 +513,7 @@ fn dispatchNode(self: *EventManager, target: *Node, event: *Event, comptime opts
.type_string = event._type_string,
.event_target = @intFromPtr(current_target),
})) |list| {
try self.dispatchPhase(list, current_target, event, &was_handled, comptime .init(false, opts));
try self.dispatchPhase(list, current_target, event, &was_handled, &ls.local, comptime .init(false, opts));
}
}
}
@@ -530,7 +531,7 @@ const DispatchPhaseOpts = struct {
}
};
fn dispatchPhase(self: *EventManager, list: *std.DoublyLinkedList, current_target: *EventTarget, event: *Event, was_handled: *bool, comptime opts: DispatchPhaseOpts) !void {
fn dispatchPhase(self: *EventManager, list: *std.DoublyLinkedList, current_target: *EventTarget, event: *Event, was_handled: *bool, local: *const js.Local, comptime opts: DispatchPhaseOpts) !void {
const page = self.page;
// Track dispatch depth for deferred removal
@@ -607,18 +608,14 @@ fn dispatchPhase(self: *EventManager, list: *std.DoublyLinkedList, current_targe
event._target = getAdjustedTarget(original_target, current_target);
}
var ls: js.Local.Scope = undefined;
page.js.localScope(&ls);
defer ls.deinit();
switch (listener.function) {
.value => |value| try ls.toLocal(value).callWithThis(void, current_target, .{event}),
.value => |value| try local.toLocal(value).callWithThis(void, current_target, .{event}),
.string => |string| {
const str = try page.call_arena.dupeZ(u8, string.str());
try ls.local.eval(str, null);
try local.eval(str, null);
},
.object => |obj_global| {
const obj = ls.toLocal(obj_global);
const obj = local.toLocal(obj_global);
if (try obj.getFunction("handleEvent")) |handleEvent| {
try handleEvent.callWithThis(void, obj, .{event});
}

View File

@@ -236,7 +236,7 @@ version: usize = 0,
// ScriptManager, so all scripts just count as 1 pending load.
_pending_loads: u32,
_parent_notified: if (IS_DEBUG) bool else void = if (IS_DEBUG) false else {},
_parent_notified: bool = false,
_type: enum { root, frame }, // only used for logs right now
_req_id: u32 = 0,
@@ -346,7 +346,10 @@ pub fn deinit(self: *Page) void {
session.browser.env.destroyContext(self.js);
self._script_manager.shutdown = true;
session.browser.http_client.abort();
if (self.parent == null) {
// only the root frame needs to abort this. It's more efficient this way
session.browser.http_client.abort();
}
self._script_manager.deinit();
if (comptime IS_DEBUG) {
@@ -460,8 +463,12 @@ pub fn navigate(self: *Page, request_url: [:0]const u8, opts: NavigateOpts) !voi
// It's important to force a reset during the following navigation.
self._parse_state = .complete;
// We do not processHTMLDoc here as we know we don't have any scripts
// This assumption may be false when CDP Page.addScriptToEvaluateOnNewDocument is implemented
{
const parse_arena = try self.getArena(.{ .debug = "about:blank parse" });
defer self.releaseArena(parse_arena);
var parser = Parser.init(parse_arena, self.document.asNode(), self);
parser.parse("<html><head></head><body></body></html>");
}
self.documentIsComplete();
session.notification.dispatch(.page_navigate, &.{
@@ -707,17 +714,18 @@ pub fn documentIsComplete(self: *Page) void {
log.err(.page, "document is complete", .{ .err = err, .type = self._type, .url = self.url });
};
if (IS_DEBUG) {
std.debug.assert(self._navigated_options != null);
if (self._navigated_options) |no| {
// _navigated_options will be null in special short-circuit cases, like
// "navigating" to about:blank, in which case this notification has
// already been sent
self._session.notification.dispatch(.page_navigated, &.{
.frame_id = self._frame_id,
.req_id = self._req_id,
.opts = no,
.url = self.url,
.timestamp = timestamp(.monotonic),
});
}
self._session.notification.dispatch(.page_navigated, &.{
.frame_id = self._frame_id,
.req_id = self._req_id,
.opts = self._navigated_options.?,
.url = self.url,
.timestamp = timestamp(.monotonic),
});
}
fn _documentIsComplete(self: *Page) !void {
@@ -750,11 +758,15 @@ fn _documentIsComplete(self: *Page) !void {
}
fn notifyParentLoadComplete(self: *Page) void {
if (comptime IS_DEBUG) {
std.debug.assert(self._parent_notified == false);
self._parent_notified = true;
if (self._parent_notified == true) {
if (comptime IS_DEBUG) {
std.debug.assert(false);
}
// shouldn't happen, don't want to crash a release build over it
return;
}
self._parent_notified = true;
if (self.parent) |p| {
p.iframeCompletedLoading(self.iframe.?);
}
@@ -796,7 +808,12 @@ fn pageDataCallback(transfer: *Http.Transfer, data: []const u8) !void {
} orelse .unknown;
if (comptime IS_DEBUG) {
log.debug(.page, "navigate first chunk", .{ .content_type = mime.content_type, .len = data.len, .type = self._type, .url = self.url });
log.debug(.page, "navigate first chunk", .{
.content_type = mime.content_type,
.len = data.len,
.type = self._type,
.url = self.url,
});
}
switch (mime.content_type) {
@@ -850,7 +867,11 @@ fn pageDoneCallback(ctx: *anyopaque) !void {
try self._session.navigation.commitNavigation(self);
defer if (comptime IS_DEBUG) {
log.debug(.page, "page.load.complete", .{ .url = self.url, .type = self._type });
log.debug(.page, "page load complete", .{
.url = self.url,
.type = self._type,
.state = std.meta.activeTag(self._parse_state),
});
};
const parse_arena = try self.getArena(.{ .debug = "Page.parse" });
@@ -962,29 +983,49 @@ pub fn iframeAddedCallback(self: *Page, iframe: *Element.Html.IFrame) !void {
}
iframe._executed = true;
const session = self._session;
const frame_id = session.nextFrameId();
// A frame can be re-navigated by setting the src.
const existing_window = iframe._content_window;
const page_frame = try self.arena.create(Page);
const frame_id = blk: {
if (existing_window) |w| {
const existing_frame_id = w._page._frame_id;
session.browser.http_client.abortFrame(existing_frame_id);
break :blk existing_frame_id;
}
break :blk session.nextFrameId();
};
try Page.init(page_frame, frame_id, session, self);
errdefer page_frame.deinit();
self._pending_loads += 1;
page_frame.iframe = iframe;
iframe._content_window = page_frame.window;
errdefer iframe._content_window = null;
self._session.notification.dispatch(.page_frame_created, &.{
.frame_id = frame_id,
.parent_id = self._frame_id,
.timestamp = timestamp(.monotonic),
});
const url = blk: {
if (std.mem.eql(u8, src, "about:blank")) {
break :blk "about:blank"; // navigate will handle this special case
}
break :blk try URL.resolve(
self.call_arena, // ok to use, page.navigate dupes this
self.base(),
src,
.{ .encode = true },
);
};
// navigate will dupe the url
const url = try URL.resolve(
self.call_arena,
self.base(),
src,
.{ .encode = true },
);
if (existing_window == null) {
// on first load, dispatch frame_created event
self._session.notification.dispatch(.page_frame_created, &.{
.frame_id = frame_id,
.parent_id = self._frame_id,
.timestamp = timestamp(.monotonic),
});
}
page_frame.navigate(url, .{ .reason = .initialFrameNavigation }) catch |err| {
log.warn(.page, "iframe navigate failure", .{ .url = url, .err = err });
@@ -994,6 +1035,25 @@ pub fn iframeAddedCallback(self: *Page, iframe: *Element.Html.IFrame) !void {
return error.IFrameLoadError;
};
if (existing_window) |w| {
const existing_page = w._page;
if (existing_page._parent_notified == false) {
self._pending_loads -= 1;
}
for (self.frames.items, 0..) |p, i| {
if (p == existing_page) {
self.frames.items[i] = page_frame;
break;
}
} else {
lp.assert(false, "Existing frame not found", .{ .len = self.frames.items.len });
}
existing_page.deinit();
return;
}
// window[N] is based on document order. For now we'll just append the frame
// at the end of our list and set frames_sorted == false. window.getFrame
// will check this flag to decide if it needs to sort the frames or not.
@@ -1825,7 +1885,7 @@ pub fn createElementNS(self: *Page, namespace: Element.Namespace, name: []const
Element.Html.Track,
namespace,
attribute_iterator,
.{ ._proto = undefined },
.{ ._proto = undefined, ._kind = comptime .wrap("subtitles"), ._ready_state = .none },
),
else => {},
},
@@ -1942,10 +2002,10 @@ pub fn createElementNS(self: *Page, namespace: Element.Namespace, name: []const
.{ ._proto = undefined, ._tag_name = String.init(undefined, "article", .{}) catch unreachable, ._tag = .article },
),
asUint("details") => return self.createHtmlElementT(
Element.Html.Generic,
Element.Html.Details,
namespace,
attribute_iterator,
.{ ._proto = undefined, ._tag_name = String.init(undefined, "details", .{}) catch unreachable, ._tag = .details },
.{ ._proto = undefined },
),
asUint("summary") => return self.createHtmlElementT(
Element.Html.Generic,
@@ -2469,7 +2529,7 @@ pub fn insertNodeRelative(self: *Page, parent: *Node, child: *Node, relative: In
pub fn _insertNodeRelative(self: *Page, comptime from_parser: bool, parent: *Node, child: *Node, relative: InsertNodeRelative, opts: InsertNodeOpts) !void {
// caller should have made sure this was the case
lp.assert(child._parent == null, "Page.insertNodeRelative parent", .{ .url = self.url });
lp.assert(child._parent == null, "Page.insertNodeRelative parent", .{});
const children = blk: {
// expand parent._children so that it can take another child

View File

@@ -634,6 +634,8 @@ pub const Script = struct {
debug_transfer_notified_fail: bool = false,
debug_transfer_redirecting: bool = false,
debug_transfer_intercept_state: u8 = 0,
debug_transfer_auth_challenge: bool = false,
debug_transfer_easy_id: usize = 0,
const Kind = enum {
module,
@@ -711,6 +713,8 @@ pub const Script = struct {
.a5 = self.debug_transfer_notified_fail,
.a6 = self.debug_transfer_redirecting,
.a7 = self.debug_transfer_intercept_state,
.a8 = self.debug_transfer_auth_challenge,
.a9 = self.debug_transfer_easy_id,
.b1 = transfer.id,
.b2 = transfer._tries,
.b3 = transfer.aborted,
@@ -718,6 +722,8 @@ pub const Script = struct {
.b5 = transfer._notified_fail,
.b6 = transfer._redirecting,
.b7 = @intFromEnum(transfer._intercept_state),
.b8 = transfer._auth_challenge != null,
.b9 = if (transfer._conn) |c| @intFromPtr(c.easy) else 0,
});
self.header_callback_called = true;
self.debug_transfer_id = transfer.id;
@@ -727,6 +733,8 @@ pub const Script = struct {
self.debug_transfer_notified_fail = transfer._notified_fail;
self.debug_transfer_redirecting = transfer._redirecting;
self.debug_transfer_intercept_state = @intFromEnum(transfer._intercept_state);
self.debug_transfer_auth_challenge = transfer._auth_challenge != null;
self.debug_transfer_easy_id = if (transfer._conn) |c| @intFromPtr(c.easy) else 0;
}
lp.assert(self.source.remote.capacity == 0, "ScriptManager.Header buffer", .{ .capacity = self.source.remote.capacity });

View File

@@ -261,7 +261,7 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
std.debug.assert(http_client.intercepted == 0);
}
const ms: u64 = ms_to_next_task orelse blk: {
var ms: u64 = ms_to_next_task orelse blk: {
if (wait_ms - ms_remaining < 100) {
if (comptime builtin.is_test) {
return .done;
@@ -288,7 +288,13 @@ fn _wait(self: *Session, page: *Page, wait_ms: u32) !WaitResult {
// Same as above, except we have a scheduled task,
// it just happens to be too far into the future
// compared to how long we were told to wait.
return .done;
if (!browser.hasBackgroundTasks()) {
return .done;
}
// _we_ have nothing to run, but v8 is working on
// background tasks. We'll wait for them.
browser.waitForBackgroundTasks();
ms = 20;
}
// We have a task to run in the not-so-distant future.

View File

@@ -961,6 +961,10 @@ test "URL: ensureEncoded" {
.url = "https://example.com/path?value=100% done",
.expected = "https://example.com/path?value=100%25%20done",
},
.{
.url = "about:blank",
.expected = "about:blank",
},
};
for (cases) |case| {

284
src/browser/WebBotAuth.zig Normal file
View File

@@ -0,0 +1,284 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const crypto = @import("../crypto.zig");
const Http = @import("../http/Http.zig");
const WebBotAuth = @This();
pkey: *crypto.EVP_PKEY,
keyid: []const u8,
directory_url: [:0]const u8,
pub const Config = struct {
key_file: []const u8,
keyid: []const u8,
domain: []const u8,
};
/// Extracts the raw 32-byte Ed25519 private key from a PKCS#8 PEM blob and
/// wraps it in an OpenSSL EVP_PKEY. Caller owns the returned key and must
/// release it with crypto.EVP_PKEY_free.
///
/// Returns error.InvalidPem when the PEM markers are missing/misordered or
/// the base64 payload has the wrong length, error.InvalidKey when OpenSSL
/// rejects the key material, and base64 decode errors for corrupt payloads.
fn parsePemPrivateKey(pem: []const u8) !*crypto.EVP_PKEY {
    const begin = "-----BEGIN PRIVATE KEY-----";
    const end = "-----END PRIVATE KEY-----";

    const start_idx = std.mem.indexOf(u8, pem, begin) orelse return error.InvalidPem;
    const body_start = start_idx + begin.len;
    // Search for END only *after* BEGIN. A plain indexOf could match an END
    // marker that appears before BEGIN (reversed markers, multiple blocks,
    // garbage input), producing an inverted slice below — a safety panic.
    const end_idx = std.mem.indexOfPos(u8, pem, body_start, end) orelse return error.InvalidPem;

    const b64 = std.mem.trim(u8, pem[body_start..end_idx], &std.ascii.whitespace);

    // An Ed25519 PKCS#8 DER blob is exactly 48 bytes, i.e. exactly 64
    // unpadded-length base64 characters. Validate up-front so decode() is
    // never handed a payload that mismatches the fixed 48-byte buffer.
    if (b64.len != 64) return error.InvalidPem;

    // decode base64 into 48-byte DER buffer
    var der: [48]u8 = undefined;
    try std.base64.standard.Decoder.decode(der[0..48], b64);

    // Ed25519 PKCS#8 structure always places the 32-byte raw private key at offset 16.
    const key_bytes = der[16..48];
    const pkey = crypto.EVP_PKEY_new_raw_private_key(crypto.EVP_PKEY_ED25519, null, key_bytes.ptr, 32);
    return pkey orelse error.InvalidKey;
}
/// Signs `message` with the Ed25519 key `pkey`, writing the 64-byte
/// signature into `out`. Ed25519 is a one-shot algorithm under OpenSSL, so
/// the whole message is passed to a single EVP_DigestSign call.
fn signEd25519(pkey: *crypto.EVP_PKEY, message: []const u8, out: *[64]u8) !void {
    const md_ctx = crypto.EVP_MD_CTX_new() orelse return error.OutOfMemory;
    defer crypto.EVP_MD_CTX_free(md_ctx);

    if (crypto.EVP_DigestSignInit(md_ctx, null, null, null, pkey) != 1) {
        return error.SignInit;
    }

    // in/out: capacity of `out` going in, bytes written coming back (always 64 for Ed25519)
    var out_len: usize = out.len;
    if (crypto.EVP_DigestSign(md_ctx, out.ptr, &out_len, message.ptr, message.len) != 1) {
        return error.SignFailed;
    }
}
/// Builds a WebBotAuth from `config`: reads and parses the PEM private key
/// file and derives the signature directory URL from the configured domain.
/// The returned value owns the private key and the directory URL (released
/// by `deinit`); `keyid` is borrowed from the Config.
pub fn fromConfig(allocator: std.mem.Allocator, config: *const Config) !WebBotAuth {
    const pem_data = try std.fs.cwd().readFileAlloc(allocator, config.key_file, 1024 * 4);
    defer allocator.free(pem_data);

    const private_key = try parsePemPrivateKey(pem_data);
    errdefer crypto.EVP_PKEY_free(private_key);

    const directory = try std.fmt.allocPrintSentinel(
        allocator,
        "https://{s}/.well-known/http-message-signatures-directory",
        .{config.domain},
        0,
    );
    errdefer allocator.free(directory);

    return .{
        .pkey = private_key,
        // Owned by the Config so it's okay.
        .keyid = config.keyid,
        .directory_url = directory,
    };
}
/// Adds the three Web Bot Auth headers (Signature-Agent, Signature-Input,
/// Signature) to `headers`, signing over @authority and signature-agent with
/// this instance's Ed25519 key. `authority` is the host the request targets;
/// the signature is valid for 60 seconds from now.
///
/// All intermediate buffers come from `allocator` and are freed on return;
/// this assumes headers.add copies its argument — call sites pass a transfer
/// arena, so confirm against Http.Headers ownership rules if that changes.
pub fn signRequest(
    self: *const WebBotAuth,
    allocator: std.mem.Allocator,
    headers: *Http.Headers,
    authority: []const u8,
) !void {
    const created = std.time.timestamp();
    const expires = created + 60;

    // signature parameters, without the leading "sig1=" label
    const params = try std.fmt.allocPrint(
        allocator,
        "(\"@authority\" \"signature-agent\");created={d};expires={d};keyid=\"{s}\";alg=\"ed25519\";tag=\"web-bot-auth\"",
        .{ created, expires, self.keyid },
    );
    defer allocator.free(params);

    // canonical signature base: covered components followed by the params line
    const signature_base = try std.fmt.allocPrint(
        allocator,
        "\"@authority\": {s}\n\"signature-agent\": \"{s}\"\n\"@signature-params\": {s}",
        .{ authority, self.directory_url, params },
    );
    defer allocator.free(signature_base);

    var raw_sig: [64]u8 = undefined;
    try signEd25519(self.pkey, signature_base, &raw_sig);

    // base64-encode the raw signature for the Signature header
    const b64 = try allocator.alloc(u8, std.base64.standard.Encoder.calcSize(raw_sig.len));
    defer allocator.free(b64);
    _ = std.base64.standard.Encoder.encode(b64, &raw_sig);

    const agent_header = try std.fmt.allocPrintSentinel(
        allocator,
        "Signature-Agent: \"{s}\"",
        .{self.directory_url},
        0,
    );
    defer allocator.free(agent_header);

    const input_header = try std.fmt.allocPrintSentinel(
        allocator,
        "Signature-Input: sig1={s}",
        .{params},
        0,
    );
    defer allocator.free(input_header);

    const sig_header = try std.fmt.allocPrintSentinel(
        allocator,
        "Signature: sig1=:{s}:",
        .{b64},
        0,
    );
    defer allocator.free(sig_header);

    try headers.add(agent_header);
    try headers.add(input_header);
    try headers.add(sig_header);
}
/// Releases the resources owned by this WebBotAuth: the OpenSSL private key
/// and the heap-allocated directory URL. `keyid` is borrowed from the Config
/// and is not freed here. `allocator` must be the same allocator that was
/// passed to `fromConfig`.
pub fn deinit(self: WebBotAuth, allocator: std.mem.Allocator) void {
    crypto.EVP_PKEY_free(self.pkey);
    allocator.free(self.directory_url);
}
// Smoke test: a well-formed Ed25519 PKCS#8 PEM (48-byte DER, one 64-char
// base64 line) must parse into an EVP_PKEY.
test "parsePemPrivateKey: valid Ed25519 PKCS#8 PEM" {
    const pem =
        \\-----BEGIN PRIVATE KEY-----
        \\MC4CAQAwBQYDK2VwBCIEIBuCRBIEFNtXcMBsyOOkFBFTJcEWTkbgSwKExhOjKFHT
        \\-----END PRIVATE KEY-----
        \\
    ;
    const pkey = try parsePemPrivateKey(pem);
    // release the OpenSSL object so the test doesn't leak
    defer crypto.EVP_PKEY_free(pkey);
}
// A PEM without the BEGIN marker must be rejected, not sliced blindly.
test "parsePemPrivateKey: missing BEGIN marker returns error" {
    const bad_pem = "-----END PRIVATE KEY-----\n";
    try std.testing.expectError(error.InvalidPem, parsePemPrivateKey(bad_pem));
}

// Likewise a PEM that never closes with the END marker.
test "parsePemPrivateKey: missing END marker returns error" {
    const bad_pem = "-----BEGIN PRIVATE KEY-----\nMC4CAQA=\n";
    try std.testing.expectError(error.InvalidPem, parsePemPrivateKey(bad_pem));
}
// Renamed: the old name claimed "signature length is always 64 bytes", but
// the body never checks any length (the out-param is a fixed *[64]u8, so a
// length check is impossible to fail). What it actually verifies is that
// signing writes real data into a previously all-zero buffer.
test "signEd25519: writes a non-zero signature" {
    const pem =
        \\-----BEGIN PRIVATE KEY-----
        \\MC4CAQAwBQYDK2VwBCIEIBuCRBIEFNtXcMBsyOOkFBFTJcEWTkbgSwKExhOjKFHT
        \\-----END PRIVATE KEY-----
        \\
    ;
    const pkey = try parsePemPrivateKey(pem);
    defer crypto.EVP_PKEY_free(pkey);

    // start from all zeroes so written signature bytes are detectable
    var sig: [64]u8 = @splat(0);
    try signEd25519(pkey, "hello world", &sig);

    var all_zero = true;
    for (sig) |b| if (b != 0) {
        all_zero = false;
        break;
    };
    try std.testing.expect(!all_zero);
}
// Ed25519 is deterministic: identical key + identical message must always
// produce byte-identical signatures.
test "signEd25519: same key + message produces same signature (deterministic)" {
    const pem =
        \\-----BEGIN PRIVATE KEY-----
        \\MC4CAQAwBQYDK2VwBCIEIBuCRBIEFNtXcMBsyOOkFBFTJcEWTkbgSwKExhOjKFHT
        \\-----END PRIVATE KEY-----
        \\
    ;
    const pkey = try parsePemPrivateKey(pem);
    defer crypto.EVP_PKEY_free(pkey);

    var sig1: [64]u8 = undefined;
    var sig2: [64]u8 = undefined;
    try signEd25519(pkey, "deterministic test", &sig1);
    try signEd25519(pkey, "deterministic test", &sig2);
    try std.testing.expectEqualSlices(u8, &sig1, &sig2);
}
// Renamed: the old name ended in "(deterministic)", copied from the sibling
// test above, but this test checks distinctness — different messages must
// yield different signatures — not determinism.
test "signEd25519: same key + diff message produces different signature" {
    const pem =
        \\-----BEGIN PRIVATE KEY-----
        \\MC4CAQAwBQYDK2VwBCIEIBuCRBIEFNtXcMBsyOOkFBFTJcEWTkbgSwKExhOjKFHT
        \\-----END PRIVATE KEY-----
        \\
    ;
    const pkey = try parsePemPrivateKey(pem);
    defer crypto.EVP_PKEY_free(pkey);

    var sig1: [64]u8 = undefined;
    var sig2: [64]u8 = undefined;
    try signEd25519(pkey, "msg 1", &sig1);
    try signEd25519(pkey, "msg 2", &sig2);
    try std.testing.expect(!std.mem.eql(u8, &sig1, &sig2));
}
// End-to-end: signRequest must add all three Web Bot Auth headers.
test "signRequest: adds headers with correct names" {
    const allocator = std.testing.allocator;
    const pem =
        \\-----BEGIN PRIVATE KEY-----
        \\MC4CAQAwBQYDK2VwBCIEIBuCRBIEFNtXcMBsyOOkFBFTJcEWTkbgSwKExhOjKFHT
        \\-----END PRIVATE KEY-----
        \\
    ;

    // Build the WebBotAuth inside a labeled block so the errdefer only
    // protects the window before ownership transfers to `auth`. Previously
    // nothing freed `pkey` if the dupeZ below failed (std.testing.allocator
    // would flag the leak); a bare function-scope errdefer isn't an option
    // either, since it would double-free alongside the deferred deinit.
    const auth = blk: {
        const pkey = try parsePemPrivateKey(pem);
        errdefer crypto.EVP_PKEY_free(pkey);

        const directory_url = try allocator.dupeZ(
            u8,
            "https://example.com/.well-known/http-message-signatures-directory",
        );

        break :blk WebBotAuth{
            .pkey = pkey,
            .keyid = "test-key-id",
            .directory_url = directory_url,
        };
    };
    defer auth.deinit(allocator);

    var headers = try Http.Headers.init("User-Agent: Test-Agent");
    defer headers.deinit();

    try auth.signRequest(allocator, &headers, "example.com");

    var it = headers.iterator();
    var found_sig_agent = false;
    var found_sig_input = false;
    var found_signature = false;
    var count: usize = 0;
    while (it.next()) |h| {
        count += 1;
        if (std.ascii.eqlIgnoreCase(h.name, "Signature-Agent")) found_sig_agent = true;
        if (std.ascii.eqlIgnoreCase(h.name, "Signature-Input")) found_sig_input = true;
        if (std.ascii.eqlIgnoreCase(h.name, "Signature")) found_signature = true;
    }
    // the pre-existing User-Agent header plus the three signature headers
    try std.testing.expect(count >= 3);
    try std.testing.expect(found_sig_agent);
    try std.testing.expect(found_sig_input);
    try std.testing.expect(found_signature);
}

View File

@@ -480,10 +480,11 @@ fn consumeName(self: *Tokenizer) []const u8 {
self.consumeEscape();
},
0x0 => self.advance(1),
'\x80'...'\xBF', '\xC0'...'\xEF', '\xF0'...'\xFF' => {
// This byte *is* part of a multi-byte code point,
// well end up copying the whole code point before this loop does something else.
self.advance(1);
'\x80'...'\xFF' => {
// Non-ASCII: advance over the complete UTF-8 code point in one step.
// Using consumeChar() instead of advance(1) ensures we never land on
// a continuation byte, which advance() asserts against.
self.consumeChar();
},
else => {
if (self.hasNonAsciiAt(0)) {

View File

@@ -60,6 +60,11 @@ fn initWithContext(self: *Caller, ctx: *Context, v8_context: *const v8.Context)
ctx.local = &self.local;
}
// Initializes the Caller from a raw v8 FunctionCallbackInfo handle by
// extracting the handle's isolate and delegating to init(). Convenience for
// callconv(.c) callbacks that only receive the callback-info pointer.
pub fn initFromHandle(self: *Caller, handle: ?*const v8.FunctionCallbackInfo) void {
    const isolate = v8.v8__FunctionCallbackInfo__GetIsolate(handle).?;
    self.init(isolate);
}
pub fn deinit(self: *Caller) void {
const ctx = self.local.ctx;
const call_depth = ctx.call_depth - 1;
@@ -441,6 +446,11 @@ pub const FunctionCallbackInfo = struct {
return .{ .local = local, .handle = v8.v8__FunctionCallbackInfo__INDEX(self.handle, @intCast(index)).? };
}
// Returns the opaque value stored in the callback's data slot (set via a
// v8::External when the function was created), or null when no data was
// attached to the function.
pub fn getData(self: FunctionCallbackInfo) ?*anyopaque {
    const data = v8.v8__FunctionCallbackInfo__Data(self.handle) orelse return null;
    return v8.v8__External__Value(@ptrCast(data));
}
pub fn getThis(self: FunctionCallbackInfo) *const v8.Object {
return v8.v8__FunctionCallbackInfo__This(self.handle).?;
}
@@ -499,6 +509,7 @@ pub const Function = struct {
as_typed_array: bool = false,
null_as_undefined: bool = false,
cache: ?Caching = null,
embedded_receiver: bool = false,
// We support two ways to cache a value directly into a v8::Object. The
// difference between the two is like the difference between a Map
@@ -569,6 +580,9 @@ pub const Function = struct {
var args: ParameterTypes(F) = undefined;
if (comptime opts.static) {
args = try getArgs(F, 0, local, info);
} else if (comptime opts.embedded_receiver) {
args = try getArgs(F, 1, local, info);
@field(args, "0") = @ptrCast(@alignCast(info.getData() orelse unreachable));
} else {
args = try getArgs(F, 1, local, info);
@field(args, "0") = try TaggedOpaque.fromJS(*T, info.getThis());

View File

@@ -786,9 +786,16 @@ fn _dynamicModuleCallback(self: *Context, specifier: [:0]const u8, referrer: []c
entry.module_promise = try module_resolver.promise().persist();
} else {
// the module was loaded, but not evaluated, we _have_ to evaluate it now
if (status == .kUninstantiated) {
if (try mod.instantiate(resolveModuleCallback) == false) {
_ = resolver.reject("module instantiation", local.newString("Module instantiation failed"));
return promise;
}
}
const evaluated = mod.evaluate() catch {
if (comptime IS_DEBUG) {
std.debug.assert(status == .kErrored);
std.debug.assert(mod.getStatus() == .kErrored);
}
_ = resolver.reject("module evaluation", local.newString("Module evaluation failed"));
return promise;
@@ -868,13 +875,12 @@ fn resolveDynamicModule(self: *Context, state: *DynamicModuleResolveState, modul
const then_callback = newFunctionWithData(local, struct {
pub fn callback(callback_handle: ?*const v8.FunctionCallbackInfo) callconv(.c) void {
const isolate = v8.v8__FunctionCallbackInfo__GetIsolate(callback_handle).?;
var c: Caller = undefined;
c.init(isolate);
c.initFromHandle(callback_handle);
defer c.deinit();
const info_data = v8.v8__FunctionCallbackInfo__Data(callback_handle).?;
const s: *DynamicModuleResolveState = @ptrCast(@alignCast(v8.v8__External__Value(@ptrCast(info_data))));
const info = Caller.FunctionCallbackInfo{ .handle = callback_handle.? };
const s: *DynamicModuleResolveState = @ptrCast(@alignCast(info.getData() orelse return));
if (s.context_id != c.local.ctx.id) {
// The microtask is tied to the isolate, not the context
@@ -893,17 +899,15 @@ fn resolveDynamicModule(self: *Context, state: *DynamicModuleResolveState, modul
const catch_callback = newFunctionWithData(local, struct {
pub fn callback(callback_handle: ?*const v8.FunctionCallbackInfo) callconv(.c) void {
const isolate = v8.v8__FunctionCallbackInfo__GetIsolate(callback_handle).?;
var c: Caller = undefined;
c.init(isolate);
c.initFromHandle(callback_handle);
defer c.deinit();
const info_data = v8.v8__FunctionCallbackInfo__Data(callback_handle).?;
const s: *DynamicModuleResolveState = @ptrCast(@alignCast(v8.v8__External__Value(@ptrCast(info_data))));
const info = Caller.FunctionCallbackInfo{ .handle = callback_handle.? };
const s: *DynamicModuleResolveState = @ptrCast(@alignCast(info.getData() orelse return));
const l = &c.local;
const ctx = l.ctx;
if (s.context_id != ctx.id) {
if (s.context_id != l.ctx.id) {
return;
}
@@ -1007,6 +1011,13 @@ fn enqueueMicrotask(self: *Context, callback: anytype) void {
}.run, self);
}
// There's an assumption here: the js.Function will be alive when microtasks are
// run. If we're Env.runMicrotasks in all the places that we're supposed to, then
// this should be safe (I think). In whatever HandleScope a microtask is enqueued,
// PerformCheckpoint should be run. So the v8::Local<v8::Function> should remain
// valid. If we have problems with this, a simple solution is to provide a Zig
// wrapper for these callbacks which references a js.Function.Temp, on callback
// it executes the function and then releases the global.
pub fn queueMicrotaskFunc(self: *Context, cb: js.Function) void {
// Use context-specific microtask queue instead of isolate queue
v8.v8__MicrotaskQueue__EnqueueMicrotaskFunc(self.microtask_queue, self.isolate.handle, cb.handle);

View File

@@ -470,6 +470,10 @@ pub fn dumpMemoryStats(self: *Env) void {
, .{ stats.total_heap_size, stats.total_heap_size_executable, stats.total_physical_size, stats.total_available_size, stats.used_heap_size, stats.heap_size_limit, stats.malloced_memory, stats.external_memory, stats.peak_malloced_memory, stats.number_of_native_contexts, stats.number_of_detached_contexts, stats.total_global_handles_size, stats.used_global_handles_size, stats.does_zap_garbage });
}
// Forcefully terminates any JavaScript currently executing on this
// environment's isolate (v8 TerminateExecution).
pub fn terminate(self: *const Env) void {
    v8.v8__Isolate__TerminateExecution(self.isolate.handle);
}
fn promiseRejectCallback(message_handle: v8.PromiseRejectMessage) callconv(.c) void {
const promise_handle = v8.v8__PromiseRejectMessage__GetPromise(&message_handle).?;
const v8_isolate = v8.v8__Object__GetIsolate(@ptrCast(promise_handle)).?;

View File

@@ -82,6 +82,20 @@ pub fn createTypedArray(self: *const Local, comptime array_type: js.ArrayType, s
return .init(self, size);
}
// Creates a js.Function backed by `callback`, smuggling `data` through a
// v8::External attached as the function's data slot. The wrapper invokes
// Caller.Function.call with embedded_receiver = true, so the first Zig
// argument is recovered from info.getData() (i.e. `data`) rather than from
// the JS `this` receiver.
pub fn newCallback(
    self: *const Local,
    callback: anytype,
    data: anytype,
) js.Function {
    const external = self.isolate.createExternal(data);
    const handle = v8.v8__Function__New__DEFAULT2(self.handle, struct {
        fn wrap(info_handle: ?*const js.v8.FunctionCallbackInfo) callconv(.c) void {
            Caller.Function.call(@TypeOf(data), info_handle.?, callback, .{ .embedded_receiver = true });
        }
    }.wrap, @ptrCast(external)).?;
    return .{ .local = self, .handle = handle };
}
pub fn runMacrotasks(self: *const Local) void {
const env = self.ctx.env;
env.pumpMessageLoop();

View File

@@ -767,6 +767,7 @@ pub const JsApis = flattenTypes(&.{
@import("../webapi/element/html/Custom.zig"),
@import("../webapi/element/html/Data.zig"),
@import("../webapi/element/html/DataList.zig"),
@import("../webapi/element/html/Details.zig"),
@import("../webapi/element/html/Dialog.zig"),
@import("../webapi/element/html/Directory.zig"),
@import("../webapi/element/html/DList.zig"),
@@ -826,6 +827,8 @@ pub const JsApis = flattenTypes(&.{
@import("../webapi/element/svg/Generic.zig"),
@import("../webapi/encoding/TextDecoder.zig"),
@import("../webapi/encoding/TextEncoder.zig"),
@import("../webapi/encoding/TextEncoderStream.zig"),
@import("../webapi/encoding/TextDecoderStream.zig"),
@import("../webapi/Event.zig"),
@import("../webapi/event/CompositionEvent.zig"),
@import("../webapi/event/CustomEvent.zig"),
@@ -862,6 +865,10 @@ pub const JsApis = flattenTypes(&.{
@import("../webapi/streams/ReadableStream.zig"),
@import("../webapi/streams/ReadableStreamDefaultReader.zig"),
@import("../webapi/streams/ReadableStreamDefaultController.zig"),
@import("../webapi/streams/WritableStream.zig"),
@import("../webapi/streams/WritableStreamDefaultWriter.zig"),
@import("../webapi/streams/WritableStreamDefaultController.zig"),
@import("../webapi/streams/TransformStream.zig"),
@import("../webapi/Node.zig"),
@import("../webapi/storage/storage.zig"),
@import("../webapi/URL.zig"),

View File

@@ -19,6 +19,8 @@
const std = @import("std");
const Page = @import("Page.zig");
const URL = @import("URL.zig");
const TreeWalker = @import("webapi/TreeWalker.zig");
const CData = @import("webapi/CData.zig");
const Element = @import("webapi/Element.zig");
const Node = @import("webapi/Node.zig");
@@ -103,20 +105,37 @@ fn isVisibleElement(el: *Element) bool {
};
}
// Fallback label for an anchor with no visible content: the aria-label
// attribute wins, then title; null when neither attribute is present.
fn getAnchorLabel(el: *Element) ?[]const u8 {
    if (el.getAttributeSafe(comptime .wrap("aria-label"))) |aria| {
        return aria;
    }
    return el.getAttributeSafe(comptime .wrap("title"));
}
// Returns true when `text` contains only ASCII whitespace characters.
// Vacuously true for the empty string.
fn isAllWhitespace(text: []const u8) bool {
    for (text) |ch| {
        if (!std.ascii.isWhitespace(ch)) return false;
    }
    return true;
}
fn hasBlockDescendant(node: *Node) bool {
var it = node.childrenIterator();
return while (it.next()) |child| {
if (child.is(Element)) |el| {
if (isBlock(el.getTag())) break true;
if (hasBlockDescendant(child)) break true;
// Returns true if any element strictly below `root` (root itself excluded)
// has a block-level tag, using a full-depth tree walk over element nodes.
fn hasBlockDescendant(root: *Node) bool {
    var tw = TreeWalker.FullExcludeSelf.Elements.init(root, .{});
    while (tw.next()) |el| {
        if (isBlock(el.getTag())) return true;
    }
    return false;
}
fn hasVisibleContent(root: *Node) bool {
var tw = TreeWalker.FullExcludeSelf.init(root, .{});
while (tw.next()) |node| {
if (isSignificantText(node)) return true;
if (node.is(Element)) |el| {
if (!isVisibleElement(el)) {
tw.skipChildren();
} else if (el.getTag() == .img) {
return true;
}
}
} else false;
}
return false;
}
fn ensureNewline(state: *State, writer: *std.Io.Writer) !void {
@@ -278,20 +297,29 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
}
try writer.writeAll("](");
if (el.getAttributeSafe(comptime .wrap("src"))) |src| {
try writer.writeAll(src);
const absolute_src = URL.resolve(page.call_arena, page.base(), src, .{ .encode = true }) catch src;
try writer.writeAll(absolute_src);
}
try writer.writeAll(")");
state.last_char_was_newline = false;
return;
},
.anchor => {
const has_content = hasVisibleContent(el.asNode());
const label = getAnchorLabel(el);
const href_raw = el.getAttributeSafe(comptime .wrap("href"));
if (!has_content and label == null and href_raw == null) return;
const has_block = hasBlockDescendant(el.asNode());
const href = if (href_raw) |h| URL.resolve(page.call_arena, page.base(), h, .{ .encode = true }) catch h else null;
if (has_block) {
try renderChildren(el.asNode(), state, writer, page);
if (el.getAttributeSafe(comptime .wrap("href"))) |href| {
if (href) |h| {
if (!state.last_char_was_newline) try writer.writeByte('\n');
try writer.writeAll("([Link](");
try writer.writeAll(href);
try writer.writeAll("([](");
try writer.writeAll(h);
try writer.writeAll("))\n");
state.last_char_was_newline = true;
}
@@ -301,10 +329,14 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
if (isStandaloneAnchor(el)) {
if (!state.last_char_was_newline) try writer.writeByte('\n');
try writer.writeByte('[');
try renderChildren(el.asNode(), state, writer, page);
if (has_content) {
try renderChildren(el.asNode(), state, writer, page);
} else {
try writer.writeAll(label orelse "");
}
try writer.writeAll("](");
if (el.getAttributeSafe(comptime .wrap("href"))) |href| {
try writer.writeAll(href);
if (href) |h| {
try writer.writeAll(h);
}
try writer.writeAll(")\n");
state.last_char_was_newline = true;
@@ -312,10 +344,14 @@ fn renderElement(el: *Element, state: *State, writer: *std.Io.Writer, page: *Pag
}
try writer.writeByte('[');
try renderChildren(el.asNode(), state, writer, page);
if (has_content) {
try renderChildren(el.asNode(), state, writer, page);
} else {
try writer.writeAll(label orelse "");
}
try writer.writeAll("](");
if (el.getAttributeSafe(comptime .wrap("href"))) |href| {
try writer.writeAll(href);
if (href) |h| {
try writer.writeAll(h);
}
try writer.writeByte(')');
state.last_char_was_newline = false;
@@ -452,6 +488,8 @@ fn testMarkdownHTML(html: []const u8, expected: []const u8) !void {
const testing = @import("../testing.zig");
const page = try testing.test_session.createPage();
defer testing.test_session.removePage();
page.url = "http://localhost/";
const doc = page.window._document;
const div = try doc.createElement("div", null, page);
@@ -520,11 +558,11 @@ test "browser.markdown: blockquote" {
}
test "browser.markdown: links" {
try testMarkdownHTML("<a href=\"https://lightpanda.io\">Lightpanda</a>", "[Lightpanda](https://lightpanda.io)\n");
try testMarkdownHTML("<a href=\"/relative\">Link</a>", "[Link](http://localhost/relative)\n");
}
test "browser.markdown: images" {
try testMarkdownHTML("<img src=\"logo.png\" alt=\"Logo\">", "![Logo](logo.png)\n");
try testMarkdownHTML("<img src=\"logo.png\" alt=\"Logo\">", "![Logo](http://localhost/logo.png)\n");
}
test "browser.markdown: headings" {
@@ -565,7 +603,7 @@ test "browser.markdown: block link" {
\\### Title
\\
\\Description
\\([Link](https://example.com))
\\([](https://example.com))
\\
);
}
@@ -588,8 +626,8 @@ test "browser.markdown: standalone anchors" {
\\ <a href="2">Link 2</a>
\\</main>
,
\\[Link 1](1)
\\[Link 2](2)
\\[Link 1](http://localhost/1)
\\[Link 2](http://localhost/2)
\\
);
}
@@ -601,7 +639,58 @@ test "browser.markdown: mixed anchors in main" {
\\ Welcome <a href="1">Link 1</a>.
\\</main>
,
\\Welcome [Link 1](1).
\\Welcome [Link 1](http://localhost/1).
\\
);
}
test "browser.markdown: skip empty links" {
try testMarkdownHTML(
\\<a href="/"></a>
\\<a href="/"><svg></svg></a>
,
\\[](http://localhost/)
\\[](http://localhost/)
\\
);
}
test "browser.markdown: resolve links" {
const testing = @import("../testing.zig");
const page = try testing.test_session.createPage();
defer testing.test_session.removePage();
page.url = "https://example.com/a/index.html";
const doc = page.window._document;
const div = try doc.createElement("div", null, page);
try page.parseHtmlAsChildren(div.asNode(),
\\<a href="b">Link</a>
\\<img src="../c.png" alt="Img">
\\<a href="/my page">Space</a>
);
var aw: std.Io.Writer.Allocating = .init(testing.allocator);
defer aw.deinit();
try dump(div.asNode(), .{}, &aw.writer, page);
try testing.expectString(
\\[Link](https://example.com/a/b)
\\![Img](https://example.com/c.png)
\\[Space](https://example.com/my%20page)
\\
, aw.written());
}
test "browser.markdown: anchor fallback label" {
try testMarkdownHTML(
\\<a href="/discord" aria-label="Discord Server"><svg></svg></a>
, "[Discord Server](http://localhost/discord)\n");
try testMarkdownHTML(
\\<a href="/search" title="Search Site"><svg></svg></a>
, "[Search Site](http://localhost/search)\n");
try testMarkdownHTML(
\\<a href="/no-label"><svg></svg></a>
, "[](http://localhost/no-label)\n");
}

View File

@@ -256,3 +256,22 @@
testing.expectTrue(!html.includes('opacity:0'));
}
</script>
<script id="CSSStyleDeclaration_non_ascii_custom_property">
{
// Regression test: accessing element.style must not crash when the inline
// style attribute contains CSS custom properties with non-ASCII (UTF-8
// multibyte) names, such as French accented characters.
// The CSS Tokenizer's consumeName() must advance over whole UTF-8 sequences
// rather than byte-by-byte to avoid landing on a continuation byte.
const div = document.createElement('div');
div.setAttribute('style',
'--color-store-bulles-\u00e9t\u00e9-fg: #6a818f;' +
'--color-store-soir\u00e9es-odl-fg: #56b3b3;' +
'color: red;'
);
// Must not crash, and ASCII properties that follow non-ASCII ones must be readable.
testing.expectEqual('red', div.style.getPropertyValue('color'));
}
</script>

View File

@@ -53,3 +53,22 @@
testing.expectEqual('NO-CONSTRUCTOR-ELEMENT', el.tagName);
}
</script>
<div id=clone_container></div>
<script id=clone>
{
let calls = 0;
class MyCloneElementA extends HTMLElement {
constructor() {
super();
calls += 1;
$('#clone_container').appendChild(this);
}
}
customElements.define('my-clone_element_a', MyCloneElementA);
const original = document.createElement('my-clone_element_a');
$('#clone_container').cloneNode(true);
testing.expectEqual(2, calls);
}
</script>

View File

@@ -111,3 +111,15 @@
const containerDataTest = document.querySelector('#container [data-test]');
testing.expectEqual('First', containerDataTest.innerText);
</script>
<link rel="preload" as="image" imagesrcset="url1.png 1x, url2.png 2x" id="preload-link">
<script id="commaInAttrValue">
// Commas inside quoted attribute values must not be treated as selector separators
const el = document.querySelector('link[rel="preload"][as="image"][imagesrcset="url1.png 1x, url2.png 2x"]');
testing.expectEqual('preload-link', el.id);
// Also test with single quotes inside selector
const el2 = document.querySelector("link[imagesrcset='url1.png 1x, url2.png 2x']");
testing.expectEqual('preload-link', el2.id);
</script>

View File

@@ -0,0 +1,63 @@
<!DOCTYPE html>
<script src="../../testing.js"></script>
<!-- Details elements -->
<details id="details1">
<summary>Summary</summary>
Content
</details>
<details id="details2" open>
<summary>Open Summary</summary>
Content
</details>
<script id="instanceof">
{
const details = document.createElement('details')
testing.expectTrue(details instanceof HTMLDetailsElement)
}
</script>
<script id="open_initial">
testing.expectEqual(false, $('#details1').open)
testing.expectEqual(true, $('#details2').open)
</script>
<script id="open_set">
{
$('#details1').open = true
testing.expectEqual(true, $('#details1').open)
$('#details2').open = false
testing.expectEqual(false, $('#details2').open)
}
</script>
<script id="open_reflects_attribute">
{
const details = document.createElement('details')
testing.expectEqual(null, details.getAttribute('open'))
details.open = true
testing.expectEqual('', details.getAttribute('open'))
details.open = false
testing.expectEqual(null, details.getAttribute('open'))
}
</script>
<script id="name_initial">
{
const details = document.createElement('details')
testing.expectEqual('', details.name)
}
</script>
<script id="name_set">
{
const details = document.createElement('details')
details.name = 'group1'
testing.expectEqual('group1', details.name)
testing.expectEqual('group1', details.getAttribute('name'))
}
</script>

View File

@@ -0,0 +1,75 @@
<!DOCTYPE html>
<script src="../../testing.js"></script>
<video id="video1">
<track id="track1" kind="subtitles">
<track id="track2" kind="captions">
<track id="track3" kind="invalid-kind">
</video>
<script id="instanceof">
{
const track = document.createElement("track");
testing.expectEqual(true, track instanceof HTMLTrackElement);
testing.expectEqual("[object HTMLTrackElement]", track.toString());
}
</script>
<script id="kind_default">
{
const track = document.createElement("track");
testing.expectEqual("subtitles", track.kind);
}
</script>
<script id="kind_valid_values">
{
const track = document.createElement("track");
track.kind = "captions";
testing.expectEqual("captions", track.kind);
track.kind = "descriptions";
testing.expectEqual("descriptions", track.kind);
track.kind = "chapters";
testing.expectEqual("chapters", track.kind);
track.kind = "metadata";
testing.expectEqual("metadata", track.kind);
}
</script>
<script id="kind_invalid">
{
const track = document.createElement("track");
track.kind = null;
testing.expectEqual("metadata", track.kind);
track.kind = "Subtitles";
testing.expectEqual("subtitles", track.kind);
track.kind = "";
testing.expectEqual("metadata", track.kind);
}
</script>
<script id="constants">
{
const track = document.createElement("track");
testing.expectEqual(0, track.NONE);
testing.expectEqual(1, track.LOADING);
testing.expectEqual(2, track.LOADED);
testing.expectEqual(3, track.ERROR);
}
</script>
<script id="constants_static">
{
testing.expectEqual(0, HTMLTrackElement.NONE);
testing.expectEqual(1, HTMLTrackElement.LOADING);
testing.expectEqual(2, HTMLTrackElement.LOADED);
testing.expectEqual(3, HTMLTrackElement.ERROR);
}
</script>

View File

@@ -3,7 +3,7 @@
<script>
function frame1Onload() {
window.f1_onload = true;
window.f1_onload = 'f1_onload_loaded';
}
</script>
@@ -11,6 +11,9 @@
<iframe id=f2 src="support/sub2.html"></iframe>
<script id="basic">
// reload it
$('#f2').src = 'support/sub2.html';
testing.eventually(() => {
testing.expectEqual(undefined, window[10]);
@@ -47,8 +50,11 @@
// child frame's top.parent is itself (root has no parent)
testing.expectEqual(window, window[0].top.parent);
// Todo: Context security tokens
// testing.expectEqual(true, window.sub1_loaded);
// testing.expectEqual(true, window.sub2_loaded);
// testing.expectEqual(1, window.sub1_count);
// testing.expectEqual(2, window.sub2_count);
});
</script>
@@ -63,14 +69,26 @@
document.documentElement.appendChild(f3);
testing.eventually(() => {
testing.expectEqual(true, window.f1_onload);
testing.expectEqual('f1_onload_loaded', window.f1_onload);
testing.expectEqual(true, f3_load_event);
});
}
</script>
<script id=onload>
{
let f4 = document.createElement('iframe');
f4.src = "about:blank";
document.documentElement.appendChild(f4);
testing.eventually(() => {
testing.expectEqual("<html><head></head><body></body></html>", f4.contentDocument.documentElement.outerHTML);
});
}
</script>
<script id=count>
testing.eventually(() => {
testing.expectEqual(3, window.length);
testing.expectEqual(4, window.length);
});
</script>

View File

@@ -3,4 +3,5 @@
<script>
// should not have access to the parent's JS context
window.top.sub1_loaded = window.testing == undefined;
window.top.sub1_count = (window.top.sub1_count || 0) + 1;
</script>

View File

@@ -4,4 +4,5 @@
<script>
// should not have access to the parent's JS context
window.top.sub2_loaded = window.testing == undefined;
window.top.sub2_count = (window.top.sub2_count || 0) + 1;
</script>

View File

@@ -301,3 +301,74 @@
testing.expectEqual(false, data3.done);
})();
</script>
<script id=enqueue_preserves_number>
(async function() {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(42);
controller.enqueue(0);
controller.enqueue(3.14);
controller.close();
}
});
const reader = stream.getReader();
const r1 = await reader.read();
testing.expectEqual(false, r1.done);
testing.expectEqual('number', typeof r1.value);
testing.expectEqual(42, r1.value);
const r2 = await reader.read();
testing.expectEqual('number', typeof r2.value);
testing.expectEqual(0, r2.value);
const r3 = await reader.read();
testing.expectEqual('number', typeof r3.value);
testing.expectEqual(3.14, r3.value);
const r4 = await reader.read();
testing.expectEqual(true, r4.done);
})();
</script>
<script id=enqueue_preserves_bool>
(async function() {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(true);
controller.enqueue(false);
controller.close();
}
});
const reader = stream.getReader();
const r1 = await reader.read();
testing.expectEqual('boolean', typeof r1.value);
testing.expectEqual(true, r1.value);
const r2 = await reader.read();
testing.expectEqual('boolean', typeof r2.value);
testing.expectEqual(false, r2.value);
})();
</script>
<script id=enqueue_preserves_object>
(async function() {
const stream = new ReadableStream({
start(controller) {
controller.enqueue({ key: 'value', num: 7 });
controller.close();
}
});
const reader = stream.getReader();
const r1 = await reader.read();
testing.expectEqual('object', typeof r1.value);
testing.expectEqual('value', r1.value.key);
testing.expectEqual(7, r1.value.num);
})();
</script>

View File

@@ -0,0 +1,82 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=text_decoder_stream_encoding>
{
const tds = new TextDecoderStream();
testing.expectEqual('utf-8', tds.encoding);
testing.expectEqual('object', typeof tds.readable);
testing.expectEqual('object', typeof tds.writable);
testing.expectEqual(false, tds.fatal);
testing.expectEqual(false, tds.ignoreBOM);
}
</script>
<script id=text_decoder_stream_with_label>
{
const tds = new TextDecoderStream('utf-8');
testing.expectEqual('utf-8', tds.encoding);
}
</script>
<script id=text_decoder_stream_with_opts>
{
const tds = new TextDecoderStream('utf-8', { fatal: true, ignoreBOM: true });
testing.expectEqual(true, tds.fatal);
testing.expectEqual(true, tds.ignoreBOM);
}
</script>
<script id=text_decoder_stream_invalid_label>
{
let errorThrown = false;
try {
new TextDecoderStream('windows-1252');
} catch (e) {
errorThrown = true;
}
testing.expectEqual(true, errorThrown);
}
</script>
<script id=text_decoder_stream_decode>
(async function() {
const tds = new TextDecoderStream();
const writer = tds.writable.getWriter();
const reader = tds.readable.getReader();
// 'hello' in UTF-8 bytes
const bytes = new Uint8Array([104, 101, 108, 108, 111]);
await writer.write(bytes);
await writer.close();
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual('hello', result.value);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>
<script id=text_decoder_stream_empty_chunk>
(async function() {
const tds = new TextDecoderStream();
const writer = tds.writable.getWriter();
const reader = tds.readable.getReader();
// Write an empty chunk followed by real data
await writer.write(new Uint8Array([]));
await writer.write(new Uint8Array([104, 105]));
await writer.close();
// Empty chunk should be filtered out; first read gets "hi"
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual('hi', result.value);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>

View File

@@ -0,0 +1,164 @@
<!DOCTYPE html>
<script src="../testing.js"></script>
<script id=transform_stream_basic>
{
const ts = new TransformStream();
testing.expectEqual('object', typeof ts);
testing.expectEqual('object', typeof ts.readable);
testing.expectEqual('object', typeof ts.writable);
}
</script>
<script id=transform_stream_with_transformer>
(async function() {
const ts = new TransformStream({
transform(chunk, controller) {
controller.enqueue(chunk.toUpperCase());
}
});
const writer = ts.writable.getWriter();
const reader = ts.readable.getReader();
await writer.write('hello');
await writer.close();
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual('HELLO', result.value);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>
<script id=writable_stream_basic>
{
const ws = new WritableStream();
testing.expectEqual('object', typeof ws);
testing.expectEqual(false, ws.locked);
}
</script>
<script id=writable_stream_writer>
{
const ws = new WritableStream();
const writer = ws.getWriter();
testing.expectEqual('object', typeof writer);
testing.expectEqual(true, ws.locked);
}
</script>
<script id=writable_stream_writer_desired_size>
{
const ws = new WritableStream();
const writer = ws.getWriter();
testing.expectEqual(1, writer.desiredSize);
}
</script>
<script id=text_encoder_stream_encoding>
{
const tes = new TextEncoderStream();
testing.expectEqual('utf-8', tes.encoding);
testing.expectEqual('object', typeof tes.readable);
testing.expectEqual('object', typeof tes.writable);
}
</script>
<script id=text_encoder_stream_encode>
(async function() {
const tes = new TextEncoderStream();
const writer = tes.writable.getWriter();
const reader = tes.readable.getReader();
await writer.write('hi');
await writer.close();
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual(true, result.value instanceof Uint8Array);
// 'hi' in UTF-8 is [104, 105]
testing.expectEqual(104, result.value[0]);
testing.expectEqual(105, result.value[1]);
testing.expectEqual(2, result.value.length);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>
<script id=pipe_through_basic>
(async function() {
const input = new ReadableStream({
start(controller) {
controller.enqueue('hello');
controller.close();
}
});
const ts = new TransformStream({
transform(chunk, controller) {
controller.enqueue(chunk.toUpperCase());
}
});
const output = input.pipeThrough(ts);
const reader = output.getReader();
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual('HELLO', result.value);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>
<script id=pipe_to_basic>
(async function() {
const chunks = [];
const input = new ReadableStream({
start(controller) {
controller.enqueue('a');
controller.enqueue('b');
controller.close();
}
});
const ws = new WritableStream({
write(chunk) {
chunks.push(chunk);
}
});
await input.pipeTo(ws);
testing.expectEqual(2, chunks.length);
testing.expectEqual('a', chunks[0]);
testing.expectEqual('b', chunks[1]);
})();
</script>
<script id=pipe_through_text_decoder>
(async function() {
const bytes = new Uint8Array([104, 101, 108, 108, 111]);
const input = new ReadableStream({
start(controller) {
controller.enqueue(bytes);
controller.close();
}
});
const output = input.pipeThrough(new TextDecoderStream());
const reader = output.getReader();
const result = await reader.read();
testing.expectEqual(false, result.done);
testing.expectEqual('hello', result.value);
const result2 = await reader.read();
testing.expectEqual(true, result2.done);
})();
</script>

View File

@@ -209,6 +209,7 @@ pub fn getTagNameLower(self: *const Element) []const u8 {
.custom => |e| e._tag_name.str(),
.data => "data",
.datalist => "datalist",
.details => "details",
.dialog => "dialog",
.directory => "dir",
.div => "div",
@@ -287,6 +288,7 @@ pub fn getTagNameSpec(self: *const Element, buf: []u8) []const u8 {
.custom => |e| upperTagName(&e._tag_name, buf),
.data => "DATA",
.datalist => "DATALIST",
.details => "DETAILS",
.dialog => "DIALOG",
.directory => "DIR",
.div => "DIV",
@@ -1327,9 +1329,18 @@ pub fn clone(self: *Element, deep: bool, page: *Page) !*Node {
var child_it = self.asNode().childrenIterator();
while (child_it.next()) |child| {
const cloned_child = try child.cloneNode(true, page);
if (cloned_child._parent != null) {
// This is almost always false, the only case where a cloned
// node would already have a parent is with a custom element
// that has a constructor (which is called during cloning) which
// inserts it somewhere. In that case, whatever parent was set
// in the constructor should not be changed.
continue;
}
// We pass `true` to `child_already_connected` as a hacky optimization
// We _know_ this child isn't connected (Becasue the parent isn't connected)
// setting this to `true` skips all connection checks and just assumes t
// We _know_ this child isn't connected (Because the parent isn't connected)
// setting this to `true` skips all connection checks.
try page.appendNode(node, cloned_child, .{ .child_already_connected = true });
}
}
@@ -1385,6 +1396,7 @@ pub fn getTag(self: *const Element) Tag {
.custom => .custom,
.data => .data,
.datalist => .datalist,
.details => .details,
.dialog => .dialog,
.directory => .directory,
.iframe => .iframe,

View File

@@ -31,6 +31,7 @@ const Mode = enum {
pub fn TreeWalker(comptime mode: Mode) type {
return struct {
_current: ?*Node = null,
_next: ?*Node,
_root: *Node,
@@ -47,37 +48,46 @@ pub fn TreeWalker(comptime mode: Mode) type {
pub fn next(self: *Self) ?*Node {
const node = self._next orelse return null;
self._current = node;
if (comptime mode == .children) {
self._next = Node.linkToNodeOrNull(node._child_link.next);
self._next = node.nextSibling();
return node;
}
if (node._children) |children| {
self._next = children.first();
} else if (node._child_link.next) |n| {
self._next = Node.linkToNode(n);
if (node.firstChild()) |child| {
self._next = child;
} else {
// No children, no next sibling - walk up until we find a next sibling or hit root
var current = node._parent;
while (current) |parent| {
if (parent == self._root) {
self._next = null;
break;
var current: *Node = node;
while (current != self._root) {
if (current.nextSibling()) |sibling| {
self._next = sibling;
return node;
}
if (parent._child_link.next) |next_sibling| {
self._next = Node.linkToNode(next_sibling);
break;
}
current = parent._parent;
} else {
self._next = null;
current = current._parent orelse break;
}
self._next = null;
}
return node;
}
/// Skips the subtree of the most recently returned node: the next call to
/// `next()` yields that node's following sibling (or the nearest ancestor's
/// following sibling) instead of descending into its children.
pub fn skipChildren(self: *Self) void {
    // A children-only walker never descends, so there is nothing to skip.
    if (comptime mode == .children) return;
    // No-op until next() has returned at least one node.
    const current_node = self._current orelse return;
    // Walk upward from the current node until a next sibling is found or
    // the walk reaches the traversal root.
    var current: *Node = current_node;
    while (current != self._root) {
        if (current.nextSibling()) |sibling| {
            self._next = sibling;
            return;
        }
        current = current._parent orelse break;
    }
    // No sibling anywhere up to the root: traversal is exhausted.
    self._next = null;
}
pub fn reset(self: *Self) void {
self._current = null;
self._next = firstNext(self._root);
}
@@ -147,3 +157,38 @@ pub fn TreeWalker(comptime mode: Mode) type {
};
};
}
test "TreeWalker: skipChildren" {
    const testing = @import("../../testing.zig");
    const page = try testing.test_session.createPage();
    defer testing.test_session.removePage();
    const doc = page.window._document;
    // Fixture tree (elements only, no text nodes are appended):
    // <div>
    //   <span>
    //     <b></b>
    //   </span>
    //   <p></p>
    // </div>
    const div = try doc.createElement("div", null, page);
    const span = try doc.createElement("span", null, page);
    const b = try doc.createElement("b", null, page);
    const p = try doc.createElement("p", null, page);
    _ = try span.asNode().appendChild(b.asNode(), page);
    _ = try div.asNode().appendChild(span.asNode(), page);
    _ = try div.asNode().appendChild(p.asNode(), page);
    var tw = Full.init(div.asNode(), .{});
    // Full mode yields the root itself first.
    try testing.expect(tw.next() == div.asNode());
    // Then its first child, span.
    try testing.expect(tw.next() == span.asNode());
    // Skipping span's children must jump over <b> straight to <p>.
    tw.skipChildren();
    try testing.expect(tw.next() == p.asNode());
    try testing.expect(tw.next() == null);
}

View File

@@ -255,7 +255,7 @@ fn getDefaultDisplay(element: *const Element) []const u8 {
.html => |html| {
return switch (html._type) {
.anchor, .br, .span, .label, .time, .font, .mod, .quote => "inline",
.body, .div, .dl, .p, .heading, .form, .button, .canvas, .dialog, .embed, .head, .html, .hr, .iframe, .img, .input, .li, .link, .meta, .ol, .option, .script, .select, .slot, .style, .template, .textarea, .title, .ul, .media, .area, .base, .datalist, .directory, .fieldset, .legend, .map, .meter, .object, .optgroup, .output, .param, .picture, .pre, .progress, .source, .table, .table_caption, .table_cell, .table_col, .table_row, .table_section, .track => "block",
.body, .div, .dl, .p, .heading, .form, .button, .canvas, .details, .dialog, .embed, .head, .html, .hr, .iframe, .img, .input, .li, .link, .meta, .ol, .option, .script, .select, .slot, .style, .template, .textarea, .title, .ul, .media, .area, .base, .datalist, .directory, .fieldset, .legend, .map, .meter, .object, .optgroup, .output, .param, .picture, .pre, .progress, .source, .table, .table_caption, .table_cell, .table_col, .table_row, .table_section, .track => "block",
.generic, .custom, .unknown, .data => blk: {
const tag = element.getTagNameLower();
if (isInlineTag(tag)) break :blk "inline";

View File

@@ -39,6 +39,7 @@ pub const Canvas = @import("html/Canvas.zig");
pub const Custom = @import("html/Custom.zig");
pub const Data = @import("html/Data.zig");
pub const DataList = @import("html/DataList.zig");
pub const Details = @import("html/Details.zig");
pub const Dialog = @import("html/Dialog.zig");
pub const Directory = @import("html/Directory.zig");
pub const Div = @import("html/Div.zig");
@@ -119,6 +120,7 @@ pub const Type = union(enum) {
custom: *Custom,
data: *Data,
datalist: *DataList,
details: *Details,
dialog: *Dialog,
directory: *Directory,
div: *Div,

View File

@@ -0,0 +1,58 @@
const js = @import("../../../js/js.zig");
const Page = @import("../../../Page.zig");
const Node = @import("../../Node.zig");
const Element = @import("../../Element.zig");
const HtmlElement = @import("../Html.zig");
const Details = @This();
_proto: *HtmlElement,
/// Upcast to the underlying Element (Details -> HtmlElement -> Element).
pub fn asElement(self: *Details) *Element {
    return self._proto._proto;
}

/// Const variant of `asElement`.
pub fn asConstElement(self: *const Details) *const Element {
    return self._proto._proto;
}

/// Upcast all the way to the generic DOM Node.
pub fn asNode(self: *Details) *Node {
    return self.asElement().asNode();
}
/// `open` is a boolean content attribute: true when the attribute is
/// present (with any value, including ""), false when it is absent.
pub fn getOpen(self: *const Details) bool {
    return self.asConstElement().getAttributeSafe(comptime .wrap("open")) != null;
}

/// Reflects the `open` IDL attribute onto the content attribute:
/// true writes open="" and false removes the attribute entirely.
pub fn setOpen(self: *Details, open: bool, page: *Page) !void {
    if (open) {
        try self.asElement().setAttributeSafe(comptime .wrap("open"), .wrap(""), page);
    } else {
        try self.asElement().removeAttribute(comptime .wrap("open"), page);
    }
}
/// Returns the `name` content attribute; an absent attribute reads as "".
pub fn getName(self: *const Details) []const u8 {
    return self.asConstElement().getAttributeSafe(comptime .wrap("name")) orelse "";
}

/// Writes the `name` content attribute verbatim (no normalization).
pub fn setName(self: *Details, value: []const u8, page: *Page) !void {
    try self.asElement().setAttributeSafe(comptime .wrap("name"), .wrap(value), page);
}
/// JS bindings: exposes this struct to scripts as HTMLDetailsElement with
/// `open` and `name` reflected accessors.
pub const JsApi = struct {
    pub const bridge = js.Bridge(Details);
    pub const Meta = struct {
        pub const name = "HTMLDetailsElement";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
    };
    pub const open = bridge.accessor(Details.getOpen, Details.setOpen, .{});
    pub const name = bridge.accessor(Details.getName, Details.setName, .{});
};

const testing = @import("../../../../testing.zig");
test "WebApi: HTML.Details" {
    try testing.htmlRunner("element/html/details.html", .{});
}

View File

@@ -58,6 +58,9 @@ pub fn setSrc(self: *IFrame, src: []const u8, page: *Page) !void {
try element.setAttributeSafe(comptime .wrap("src"), .wrap(src), page);
self._src = element.getAttributeSafe(comptime .wrap("src")) orelse unreachable;
if (element.asNode().isConnected()) {
// unlike script, an iframe is reloaded every time the src is set
// even if it's set to the same URL.
self._executed = false;
try page.iframeAddedCallback(self);
}
}

View File

@@ -1,4 +1,26 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const js = @import("../../../js/js.zig");
const String = @import("../../../../string.zig").String;
const Node = @import("../../Node.zig");
const Element = @import("../../Element.zig");
const HtmlElement = @import("../Html.zig");
@@ -6,6 +28,10 @@ const HtmlElement = @import("../Html.zig");
const Track = @This();
_proto: *HtmlElement,
_kind: String,
_ready_state: ReadyState,
const ReadyState = enum(u8) { none, loading, loaded, @"error" };
pub fn asElement(self: *Track) *Element {
return self._proto._proto;
@@ -14,6 +40,38 @@ pub fn asNode(self: *Track) *Node {
return self.asElement().asNode();
}
/// Sets the normalized `kind` value. Recognized keywords are stored as-is;
/// a null value or any unrecognized keyword falls back to "metadata"
/// (the enumerated attribute's invalid-value default).
///
/// NOTE(review): the HTML spec matches enumerated-attribute keywords
/// ASCII case-insensitively for ALL keywords; here only "subtitles" is
/// matched case-insensitively (mirroring observed Firefox behavior) —
/// confirm this asymmetry is intended.
pub fn setKind(self: *Track, maybe_kind: ?String) void {
    const kind = maybe_kind orelse {
        // null coerces to an unrecognized value -> invalid-value default.
        self._kind = comptime .wrap("metadata");
        return;
    };
    // Special case: Firefox matches "subtitles" case-insensitively.
    if (std.ascii.eqlIgnoreCase(kind.str(), "subtitles")) {
        self._kind = comptime .wrap("subtitles");
        return;
    }
    if (kind.eql(comptime .wrap("captions"))) {
        self._kind = comptime .wrap("captions");
        return;
    }
    if (kind.eql(comptime .wrap("descriptions"))) {
        self._kind = comptime .wrap("descriptions");
        return;
    }
    if (kind.eql(comptime .wrap("chapters"))) {
        self._kind = comptime .wrap("chapters");
        return;
    }
    // Anything else must be considered as `metadata`.
    self._kind = comptime .wrap("metadata");
}

/// Returns the stored, already-normalized kind keyword.
pub fn getKind(self: *const Track) String {
    return self._kind;
}
pub const JsApi = struct {
pub const bridge = js.Bridge(Track);
@@ -22,4 +80,16 @@ pub const JsApi = struct {
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
};
pub const kind = bridge.accessor(Track.getKind, Track.setKind, .{});
pub const NONE = bridge.property(@as(u16, @intFromEnum(ReadyState.none)), .{ .template = true });
pub const LOADING = bridge.property(@as(u16, @intFromEnum(ReadyState.loading)), .{ .template = true });
pub const LOADED = bridge.property(@as(u16, @intFromEnum(ReadyState.loaded)), .{ .template = true });
pub const ERROR = bridge.property(@as(u16, @intFromEnum(ReadyState.@"error")), .{ .template = true });
};
const testing = @import("../../../../testing.zig");
test "WebApi: HTML.Track" {
try testing.htmlRunner("element/html/track.html", .{});
}

View File

@@ -0,0 +1,137 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const std = @import("std");
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const ReadableStream = @import("../streams/ReadableStream.zig");
const WritableStream = @import("../streams/WritableStream.zig");
const TransformStream = @import("../streams/TransformStream.zig");
const TextDecoderStream = @This();
_transform: *TransformStream,
_fatal: bool,
_ignore_bom: bool,
const Label = enum {
utf8,
@"utf-8",
@"unicode-1-1-utf-8",
};
const InitOpts = struct {
fatal: bool = false,
ignoreBOM: bool = false,
};
/// Creates a TextDecoderStream backed by a TransformStream whose transform
/// decodes incoming byte chunks.
///
/// Only UTF-8 labels ("utf8", "utf-8", "unicode-1-1-utf-8") are accepted;
/// any other label raises error.RangeError (surfaced to JS as a RangeError).
///
/// NOTE(review): `opts.fatal` is stored (readable via the `fatal` getter)
/// but is never passed to the decode transform below — confirm whether
/// fatal-mode error handling is intentionally unimplemented.
pub fn init(label_: ?[]const u8, opts_: ?InitOpts, page: *Page) !TextDecoderStream {
    if (label_) |label| {
        // Validate the label only; all accepted labels mean UTF-8.
        _ = std.meta.stringToEnum(Label, label) orelse return error.RangeError;
    }
    const opts = opts_ orelse InitOpts{};
    // ignoreBOM is a comptime-known parameter of the transform, so select
    // between two monomorphized wrappers rather than capturing state.
    const decodeFn: TransformStream.ZigTransformFn = blk: {
        if (opts.ignoreBOM) {
            break :blk struct {
                fn decode(controller: *TransformStream.DefaultController, chunk: js.Value) !void {
                    return decodeTransform(controller, chunk, true);
                }
            }.decode;
        } else {
            break :blk struct {
                fn decode(controller: *TransformStream.DefaultController, chunk: js.Value) !void {
                    return decodeTransform(controller, chunk, false);
                }
            }.decode;
        }
    };
    const transform = try TransformStream.initWithZigTransform(decodeFn, page);
    return .{
        ._transform = transform,
        ._fatal = opts.fatal,
        ._ignore_bom = opts.ignoreBOM,
    };
}
/// Adds a reference to the underlying transform; this wrapper shares its
/// lifetime with the transform's refcounted stream.
pub fn acquireRef(self: *TextDecoderStream) void {
    self._transform.acquireRef();
}

/// Drops this wrapper's reference on the underlying transform.
pub fn deinit(self: *TextDecoderStream, shutdown: bool, page: *Page) void {
    self._transform.deinit(shutdown, page);
}
/// Transform callback shared by both BOM modes: interprets the incoming
/// Uint8Array chunk as UTF-8 text and forwards it downstream as a string.
/// NOTE(review): the BOM is stripped from *every* chunk that begins with
/// EF BB BF, not only the first chunk of the stream — confirm against the
/// Encoding spec, which only special-cases the stream's start.
/// NOTE(review): `fatal` is not consulted here, and a multi-byte sequence
/// split across chunk boundaries is forwarded as-is — TODO confirm intended.
fn decodeTransform(controller: *TransformStream.DefaultController, chunk: js.Value, ignoreBOM: bool) !void {
    // chunk should be a Uint8Array; decode it as UTF-8 string
    const typed_array = try chunk.toZig(js.TypedArray(u8));
    var input = typed_array.values;
    // Strip UTF-8 BOM if present (ignoreBOM=true keeps it in the output)
    if (ignoreBOM == false and std.mem.startsWith(u8, input, &.{ 0xEF, 0xBB, 0xBF })) {
        input = input[3..];
    }
    // Per spec, empty chunks produce no output
    if (input.len == 0) return;
    try controller.enqueue(.{ .string = input });
}
/// `readable` getter — the readable side of the underlying transform.
pub fn getReadable(self: *const TextDecoderStream) *ReadableStream {
    return self._transform.getReadable();
}

/// `writable` getter — the writable side of the underlying transform.
pub fn getWritable(self: *const TextDecoderStream) *WritableStream {
    return self._transform.getWritable();
}

/// `fatal` getter — echoes the constructor option.
pub fn getFatal(self: *const TextDecoderStream) bool {
    return self._fatal;
}

/// `ignoreBOM` getter — echoes the constructor option.
pub fn getIgnoreBOM(self: *const TextDecoderStream) bool {
    return self._ignore_bom;
}
/// JS bindings for TextDecoderStream: constructor, read-only accessors,
/// and the fixed "utf-8" `encoding` property.
pub const JsApi = struct {
    pub const bridge = js.Bridge(TextDecoderStream);
    pub const Meta = struct {
        pub const name = "TextDecoderStream";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
        // Weakly held by the JS engine; the GC finalizer drives deinit.
        pub const weak = true;
        pub const finalizer = bridge.finalizer(TextDecoderStream.deinit);
    };
    pub const constructor = bridge.constructor(TextDecoderStream.init, .{});
    pub const encoding = bridge.property("utf-8", .{ .template = false });
    pub const readable = bridge.accessor(TextDecoderStream.getReadable, null, .{});
    pub const writable = bridge.accessor(TextDecoderStream.getWritable, null, .{});
    pub const fatal = bridge.accessor(TextDecoderStream.getFatal, null, .{});
    pub const ignoreBOM = bridge.accessor(TextDecoderStream.getIgnoreBOM, null, .{});
};
const testing = @import("../../../testing.zig");
test "WebApi: TextDecoderStream" {
try testing.htmlRunner("streams/text_decoder_stream.html", .{});
}

View File

@@ -0,0 +1,80 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const ReadableStream = @import("../streams/ReadableStream.zig");
const WritableStream = @import("../streams/WritableStream.zig");
const TransformStream = @import("../streams/TransformStream.zig");
const TextEncoderStream = @This();
_transform: *TransformStream,
/// Creates a TextEncoderStream backed by a TransformStream whose transform
/// converts JS string chunks into UTF-8 bytes (see encodeTransform).
pub fn init(page: *Page) !TextEncoderStream {
    return .{ ._transform = try TransformStream.initWithZigTransform(&encodeTransform, page) };
}
/// Adds a reference to the underlying transform (shared lifetime).
pub fn acquireRef(self: *TextEncoderStream) void {
    self._transform.acquireRef();
}

/// Drops this wrapper's reference on the underlying transform.
pub fn deinit(self: *TextEncoderStream, shutdown: bool, page: *Page) void {
    self._transform.deinit(shutdown, page);
}

/// Transform callback: encodes a JS string chunk as UTF-8 bytes.
/// NOTE(review): `isString()` appears to yield an optional handle here —
/// non-string chunks error out with InvalidChunk, whereas the spec's
/// encoder converts chunks via ToString first; confirm intended behavior.
fn encodeTransform(controller: *TransformStream.DefaultController, chunk: js.Value) !void {
    // chunk should be a JS string; encode it as UTF-8 bytes (Uint8Array)
    const str = chunk.isString() orelse return error.InvalidChunk;
    const slice = try str.toSlice();
    try controller.enqueue(.{ .uint8array = .{ .values = slice } });
}
/// `readable` getter — the readable side of the underlying transform.
pub fn getReadable(self: *const TextEncoderStream) *ReadableStream {
    return self._transform.getReadable();
}

/// `writable` getter — the writable side of the underlying transform.
pub fn getWritable(self: *const TextEncoderStream) *WritableStream {
    return self._transform.getWritable();
}

/// JS bindings for TextEncoderStream: constructor, accessors, and the
/// fixed "utf-8" `encoding` property.
pub const JsApi = struct {
    pub const bridge = js.Bridge(TextEncoderStream);
    pub const Meta = struct {
        pub const name = "TextEncoderStream";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
        // Weakly held by the JS engine; the GC finalizer drives deinit.
        pub const weak = true;
        pub const finalizer = bridge.finalizer(TextEncoderStream.deinit);
    };
    pub const constructor = bridge.constructor(TextEncoderStream.init, .{});
    pub const encoding = bridge.property("utf-8", .{ .template = false });
    pub const readable = bridge.accessor(TextEncoderStream.getReadable, null, .{});
    pub const writable = bridge.accessor(TextEncoderStream.getWritable, null, .{});
};
const testing = @import("../../../testing.zig");

// NOTE(review): this test drives the shared transform_stream.html fixture;
// confirm a dedicated text_encoder_stream.html fixture isn't intended.
test "WebApi: TextEncoderStream" {
    try testing.htmlRunner("streams/transform_stream.html", .{});
}

View File

@@ -87,15 +87,35 @@ pub fn parseList(arena: Allocator, input: []const u8, page: *Page) ParseError![]
var comma_pos: usize = trimmed.len;
var depth: usize = 0;
var in_quote: u8 = 0; // 0 = not in quotes, '"' or '\'' = in that quote type
var i: usize = 0;
while (i < trimmed.len) {
const c = trimmed[i];
if (in_quote != 0) {
// Inside a quoted string
if (c == '\\') {
// Skip escape sequence inside quotes
i += 1;
if (i < trimmed.len) i += 1;
} else if (c == in_quote) {
// Closing quote
in_quote = 0;
i += 1;
} else {
i += 1;
}
continue;
}
switch (c) {
'\\' => {
// Skip escape sequence (backslash + next character)
i += 1;
if (i < trimmed.len) i += 1;
},
'"', '\'' => {
in_quote = c;
i += 1;
},
'(' => {
depth += 1;
i += 1;

View File

@@ -24,6 +24,7 @@ const Page = @import("../../Page.zig");
const ReadableStreamDefaultReader = @import("ReadableStreamDefaultReader.zig");
const ReadableStreamDefaultController = @import("ReadableStreamDefaultController.zig");
const WritableStream = @import("WritableStream.zig");
const IS_DEBUG = @import("builtin").mode == .Debug;
@@ -51,6 +52,8 @@ _pull_fn: ?js.Function.Global = null,
_pulling: bool = false,
_pull_again: bool = false,
_cancel: ?Cancel = null,
_arena: std.mem.Allocator,
_rc: usize = 0,
const UnderlyingSource = struct {
start: ?js.Function = null,
@@ -67,13 +70,18 @@ const QueueingStrategy = struct {
pub fn init(src_: ?UnderlyingSource, strategy_: ?QueueingStrategy, page: *Page) !*ReadableStream {
const strategy: QueueingStrategy = strategy_ orelse .{};
const self = try page._factory.create(ReadableStream{
const arena = try page.getArena(.{ .debug = "ReadableStream" });
errdefer page.releaseArena(arena);
const self = try arena.create(ReadableStream);
self.* = .{
._page = page,
._state = .readable,
._arena = arena,
._reader = null,
._controller = undefined,
._stored_error = null,
});
};
self._controller = try ReadableStreamDefaultController.init(self, strategy.highWaterMark, page);
@@ -107,6 +115,23 @@ pub fn initWithData(data: []const u8, page: *Page) !*ReadableStream {
return stream;
}
/// Drops one reference; the last reference releases the stream's arena,
/// which owns the stream itself, its controller state, and queued chunks.
pub fn deinit(self: *ReadableStream, _: bool, page: *Page) void {
    const rc = self._rc;
    if (comptime IS_DEBUG) {
        // deinit on a zero-refcount stream is a double-free bug.
        std.debug.assert(rc != 0);
    }
    if (rc == 1) {
        // Last reference: freeing the arena also frees `self`, so the
        // counter doesn't need to be written back.
        page.releaseArena(self._arena);
    } else {
        self._rc = rc - 1;
    }
}

/// Adds a reference. Wrappers (readers, iterators, transform stream
/// facades) share the stream's lifetime through this refcount.
pub fn acquireRef(self: *ReadableStream) void {
    self._rc += 1;
}
pub fn getReader(self: *ReadableStream, page: *Page) !*ReadableStreamDefaultReader {
if (self.getLocked()) {
return error.ReaderLocked;
@@ -119,6 +144,12 @@ pub fn getReader(self: *ReadableStream, page: *Page) !*ReadableStreamDefaultRead
/// Detaches the current reader and drops its reference.
/// NOTE(review): unlike deinit, this can decrement the count to 0 without
/// releasing the arena — confirm the final release is always performed by
/// a later deinit (e.g. the JS finalizer).
pub fn releaseReader(self: *ReadableStream) void {
    self._reader = null;
    const rc = self._rc;
    if (comptime IS_DEBUG) {
        std.debug.assert(rc != 0);
    }
    self._rc = rc - 1;
}
pub fn getAsyncIterator(self: *ReadableStream, page: *Page) !*AsyncIterator {
@@ -233,6 +264,126 @@ pub fn cancel(self: *ReadableStream, reason: ?[]const u8, page: *Page) !js.Promi
return resolver.promise();
}
/// pipeThrough(transform) — pipes this readable stream through a transform stream,
/// returning the readable side. `transform` is a JS object with `readable` and `writable` properties.
const PipeTransform = struct {
    writable: *WritableStream,
    readable: *ReadableStream,
};

/// NOTE(review): only the source's lock is checked; the spec also requires
/// the transform's writable side to be unlocked — confirm.
pub fn pipeThrough(self: *ReadableStream, transform: PipeTransform, page: *Page) !*ReadableStream {
    if (self.getLocked()) {
        return error.ReaderLocked;
    }
    // Start async piping from this stream to the writable side
    try PipeState.startPipe(self, transform.writable, null, page);
    return transform.readable;
}
/// pipeTo(writable) — pipes this readable stream to a writable stream.
/// Returns a promise that resolves when piping is complete.
pub fn pipeTo(self: *ReadableStream, destination: *WritableStream, page: *Page) !js.Promise {
    if (self.getLocked()) {
        return page.js.local.?.rejectPromise("ReadableStream is locked");
    }
    const local = page.js.local.?;
    var pipe_resolver = local.createPromiseResolver();
    const promise = pipe_resolver.promise();
    // Persist the resolver: it is settled later from a microtask callback,
    // after this local scope is gone.
    const persisted_resolver = try pipe_resolver.persist();
    try PipeState.startPipe(self, destination, persisted_resolver, page);
    return promise;
}
/// State for an async pipe operation.
/// Heap-allocated (page arena) because it must outlive the JS microtask
/// callbacks that drive the read/write pump.
const PipeState = struct {
    // Locked reader on the source stream; released on completion.
    reader: *ReadableStreamDefaultReader,
    // Destination for every chunk read from the source.
    writable: *WritableStream,
    // JS context id captured at start — not read in the visible code;
    // presumably used elsewhere to detect a stale context. TODO confirm.
    context_id: usize,
    // Present only for pipeTo(): resolved when the pipe settles.
    resolver: ?js.PromiseResolver.Global,

    /// Locks the source stream with a reader and starts the read pump.
    fn startPipe(
        stream: *ReadableStream,
        writable: *WritableStream,
        resolver: ?js.PromiseResolver.Global,
        page: *Page,
    ) !void {
        const reader = try stream.getReader(page);
        const state = try page.arena.create(PipeState);
        state.* = .{
            .reader = reader,
            .writable = writable,
            .context_id = page.js.id,
            .resolver = resolver,
        };
        try state.pumpRead(page);
    }

    /// Issues one reader.read() and chains the fulfilled/rejected callbacks.
    fn pumpRead(state: *PipeState, page: *Page) !void {
        const local = page.js.local.?;
        // Call reader.read() which returns a Promise
        const read_promise = try state.reader.read(page);
        // Create JS callback functions for .then() and .catch()
        const then_fn = local.newCallback(onReadFulfilled, state);
        const catch_fn = local.newCallback(onReadRejected, state);
        _ = read_promise.thenAndCatch(then_fn, catch_fn) catch {
            // Chaining failed: settle the pipe rather than leak the lock.
            state.finish(local);
        };
    }

    const ReadData = struct {
        done: bool,
        value: js.Value,
    };

    /// .then() callback: forwards the chunk (or completes on done) and
    /// schedules the next read.
    fn onReadFulfilled(self: *PipeState, data_: ?ReadData, page: *Page) void {
        const local = page.js.local.?;
        const data = data_ orelse {
            return self.finish(local);
        };
        if (data.done) {
            // Stream is finished, close the writable side
            self.writable.closeStream(page) catch {};
            self.reader.releaseLock();
            if (self.resolver) |r| {
                local.toLocal(r).resolve("pipeTo complete", {});
            }
            return;
        }
        const value = data.value;
        if (value.isUndefined()) {
            return self.finish(local);
        }
        self.writable.writeChunk(value, page) catch {
            return self.finish(local);
        };
        // Continue reading the next chunk
        self.pumpRead(page) catch {
            self.finish(local);
        };
    }

    /// .catch() callback: a read rejected; settle the pipe.
    fn onReadRejected(self: *PipeState, page: *Page) void {
        self.finish(page.js.local.?);
    }

    /// Releases the reader lock and resolves the pipeTo promise (if any).
    /// NOTE(review): error paths resolve (not reject) the promise, and the
    /// writable side is not aborted — confirm against the Streams spec's
    /// pipe error-propagation rules.
    fn finish(self: *PipeState, local: *const js.Local) void {
        self.reader.releaseLock();
        if (self.resolver) |r| {
            local.toLocal(r).resolve("pipe finished", {});
        }
    }
};
const Cancel = struct {
callback: ?js.Function.Global = null,
reason: ?[]const u8 = null,
@@ -246,11 +397,15 @@ pub const JsApi = struct {
pub const name = "ReadableStream";
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
pub const weak = true;
pub const finalizer = bridge.finalizer(ReadableStream.deinit);
};
pub const constructor = bridge.constructor(ReadableStream.init, .{});
pub const cancel = bridge.function(ReadableStream.cancel, .{});
pub const getReader = bridge.function(ReadableStream.getReader, .{});
pub const pipeThrough = bridge.function(ReadableStream.pipeThrough, .{});
pub const pipeTo = bridge.function(ReadableStream.pipeTo, .{});
pub const locked = bridge.accessor(ReadableStream.getLocked, null, .{});
pub const symbol_async_iterator = bridge.iterator(ReadableStream.getAsyncIterator, .{ .async = true });
};
@@ -267,6 +422,14 @@ pub const AsyncIterator = struct {
});
}
pub fn acquireRef(self: *AsyncIterator) void {
self._stream.acquireRef();
}
pub fn deinit(self: *AsyncIterator, shutdown: bool, page: *Page) void {
self._stream.deinit(shutdown, page);
}
pub fn next(self: *AsyncIterator, page: *Page) !js.Promise {
return self._reader.read(page);
}
@@ -283,6 +446,8 @@ pub const AsyncIterator = struct {
pub const name = "ReadableStreamAsyncIterator";
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
pub const weak = true;
pub const finalizer = bridge.finalizer(AsyncIterator.deinit);
};
pub const next = bridge.function(ReadableStream.AsyncIterator.next, .{});

View File

@@ -27,24 +27,27 @@ const ReadableStreamDefaultReader = @import("ReadableStreamDefaultReader.zig");
const IS_DEBUG = @import("builtin").mode == .Debug;
/// ReadableStreamDefaultController uses ReadableStream's arena to make
/// allocation. Indeed, the controller is owned by its ReadableStream.
const ReadableStreamDefaultController = @This();
/// A queued stream value: raw bytes, text, or an arbitrary persisted JS
/// value (enqueued via the JS-facing enqueueValue path).
pub const Chunk = union(enum) {
    // the order matters, sorry.
    uint8array: js.TypedArray(u8),
    string: []const u8,
    js_value: js.Value.Global,

    /// Deep-copies slice-backed variants into `allocator`. A js_value is a
    /// persisted global handle and is shared as-is, not copied.
    pub fn dupe(self: Chunk, allocator: std.mem.Allocator) !Chunk {
        return switch (self) {
            .string => |str| .{ .string = try allocator.dupe(u8, str) },
            .uint8array => |arr| .{ .uint8array = try arr.dupe(allocator) },
            .js_value => |val| .{ .js_value = val },
        };
    }
};
_page: *Page,
_stream: *ReadableStream,
_arena: std.mem.Allocator,
_queue: std.ArrayList(Chunk),
_pending_reads: std.ArrayList(js.PromiseResolver.Global),
_high_water_mark: u32,
@@ -54,15 +57,22 @@ pub fn init(stream: *ReadableStream, high_water_mark: u32, page: *Page) !*Readab
._page = page,
._queue = .empty,
._stream = stream,
._arena = page.arena,
._pending_reads = .empty,
._high_water_mark = high_water_mark,
});
}
pub fn acquireRef(self: *ReadableStreamDefaultController) void {
self._stream.acquireRef();
}
pub fn deinit(self: *ReadableStreamDefaultController, shutdown: bool, page: *Page) void {
self._stream.deinit(shutdown, page);
}
pub fn addPendingRead(self: *ReadableStreamDefaultController, page: *Page) !js.Promise {
const resolver = page.js.local.?.createPromiseResolver();
try self._pending_reads.append(self._arena, try resolver.persist());
try self._pending_reads.append(self._stream._arena, try resolver.persist());
return resolver.promise();
}
@@ -72,8 +82,8 @@ pub fn enqueue(self: *ReadableStreamDefaultController, chunk: Chunk) !void {
}
if (self._pending_reads.items.len == 0) {
const chunk_copy = try chunk.dupe(self._page.arena);
return self._queue.append(self._arena, chunk_copy);
const chunk_copy = try chunk.dupe(self._stream._arena);
return self._queue.append(self._stream._arena, chunk_copy);
}
// I know, this is ouch! But we expect to have very few (if any)
@@ -98,6 +108,40 @@ pub fn enqueue(self: *ReadableStreamDefaultController, chunk: Chunk) !void {
ls.toLocal(resolver).resolve("stream enqueue", result);
}
/// Enqueue a raw JS value, preserving its type (number, bool, object, etc.).
/// Used by the JS-facing API; internal Zig callers should use enqueue(Chunk).
pub fn enqueueValue(self: *ReadableStreamDefaultController, value: js.Value) !void {
    if (self._stream._state != .readable) {
        return error.StreamNotReadable;
    }
    if (self._pending_reads.items.len == 0) {
        // No reader waiting: persist the value so it survives past this JS
        // call, then queue it in the stream's arena.
        const persisted = try value.persist();
        try self._queue.append(self._stream._arena, .{ .js_value = persisted });
        return;
    }
    // A read is pending: hand the value directly to the oldest resolver.
    const resolver = self._pending_reads.orderedRemove(0);
    const persisted = try value.persist();
    const result = ReadableStreamDefaultReader.ReadResult{
        .done = false,
        .value = .{ .js_value = persisted },
    };
    if (comptime IS_DEBUG) {
        if (self._page.js.local == null) {
            log.fatal(.bug, "null context scope", .{ .src = "ReadableStreamDefaultController.enqueueValue", .url = self._page.url });
            std.debug.assert(self._page.js.local != null);
        }
    }
    var ls: js.Local.Scope = undefined;
    self._page.js.localScope(&ls);
    defer ls.deinit();
    ls.toLocal(resolver).resolve("stream enqueue value", result);
}
pub fn close(self: *ReadableStreamDefaultController) !void {
if (self._stream._state != .readable) {
return error.StreamNotReadable;
@@ -134,7 +178,7 @@ pub fn doError(self: *ReadableStreamDefaultController, err: []const u8) !void {
}
self._stream._state = .errored;
self._stream._stored_error = try self._page.arena.dupe(u8, err);
self._stream._stored_error = try self._stream._arena.dupe(u8, err);
// Reject all pending reads
for (self._pending_reads.items) |resolver| {
@@ -174,9 +218,11 @@ pub const JsApi = struct {
pub const name = "ReadableStreamDefaultController";
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
pub const weak = true;
pub const finalizer = bridge.finalizer(ReadableStreamDefaultController.deinit);
};
pub const enqueue = bridge.function(ReadableStreamDefaultController.enqueue, .{});
pub const enqueue = bridge.function(ReadableStreamDefaultController.enqueueValue, .{});
pub const close = bridge.function(ReadableStreamDefaultController.close, .{});
pub const @"error" = bridge.function(ReadableStreamDefaultController.doError, .{});
pub const desiredSize = bridge.accessor(ReadableStreamDefaultController.getDesiredSize, null, .{});

View File

@@ -19,6 +19,8 @@
const std = @import("std");
const js = @import("../../js/js.zig");
const IS_DEBUG = @import("builtin").mode == .Debug;
const Page = @import("../../Page.zig");
const ReadableStream = @import("ReadableStream.zig");
const ReadableStreamDefaultController = @import("ReadableStreamDefaultController.zig");
@@ -35,6 +37,21 @@ pub fn init(stream: *ReadableStream, page: *Page) !*ReadableStreamDefaultReader
});
}
/// Adds a reference on the underlying stream; readers share its lifetime.
pub fn acquireRef(self: *ReadableStreamDefaultReader) void {
    const stream = self._stream orelse {
        // A released reader should never be re-referenced.
        if (comptime IS_DEBUG) {
            std.debug.assert(false);
        }
        return;
    };
    stream.acquireRef();
}

/// Drops this reader's reference; no-op if the lock was already released.
pub fn deinit(self: *ReadableStreamDefaultReader, shutdown: bool, page: *Page) void {
    const stream = self._stream orelse return;
    stream.deinit(shutdown, page);
}
pub const ReadResult = struct {
done: bool,
value: Chunk,
@@ -44,11 +61,13 @@ pub const ReadResult = struct {
empty,
string: []const u8,
uint8array: js.TypedArray(u8),
js_value: js.Value.Global,
pub fn fromChunk(chunk: ReadableStreamDefaultController.Chunk) Chunk {
return switch (chunk) {
.string => |s| .{ .string = s },
.uint8array => |arr| .{ .uint8array = arr },
.js_value => |val| .{ .js_value = val },
};
}
};
@@ -108,6 +127,8 @@ pub const JsApi = struct {
pub const name = "ReadableStreamDefaultReader";
pub const prototype_chain = bridge.prototypeChain();
pub var class_id: bridge.ClassId = undefined;
pub const weak = true;
pub const finalizer = bridge.finalizer(ReadableStreamDefaultReader.deinit);
};
pub const read = bridge.function(ReadableStreamDefaultReader.read, .{});

View File

@@ -0,0 +1,218 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const ReadableStream = @import("ReadableStream.zig");
const ReadableStreamDefaultController = @import("ReadableStreamDefaultController.zig");
const WritableStream = @import("WritableStream.zig");
const TransformStream = @This();
pub const DefaultController = TransformStreamDefaultController;
pub const ZigTransformFn = *const fn (*TransformStreamDefaultController, js.Value) anyerror!void;
_readable: *ReadableStream,
_writable: *WritableStream,
_controller: *TransformStreamDefaultController,
const Transformer = struct {
start: ?js.Function = null,
transform: ?js.Function.Global = null,
flush: ?js.Function.Global = null,
};
/// Allocates the stream shell shared by both constructors: the readable
/// side plus a TransformStream with writable/controller left undefined,
/// to be filled in by the caller.
fn initBase(page: *Page) !*TransformStream {
    const readable = try ReadableStream.init(null, null, page);
    return page._factory.create(TransformStream{
        ._readable = readable,
        ._writable = undefined,
        ._controller = undefined,
    });
}

/// JS-facing constructor: wires an optional JS transformer (start /
/// transform / flush callbacks) into a new TransformStream.
pub fn init(transformer_: ?Transformer, page: *Page) !*TransformStream {
    const self = try initBase(page);
    const transform_controller = try TransformStreamDefaultController.init(
        self,
        if (transformer_) |t| t.transform else null,
        if (transformer_) |t| t.flush else null,
        null,
        page,
    );
    self._controller = transform_controller;
    self._writable = try WritableStream.initForTransform(self, page);
    // Per spec, the transformer's start callback runs during construction.
    if (transformer_) |transformer| {
        if (transformer.start) |start| {
            try start.call(void, .{transform_controller});
        }
    }
    return self;
}

/// Zig-facing constructor used by TextEncoderStream / TextDecoderStream:
/// the transform is a native function, with no JS callbacks involved.
pub fn initWithZigTransform(zig_transform: ZigTransformFn, page: *Page) !*TransformStream {
    const self = try initBase(page);
    self._controller = try TransformStreamDefaultController.init(self, null, null, zig_transform, page);
    self._writable = try WritableStream.initForTransform(self, page);
    return self;
}
pub fn acquireRef(self: *TransformStream) void {
self._readable.acquireRef();
}
pub fn deinit(self: *TransformStream, shutdown: bool, page: *Page) void {
self._readable.deinit(shutdown, page);
}
/// Called by the writable side for each incoming chunk: runs the transform
/// (native or JS), which enqueues its output on the readable side.
pub fn transformWrite(self: *TransformStream, chunk: js.Value, page: *Page) !void {
    if (self._controller._zig_transform_fn) |zig_fn| {
        // Zig-level transform (used by TextEncoderStream etc.)
        try zig_fn(self._controller, chunk);
        return;
    }
    if (self._controller._transform_fn) |transform_fn| {
        // JS transformer: needs a live local scope to invoke the callback.
        var ls: js.Local.Scope = undefined;
        page.js.localScope(&ls);
        defer ls.deinit();
        try ls.toLocal(transform_fn).call(void, .{ chunk, self._controller });
    } else {
        // Identity transform.
        // NOTE(review): this stringifies the chunk rather than passing it
        // through with its type preserved (cf. enqueueValue) — confirm
        // against the spec's identity TransformStream behavior.
        try self._readable._controller.enqueue(.{ .string = try chunk.toStringSlice() });
    }
}
/// Called when the writable side closes: runs the optional JS flush
/// callback, then closes the readable side.
pub fn transformClose(self: *TransformStream, page: *Page) !void {
    if (self._controller._flush_fn) |flush_fn| {
        var ls: js.Local.Scope = undefined;
        page.js.localScope(&ls);
        defer ls.deinit();
        try ls.toLocal(flush_fn).call(void, .{self._controller});
    }
    try self._readable._controller.close();
}
/// `readable` getter.
pub fn getReadable(self: *const TransformStream) *ReadableStream {
    return self._readable;
}

/// `writable` getter.
pub fn getWritable(self: *const TransformStream) *WritableStream {
    return self._writable;
}
/// JS bindings for TransformStream (constructor plus the two side
/// accessors).
pub const JsApi = struct {
    pub const bridge = js.Bridge(TransformStream);
    pub const Meta = struct {
        pub const name = "TransformStream";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
        // Weakly held by the JS engine; the GC finalizer drives deinit.
        pub const weak = true;
        pub const finalizer = bridge.finalizer(TransformStream.deinit);
    };
    pub const constructor = bridge.constructor(TransformStream.init, .{});
    pub const readable = bridge.accessor(TransformStream.getReadable, null, .{});
    pub const writable = bridge.accessor(TransformStream.getWritable, null, .{});
};

/// Types this module contributes to the JS type registry.
pub fn registerTypes() []const type {
    return &.{
        TransformStream,
        TransformStreamDefaultController,
    };
}
/// Controller handed to transformer callbacks; forwards enqueue/error/
/// terminate onto the owning TransformStream's readable side.
pub const TransformStreamDefaultController = struct {
    _stream: *TransformStream,
    // JS transform callback (mutually exclusive with _zig_transform_fn).
    _transform_fn: ?js.Function.Global,
    // Optional JS flush callback, run when the writable side closes.
    _flush_fn: ?js.Function.Global,
    // Native transform used by TextEncoderStream / TextDecoderStream.
    _zig_transform_fn: ?ZigTransformFn,

    pub fn init(
        stream: *TransformStream,
        transform_fn: ?js.Function.Global,
        flush_fn: ?js.Function.Global,
        zig_transform_fn: ?ZigTransformFn,
        page: *Page,
    ) !*TransformStreamDefaultController {
        return page._factory.create(TransformStreamDefaultController{
            ._stream = stream,
            ._transform_fn = transform_fn,
            ._flush_fn = flush_fn,
            ._zig_transform_fn = zig_transform_fn,
        });
    }

    /// The controller shares the stream's lifetime via its refcount.
    pub fn acquireRef(self: *TransformStreamDefaultController) void {
        self._stream.acquireRef();
    }

    pub fn deinit(self: *TransformStreamDefaultController, shutdown: bool, page: *Page) void {
        self._stream.deinit(shutdown, page);
    }

    /// Enqueue a Zig-level chunk on the readable side.
    pub fn enqueue(self: *TransformStreamDefaultController, chunk: ReadableStreamDefaultController.Chunk) !void {
        try self._stream._readable._controller.enqueue(chunk);
    }

    /// Enqueue a raw JS value, preserving its type. Used by the JS-facing API.
    pub fn enqueueValue(self: *TransformStreamDefaultController, value: js.Value) !void {
        try self._stream._readable._controller.enqueueValue(value);
    }

    /// Error the readable side with `reason`.
    pub fn doError(self: *TransformStreamDefaultController, reason: []const u8) !void {
        try self._stream._readable._controller.doError(reason);
    }

    /// terminate() — closes the readable side.
    /// NOTE(review): the spec also errors the writable side — confirm.
    pub fn terminate(self: *TransformStreamDefaultController) !void {
        try self._stream._readable._controller.close();
    }

    pub const JsApi = struct {
        pub const bridge = js.Bridge(TransformStreamDefaultController);
        pub const Meta = struct {
            pub const name = "TransformStreamDefaultController";
            pub const prototype_chain = bridge.prototypeChain();
            pub var class_id: bridge.ClassId = undefined;
            pub const weak = true;
            pub const finalizer = bridge.finalizer(TransformStreamDefaultController.deinit);
        };
        pub const enqueue = bridge.function(TransformStreamDefaultController.enqueueValue, .{});
        pub const @"error" = bridge.function(TransformStreamDefaultController.doError, .{});
        pub const terminate = bridge.function(TransformStreamDefaultController.terminate, .{});
    };
};

View File

@@ -0,0 +1,156 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const WritableStreamDefaultWriter = @import("WritableStreamDefaultWriter.zig");
const WritableStreamDefaultController = @import("WritableStreamDefaultController.zig");
const TransformStream = @import("TransformStream.zig");
const WritableStream = @This();
/// Lifecycle states of a WritableStream.
pub const State = enum {
    writable,
    closed,
    errored,
};

// Current lifecycle state.
_state: State,
// Active writer, if any; non-null means the stream is locked.
_writer: ?*WritableStreamDefaultWriter,
_controller: *WritableStreamDefaultController,
// Reason recorded when the stream errors (see controller doError).
_stored_error: ?[]const u8,
// Sink callbacks supplied by the JS constructor (null for transforms).
_write_fn: ?js.Function.Global,
_close_fn: ?js.Function.Global,
// Set when this stream is the writable end of a TransformStream.
_transform_stream: ?*TransformStream,

/// Underlying sink accepted by the JS constructor.
/// NOTE(review): `abort` and `type` are parsed but never read in the
/// visible code — confirm they are intentionally unimplemented.
const UnderlyingSink = struct {
    start: ?js.Function = null,
    write: ?js.Function.Global = null,
    close: ?js.Function.Global = null,
    abort: ?js.Function.Global = null,
    type: ?[]const u8 = null,
};
/// Allocates a WritableStream with its controller, all callbacks unset.
/// Shared by the JS constructor and the transform-stream constructor.
fn initBare(transform_stream: ?*TransformStream, page: *Page) !*WritableStream {
    const self = try page._factory.create(WritableStream{
        ._state = .writable,
        ._writer = null,
        ._controller = undefined,
        ._stored_error = null,
        ._write_fn = null,
        ._close_fn = null,
        ._transform_stream = transform_stream,
    });
    self._controller = try WritableStreamDefaultController.init(self, page);
    return self;
}

/// JS-facing constructor: wires the optional underlying sink's start/
/// write/close callbacks. The sink's start callback runs immediately,
/// receiving the controller, per spec.
pub fn init(sink_: ?UnderlyingSink, page: *Page) !*WritableStream {
    const self = try initBare(null, page);
    if (sink_) |sink| {
        if (sink.start) |start| {
            try start.call(void, .{self._controller});
        }
        self._write_fn = sink.write;
        self._close_fn = sink.close;
    }
    return self;
}

/// Constructor for the writable end of a TransformStream: writes and
/// closes are forwarded to the transform instead of sink callbacks.
pub fn initForTransform(transform_stream: *TransformStream, page: *Page) !*WritableStream {
    return initBare(transform_stream, page);
}
/// getWriter() — locks the stream to a new writer.
/// Returns error.WriterLocked if a writer is already attached.
pub fn getWriter(self: *WritableStream, page: *Page) !*WritableStreamDefaultWriter {
    if (self.getLocked()) {
        return error.WriterLocked;
    }
    const writer = try WritableStreamDefaultWriter.init(self, page);
    self._writer = writer;
    return writer;
}

/// `locked` getter — true while a writer holds the stream.
pub fn getLocked(self: *const WritableStream) bool {
    return self._writer != null;
}
/// Delivers one chunk: forwards to the transform when this is a transform
/// stream's writable end, otherwise invokes the sink's write callback.
/// Silently ignored when the stream is not writable (closed/errored).
pub fn writeChunk(self: *WritableStream, chunk: js.Value, page: *Page) !void {
    if (self._state != .writable) return;
    if (self._transform_stream) |ts| {
        try ts.transformWrite(chunk, page);
        return;
    }
    if (self._write_fn) |write_fn| {
        // JS sink callback needs a live local scope.
        var ls: js.Local.Scope = undefined;
        page.js.localScope(&ls);
        defer ls.deinit();
        try ls.toLocal(write_fn).call(void, .{ chunk, self._controller });
    }
}
/// Closes the stream: flips state to closed, then flushes the transform
/// (transform streams) or invokes the sink's close callback.
/// NOTE(review): state is set to .closed *before* the flush/close callback
/// runs — if that callback errors, the stream stays marked closed; confirm
/// intended.
pub fn closeStream(self: *WritableStream, page: *Page) !void {
    if (self._state != .writable) return;
    self._state = .closed;
    if (self._transform_stream) |ts| {
        try ts.transformClose(page);
        return;
    }
    if (self._close_fn) |close_fn| {
        var ls: js.Local.Scope = undefined;
        page.js.localScope(&ls);
        defer ls.deinit();
        try ls.toLocal(close_fn).call(void, .{self._controller});
    }
}
/// JS bindings for WritableStream.
/// NOTE(review): unlike the other stream types, Meta declares no `weak` /
/// `finalizer` pair here — confirm whether WritableStream needs one (it is
/// factory-allocated and has no deinit in the visible code).
pub const JsApi = struct {
    pub const bridge = js.Bridge(WritableStream);
    pub const Meta = struct {
        pub const name = "WritableStream";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
    };
    pub const constructor = bridge.constructor(WritableStream.init, .{});
    pub const getWriter = bridge.function(WritableStream.getWriter, .{});
    pub const locked = bridge.accessor(WritableStream.getLocked, null, .{});
};

/// Types this module contributes to the JS type registry.
pub fn registerTypes() []const type {
    return &.{
        WritableStream,
    };
}

View File

@@ -0,0 +1,49 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const WritableStream = @import("WritableStream.zig");
const WritableStreamDefaultController = @This();
_stream: *WritableStream,
/// Creates the controller for `stream`; allocated from the page factory.
pub fn init(stream: *WritableStream, page: *Page) !*WritableStreamDefaultController {
    return page._factory.create(WritableStreamDefaultController{
        ._stream = stream,
    });
}
/// error(reason) — moves the stream to the errored state.
/// NOTE(review): `reason` is stored without duplication, unlike
/// ReadableStream's doError which dupes into its arena — confirm the slice
/// outlives the stream (a JS-provided string may not).
pub fn doError(self: *WritableStreamDefaultController, reason: []const u8) void {
    if (self._stream._state != .writable) return;
    self._stream._state = .errored;
    self._stream._stored_error = reason;
}
/// JS bindings for WritableStreamDefaultController (error() only).
pub const JsApi = struct {
    pub const bridge = js.Bridge(WritableStreamDefaultController);
    pub const Meta = struct {
        pub const name = "WritableStreamDefaultController";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
    };
    pub const @"error" = bridge.function(WritableStreamDefaultController.doError, .{});
};

View File

@@ -0,0 +1,109 @@
// Copyright (C) 2023-2026 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
const js = @import("../../js/js.zig");
const Page = @import("../../Page.zig");
const WritableStream = @import("WritableStream.zig");

const WritableStreamDefaultWriter = @This();

// The stream this writer is locked to; null once releaseLock() has run.
_stream: ?*WritableStream,

/// Allocates a writer locked to `stream` out of the page's object factory.
pub fn init(stream: *WritableStream, page: *Page) !*WritableStreamDefaultWriter {
    return page._factory.create(WritableStreamDefaultWriter{
        ._stream = stream,
    });
}

/// Writes `chunk` to the underlying stream. Returns a rejected promise if
/// the writer has been released or the stream is not writable; otherwise
/// forwards the chunk and resolves.
pub fn write(self: *WritableStreamDefaultWriter, chunk: js.Value, page: *Page) !js.Promise {
    const stream = self._stream orelse {
        return page.js.local.?.rejectPromise("Writer has been released");
    };
    if (stream._state != .writable) {
        return page.js.local.?.rejectPromise("Stream is not writable");
    }
    try stream.writeChunk(chunk, page);
    return page.js.local.?.resolvePromise(.{});
}

/// Closes the underlying stream. Returns a rejected promise if the writer
/// has been released or the stream is not writable.
pub fn close(self: *WritableStreamDefaultWriter, page: *Page) !js.Promise {
    const stream = self._stream orelse {
        return page.js.local.?.rejectPromise("Writer has been released");
    };
    if (stream._state != .writable) {
        return page.js.local.?.rejectPromise("Stream is not writable");
    }
    try stream.closeStream(page);
    return page.js.local.?.resolvePromise(.{});
}

/// Releases this writer's lock on the stream. Safe to call more than once;
/// subsequent operations on the writer reject with a "released" error.
pub fn releaseLock(self: *WritableStreamDefaultWriter) void {
    if (self._stream) |stream| {
        stream._writer = null;
        self._stream = null;
    }
}

/// Getter for the `closed` promise. Rejects if the writer has been
/// released.
/// NOTE(review): per the Streams spec this promise should only resolve
/// once the stream actually closes; we currently resolve immediately
/// regardless of state (the previous `.closed` check was dead code —
/// both branches resolved identically).
pub fn getClosed(self: *WritableStreamDefaultWriter, page: *Page) !js.Promise {
    if (self._stream == null) {
        return page.js.local.?.rejectPromise("Writer has been released");
    }
    return page.js.local.?.resolvePromise(.{});
}

/// Getter for `desiredSize`: null after release or on an errored stream,
/// 0 when closed, and a fixed 1 while writable (no real backpressure
/// accounting yet).
pub fn getDesiredSize(self: *const WritableStreamDefaultWriter) ?i32 {
    const stream = self._stream orelse return null;
    return switch (stream._state) {
        .writable => 1,
        .closed => 0,
        .errored => null,
    };
}

/// Getter for the `ready` promise. Always resolved: no backpressure is
/// modeled, so the writer is always ready.
pub fn getReady(self: *WritableStreamDefaultWriter, page: *Page) !js.Promise {
    _ = self;
    return page.js.local.?.resolvePromise(.{});
}

pub const JsApi = struct {
    pub const bridge = js.Bridge(WritableStreamDefaultWriter);
    pub const Meta = struct {
        pub const name = "WritableStreamDefaultWriter";
        pub const prototype_chain = bridge.prototypeChain();
        pub var class_id: bridge.ClassId = undefined;
    };
    pub const write = bridge.function(WritableStreamDefaultWriter.write, .{});
    pub const close = bridge.function(WritableStreamDefaultWriter.close, .{});
    pub const releaseLock = bridge.function(WritableStreamDefaultWriter.releaseLock, .{});
    pub const closed = bridge.accessor(WritableStreamDefaultWriter.getClosed, null, .{});
    pub const ready = bridge.accessor(WritableStreamDefaultWriter.getReady, null, .{});
    pub const desiredSize = bridge.accessor(WritableStreamDefaultWriter.getDesiredSize, null, .{});
};

View File

@@ -406,10 +406,10 @@ pub fn requestAuthRequired(bc: anytype, intercept: *const Notification.RequestAu
.fetch => "Fetch",
},
.authChallenge = .{
.source = if (challenge.source == .server) "Server" else "Proxy",
.origin = "", // TODO get origin, could be the proxy address for example.
.scheme = if (challenge.scheme == .digest) "digest" else "basic",
.realm = challenge.realm,
.source = if (challenge.source) |s| (if (s == .server) "Server" else "Proxy") else "",
.scheme = if (challenge.scheme) |s| (if (s == .digest) "digest" else "basic") else "",
.realm = challenge.realm orelse "",
},
.networkId = &id.toRequestId(transfer.id),
}, .{ .session_id = session_id });

View File

@@ -379,13 +379,22 @@ pub fn pageNavigated(arena: Allocator, bc: anytype, event: *const Notification.P
}, .{ .session_id = session_id });
}
// When we actually recreated the context we should have the inspector send this event, see: resetContextGroup
// Sending this event will tell the client that the context ids they had are invalid and the context should be dropped
// The client will expect us to send new contextCreated events, such that the client has new id's for the active contexts.
try cdp.sendEvent("Runtime.executionContextsCleared", null, .{ .session_id = session_id });
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
// When we actually recreated the context we should have the inspector send
// this event, see: resetContextGroup Sending this event will tell the
// client that the context ids they had are invalid and the context should
// be dropped. The client will expect us to send new contextCreated events,
// such that the client has new id's for the active contexts.
// Only send executionContextsCleared for main frame navigations. For child
// frames (iframes), clearing all contexts would destroy the main frame's
// context, causing Puppeteer's page.evaluate()/page.content() to hang
// forever.
if (event.frame_id == page._frame_id) {
try cdp.sendEvent("Runtime.executionContextsCleared", null, .{ .session_id = session_id });
}
{
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
const aux_data = try std.fmt.allocPrint(arena, "{{\"isDefault\":true,\"type\":\"default\",\"frameId\":\"{s}\",\"loaderId\":\"{s}\"}}", .{ frame_id, loader_id });
var ls: js.Local.Scope = undefined;

View File

@@ -31,6 +31,7 @@ pub fn processMessage(cmd: anytype) !void {
const action = std.meta.stringToEnum(enum {
getTargets,
attachToTarget,
attachToBrowserTarget,
closeTarget,
createBrowserContext,
createTarget,
@@ -47,6 +48,7 @@ pub fn processMessage(cmd: anytype) !void {
switch (action) {
.getTargets => return getTargets(cmd),
.attachToTarget => return attachToTarget(cmd),
.attachToBrowserTarget => return attachToBrowserTarget(cmd),
.closeTarget => return closeTarget(cmd),
.createBrowserContext => return createBrowserContext(cmd),
.createTarget => return createTarget(cmd),
@@ -79,7 +81,7 @@ fn getTargets(cmd: anytype) !void {
.targetInfos = [_]TargetInfo{.{
.targetId = target_id,
.type = "page",
.title = bc.getTitle() orelse "about:blank",
.title = bc.getTitle() orelse "",
.url = bc.getURL() orelse "about:blank",
.attached = true,
.canAccessOpener = false,
@@ -207,7 +209,7 @@ fn createTarget(cmd: anytype) !void {
.targetInfo = TargetInfo{
.attached = false,
.targetId = target_id,
.title = "about:blank",
.title = "",
.browserContextId = bc.id,
.url = "about:blank",
},
@@ -243,14 +245,31 @@ fn attachToTarget(cmd: anytype) !void {
return error.UnknownTargetId;
}
if (bc.session_id == null) {
try doAttachtoTarget(cmd, target_id);
}
try doAttachtoTarget(cmd, target_id);
return cmd.sendResult(
.{ .sessionId = bc.session_id },
.{ .include_session_id = false },
);
return cmd.sendResult(.{ .sessionId = bc.session_id }, .{});
}
fn attachToBrowserTarget(cmd: anytype) !void {
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
const session_id = bc.session_id orelse cmd.cdp.session_id_gen.next();
try cmd.sendEvent("Target.attachedToTarget", AttachToTarget{
.sessionId = session_id,
.targetInfo = TargetInfo{
.targetId = bc.id, // We use the browser context id as the browser's target id.
.title = "",
.url = "",
.type = "browser",
// Chrome doesn't send a browserContextId in this case.
.browserContextId = null,
},
}, .{});
bc.session_id = session_id;
return cmd.sendResult(.{ .sessionId = bc.session_id }, .{});
}
fn closeTarget(cmd: anytype) !void {
@@ -311,7 +330,7 @@ fn getTargetInfo(cmd: anytype) !void {
.targetInfo = TargetInfo{
.targetId = target_id,
.type = "page",
.title = bc.getTitle() orelse "about:blank",
.title = bc.getTitle() orelse "",
.url = bc.getURL() orelse "about:blank",
.attached = true,
.canAccessOpener = false,
@@ -323,7 +342,7 @@ fn getTargetInfo(cmd: anytype) !void {
.targetInfo = TargetInfo{
.targetId = "TID-STARTUP-B",
.type = "browser",
.title = "about:blank",
.title = "",
.url = "about:blank",
.attached = true,
.canAccessOpener = false,
@@ -442,8 +461,8 @@ fn setAutoAttach(cmd: anytype) !void {
.targetInfo = TargetInfo{
.type = "page",
.targetId = "TID-STARTUP-P",
.title = "New Private Tab",
.url = "chrome://newtab/",
.title = "",
.url = "about:blank",
.browserContextId = "BID-STARTUP",
},
}, .{});
@@ -451,22 +470,23 @@ fn setAutoAttach(cmd: anytype) !void {
fn doAttachtoTarget(cmd: anytype, target_id: []const u8) !void {
const bc = cmd.browser_context.?;
lp.assert(bc.session_id == null, "CDP.target.doAttachtoTarget not null session_id", .{});
const session_id = cmd.cdp.session_id_gen.next();
const session_id = bc.session_id orelse cmd.cdp.session_id_gen.next();
// extra_headers should not be kept on a new page or tab,
// currently we have only 1 page, we clear it just in case
bc.extra_headers.clearRetainingCapacity();
if (bc.session_id == null) {
// extra_headers should not be kept on a new page or tab,
// currently we have only 1 page, we clear it just in case
bc.extra_headers.clearRetainingCapacity();
}
try cmd.sendEvent("Target.attachedToTarget", AttachToTarget{
.sessionId = session_id,
.targetInfo = TargetInfo{
.targetId = target_id,
.title = "about:blank",
.url = "chrome://newtab/",
.title = bc.getTitle() orelse "",
.url = bc.getURL() orelse "about:blank",
.browserContextId = bc.id,
},
}, .{});
}, .{ .session_id = bc.session_id });
bc.session_id = session_id;
}
@@ -568,7 +588,7 @@ test "cdp.target: createTarget" {
// should create a browser context
const bc = ctx.cdp().browser_context.?;
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "about:blank", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
}
{
@@ -580,8 +600,8 @@ test "cdp.target: createTarget" {
// should create a browser context
const bc = ctx.cdp().browser_context.?;
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "about:blank", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.attachedToTarget", .{ .sessionId = bc.session_id.?, .targetInfo = .{ .url = "chrome://newtab/", .title = "about:blank", .attached = true, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.attachedToTarget", .{ .sessionId = bc.session_id.?, .targetInfo = .{ .url = "about:blank", .title = "", .attached = true, .type = "page", .canAccessOpener = false, .browserContextId = bc.id, .targetId = bc.target_id.? } }, .{});
}
var ctx = testing.context();
@@ -596,7 +616,7 @@ test "cdp.target: createTarget" {
try ctx.processMessage(.{ .id = 10, .method = "Target.createTarget", .params = .{ .browserContextId = "BID-9" } });
try testing.expectEqual(true, bc.target_id != null);
try ctx.expectSentResult(.{ .targetId = bc.target_id.? }, .{ .id = 10 });
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "about:blank", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = "BID-9", .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.targetCreated", .{ .targetInfo = .{ .url = "about:blank", .title = "", .attached = false, .type = "page", .canAccessOpener = false, .browserContextId = "BID-9", .targetId = bc.target_id.? } }, .{});
}
}
@@ -658,7 +678,7 @@ test "cdp.target: attachToTarget" {
try ctx.processMessage(.{ .id = 11, .method = "Target.attachToTarget", .params = .{ .targetId = "TID-000000000B" } });
const session_id = bc.session_id.?;
try ctx.expectSentResult(.{ .sessionId = session_id }, .{ .id = 11 });
try ctx.expectSentEvent("Target.attachedToTarget", .{ .sessionId = session_id, .targetInfo = .{ .url = "chrome://newtab/", .title = "about:blank", .attached = true, .type = "page", .canAccessOpener = false, .browserContextId = "BID-9", .targetId = bc.target_id.? } }, .{});
try ctx.expectSentEvent("Target.attachedToTarget", .{ .sessionId = session_id, .targetInfo = .{ .url = "about:blank", .title = "", .attached = true, .type = "page", .canAccessOpener = false, .browserContextId = "BID-9", .targetId = bc.target_id.? } }, .{});
}
}
@@ -671,7 +691,7 @@ test "cdp.target: getTargetInfo" {
try ctx.expectSentResult(.{
.targetInfo = .{
.type = "browser",
.title = "about:blank",
.title = "",
.url = "about:blank",
.attached = true,
.canAccessOpener = false,

View File

@@ -46,7 +46,7 @@ pub fn toLoaderId(page_id: u32) [14]u8 {
pub fn toRequestId(page_id: u32) [14]u8 {
var buf: [14]u8 = undefined;
_ = std.fmt.bufPrint(&buf, "RID-{d:0>10}", .{page_id}) catch unreachable;
_ = std.fmt.bufPrint(&buf, "REQ-{d:0>10}", .{page_id}) catch unreachable;
return buf;
}
@@ -174,8 +174,8 @@ test "id: toLoaderId" {
}
test "id: toRequestId" {
try testing.expectEqual("RID-0000000000", toRequestId(0));
try testing.expectEqual("RID-4294967295", toRequestId(4294967295));
try testing.expectEqual("REQ-0000000000", toRequestId(0));
try testing.expectEqual("REQ-4294967295", toRequestId(4294967295));
}
test "id: toInterceptId" {

View File

@@ -228,6 +228,8 @@ pub extern fn X25519_keypair(out_public_value: *[32]u8, out_private_key: *[32]u8
pub const NID_X25519 = @as(c_int, 948);
pub const EVP_PKEY_X25519 = NID_X25519;
pub const NID_ED25519 = 949;
pub const EVP_PKEY_ED25519 = NID_ED25519;
pub extern fn EVP_PKEY_new_raw_private_key(@"type": c_int, unused: ?*ENGINE, in: [*c]const u8, len: usize) [*c]EVP_PKEY;
pub extern fn EVP_PKEY_new_raw_public_key(@"type": c_int, unused: ?*ENGINE, in: [*c]const u8, len: usize) [*c]EVP_PKEY;
@@ -236,3 +238,11 @@ pub extern fn EVP_PKEY_CTX_free(ctx: ?*EVP_PKEY_CTX) void;
pub extern fn EVP_PKEY_derive_init(ctx: ?*EVP_PKEY_CTX) c_int;
pub extern fn EVP_PKEY_derive(ctx: ?*EVP_PKEY_CTX, key: [*c]u8, out_key_len: [*c]usize) c_int;
pub extern fn EVP_PKEY_derive_set_peer(ctx: ?*EVP_PKEY_CTX, peer: [*c]EVP_PKEY) c_int;
pub extern fn EVP_PKEY_free(pkey: ?*EVP_PKEY) void;
pub extern fn EVP_DigestSignInit(ctx: ?*EVP_MD_CTX, pctx: ?*?*EVP_PKEY_CTX, typ: ?*const EVP_MD, e: ?*ENGINE, pkey: ?*EVP_PKEY) c_int;
pub extern fn EVP_DigestSign(ctx: ?*EVP_MD_CTX, sig: [*c]u8, sig_len: *usize, data: [*c]const u8, data_len: usize) c_int;
pub extern fn EVP_MD_CTX_new() ?*EVP_MD_CTX;
pub extern fn EVP_MD_CTX_free(ctx: ?*EVP_MD_CTX) void;
pub const struct_evp_md_ctx_st = opaque {};
pub const EVP_MD_CTX = struct_evp_md_ctx_st;

View File

@@ -29,6 +29,8 @@ const Notification = @import("../Notification.zig");
const CookieJar = @import("../browser/webapi/storage/Cookie.zig").Jar;
const Robots = @import("../browser/Robots.zig");
const RobotStore = Robots.RobotStore;
const WebBotAuth = @import("../browser/WebBotAuth.zig");
const posix = std.posix;
const Allocator = std.mem.Allocator;
@@ -66,7 +68,7 @@ active: usize,
intercepted: usize,
// Our easy handles, managed by a curl multi.
handles: Handles,
handles: Net.Handles,
// Use to generate the next request ID
next_request_id: u32 = 0,
@@ -83,6 +85,9 @@ robot_store: *RobotStore,
// Allows us to fetch the robots.txt just once.
pending_robots_queue: std.StringHashMapUnmanaged(std.ArrayList(Request)) = .empty,
// Reference to the App-owned WebBotAuth.
web_bot_auth: *const ?WebBotAuth,
// Once we have a handle/easy to process a request with, we create a Transfer
// which contains the Request as well as any state we need to process the
// request. These wil come and go with each request.
@@ -121,14 +126,20 @@ pub const CDPClient = struct {
const TransferQueue = std.DoublyLinkedList;
pub fn init(allocator: Allocator, ca_blob: ?Net.Blob, robot_store: *RobotStore, config: *const Config) !*Client {
pub fn init(
allocator: Allocator,
ca_blob: ?Net.Blob,
robot_store: *RobotStore,
web_bot_auth: *const ?WebBotAuth,
config: *const Config,
) !*Client {
var transfer_pool = std.heap.MemoryPool(Transfer).init(allocator);
errdefer transfer_pool.deinit();
const client = try allocator.create(Client);
errdefer allocator.destroy(client);
var handles = try Handles.init(allocator, ca_blob, config);
var handles = try Net.Handles.init(allocator, ca_blob, config);
errdefer handles.deinit(allocator);
// Set transfer callbacks on each connection.
@@ -145,6 +156,7 @@ pub fn init(allocator: Allocator, ca_blob: ?Net.Blob, robot_store: *RobotStore,
.handles = handles,
.allocator = allocator,
.robot_store = robot_store,
.web_bot_auth = web_bot_auth,
.http_proxy = http_proxy,
.use_proxy = http_proxy != null,
.config = config,
@@ -191,6 +203,8 @@ fn _abort(self: *Client, comptime abort_all: bool, frame_id: u32) void {
n = node.next;
const conn: *Net.Connection = @fieldParentPtr("node", node);
var transfer = Transfer.fromConnection(conn) catch |err| {
// Let's cleanup what we can
self.handles.remove(conn);
log.err(.http, "get private info", .{ .err = err, .source = "abort" });
continue;
};
@@ -665,7 +679,7 @@ pub fn restoreOriginalProxy(self: *Client) !void {
}
// Enable TLS verification on all connections.
pub fn enableTlsVerify(self: *const Client) !void {
pub fn enableTlsVerify(self: *Client) !void {
// Remove inflight connections check on enable TLS b/c chromiumoxide calls
// the command during navigate and Curl seems to accept it...
@@ -675,7 +689,7 @@ pub fn enableTlsVerify(self: *const Client) !void {
}
// Disable TLS verification on all connections.
pub fn disableTlsVerify(self: *const Client) !void {
pub fn disableTlsVerify(self: *Client) !void {
// Remove inflight connections check on disable TLS b/c chromiumoxide calls
// the command during navigate and Curl seems to accept it...
@@ -689,7 +703,11 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
{
transfer._conn = conn;
errdefer transfer.deinit();
errdefer {
transfer._conn = null;
transfer.deinit();
self.handles.isAvailable(conn);
}
try conn.setURL(req.url);
try conn.setMethod(req.method);
@@ -703,6 +721,12 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
try conn.secretHeaders(&header_list, &self.config.http_headers); // Add headers that must be hidden from intercepts
try conn.setHeaders(&header_list);
// If we have WebBotAuth, sign our request.
if (self.web_bot_auth.*) |wba| {
const authority = URL.getHost(req.url);
try wba.signRequest(transfer.arena.allocator(), &header_list, authority);
}
// Add cookies.
if (header_list.cookies) |cookies| {
try conn.setCookies(cookies);
@@ -716,17 +740,20 @@ fn makeRequest(self: *Client, conn: *Net.Connection, transfer: *Transfer) anyerr
}
}
// Once soon as this is called, our "perform" loop is responsible for
// As soon as this is called, our "perform" loop is responsible for
// cleaning things up. That's why the above code is in a block. If anything
// fails BEFORE `curl_multi_add_handle` suceeds, the we still need to do
// fails BEFORE `curl_multi_add_handle` succeeds, the we still need to do
// cleanup. But if things fail after `curl_multi_add_handle`, we expect
// perform to pick up the failure and clean up.
try self.handles.add(conn);
self.handles.add(conn) catch |err| {
transfer._conn = null;
transfer.deinit();
self.handles.isAvailable(conn);
return err;
};
if (req.start_callback) |cb| {
cb(transfer) catch |err| {
self.handles.remove(conn);
transfer._conn = null;
transfer.deinit();
return err;
};
@@ -834,7 +861,7 @@ fn processMessages(self: *Client) !bool {
// In case of request w/o data, we need to call the header done
// callback now.
const proceed = transfer.headerDoneCallback(&msg.conn) catch |err| {
log.err(.http, "header_done_callback", .{ .err = err });
log.err(.http, "header_done_callback2", .{ .err = err });
requestFailed(transfer, err, true);
continue;
};
@@ -872,8 +899,6 @@ fn ensureNoActiveConnection(self: *const Client) !void {
}
}
const Handles = Net.Handles;
pub const RequestCookie = struct {
is_http: bool,
jar: *CookieJar,
@@ -1295,14 +1320,14 @@ pub const Transfer = struct {
}
transfer._redirecting = false;
if (status == 401 or status == 407) {
if ((status == 401 or status == 407) and transfer.client.use_proxy) {
// The auth challenge must be parsed from a following
// WWW-Authenticate or Proxy-Authenticate header.
transfer._auth_challenge = .{
.status = status,
.source = undefined,
.scheme = undefined,
.realm = undefined,
.source = null,
.scheme = null,
.realm = null,
};
return buf_len;
}

View File

@@ -29,6 +29,7 @@ pub const Headers = Net.Headers;
const Config = @import("../Config.zig");
const RobotStore = @import("../browser/Robots.zig").RobotStore;
const WebBotAuth = @import("../browser/WebBotAuth.zig");
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
@@ -45,8 +46,14 @@ allocator: Allocator,
config: *const Config,
ca_blob: ?Net.Blob,
robot_store: *RobotStore,
web_bot_auth: *const ?WebBotAuth,
pub fn init(allocator: Allocator, robot_store: *RobotStore, config: *const Config) !Http {
pub fn init(
allocator: Allocator,
robot_store: *RobotStore,
web_bot_auth: *const ?WebBotAuth,
config: *const Config,
) !Http {
try Net.globalInit();
errdefer Net.globalDeinit();
@@ -68,6 +75,7 @@ pub fn init(allocator: Allocator, robot_store: *RobotStore, config: *const Confi
.config = config,
.ca_blob = ca_blob,
.robot_store = robot_store,
.web_bot_auth = web_bot_auth,
};
}
@@ -81,7 +89,7 @@ pub fn deinit(self: *Http) void {
}
pub fn createClient(self: *Http, allocator: Allocator) !*Client {
return Client.init(allocator, self.ca_blob, self.robot_store, self.config);
return Client.init(allocator, self.ca_blob, self.robot_store, self.web_bot_auth, self.config);
}
pub fn newConnection(self: *Http) !Net.Connection {

View File

@@ -24,22 +24,29 @@ mutex: std.Thread.Mutex = .{},
aw: std.io.Writer.Allocating,
pub fn init(allocator: std.mem.Allocator, app: *App, writer: *std.io.Writer) !*Self {
const http_client = try app.http.createClient(allocator);
errdefer http_client.deinit();
const notification = try lp.Notification.init(allocator);
errdefer notification.deinit();
const self = try allocator.create(Self);
errdefer allocator.destroy(self);
self.allocator = allocator;
self.app = app;
self.writer = writer;
self.aw = .init(allocator);
var browser = try lp.Browser.init(app, .{ .http_client = http_client });
errdefer browser.deinit();
self.http_client = try app.http.createClient(allocator);
errdefer self.http_client.deinit();
self.notification = try .init(allocator);
errdefer self.notification.deinit();
self.browser = try lp.Browser.init(app, .{ .http_client = self.http_client });
errdefer self.browser.deinit();
self.* = .{
.allocator = allocator,
.app = app,
.writer = writer,
.browser = browser,
.aw = .init(allocator),
.http_client = http_client,
.notification = notification,
.session = undefined,
.page = undefined,
};
self.session = try self.browser.newSession(self.notification);
self.page = try self.session.createPage();