Mirror of https://github.com/lightpanda-io/browser.git, synced 2025-10-29 15:13:28 +00:00

Compare commits: pandasurf...trusted_ty (1 commit)

Commit: e658b27947

.github/actions/install/action.yml (vendored): 8 lines changed
@@ -5,7 +5,7 @@ inputs:
  zig:
    description: 'Zig version to install'
    required: false
    default: '0.14.1'
    default: '0.14.0'
  arch:
    description: 'CPU arch used to select the v8 lib'
    required: false
@@ -34,11 +34,9 @@ runs:
    - name: Install apt deps
      if: ${{ inputs.os == 'linux' }}
      shell: bash
      run: |
        sudo apt-get update
        sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang
      run: sudo apt-get install -y wget xz-utils python3 ca-certificates git pkg-config libglib2.0-dev gperf libexpat1-dev cmake clang

    - uses: mlugg/setup-zig@v2
    - uses: mlugg/setup-zig@v1
      with:
        version: ${{ inputs.zig }}

.github/workflows/build.yml (vendored): 26 lines changed
@@ -1,11 +1,5 @@
|
||||
name: nightly build
|
||||
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ vars.NIGHTLY_BUILD_AWS_ACCESS_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.NIGHTLY_BUILD_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.NIGHTLY_BUILD_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.NIGHTLY_BUILD_AWS_REGION }}
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "2 2 * * *"
|
||||
@@ -43,11 +37,6 @@ jobs:
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
@@ -81,11 +70,6 @@ jobs:
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
@@ -119,11 +103,6 @@ jobs:
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
@@ -157,11 +136,6 @@ jobs:
|
||||
- name: Rename binary
|
||||
run: mv zig-out/bin/lightpanda lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: upload on s3
|
||||
run: |
|
||||
export DIR=`git show --no-patch --no-notes --pretty='%cs_%h'`
|
||||
aws s3 cp --storage-class=GLACIER_IR lightpanda-${{ env.ARCH }}-${{ env.OS }} s3://lpd-nightly-build/${DIR}/lightpanda-${{ env.ARCH }}-${{ env.OS }}
|
||||
|
||||
- name: Upload the build
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
|
||||
.github/workflows/e2e-test.yml (vendored): 111 lines changed
@@ -1,12 +1,5 @@
|
||||
name: e2e-test
|
||||
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ vars.LPD_PERF_AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.LPD_PERF_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_BUCKET: ${{ vars.LPD_PERF_AWS_BUCKET }}
|
||||
AWS_REGION: ${{ vars.LPD_PERF_AWS_REGION }}
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -55,7 +48,7 @@ jobs:
|
||||
- uses: ./.github/actions/install
|
||||
|
||||
- name: zig build release
|
||||
run: zig build -Doptimize=ReleaseFast -Dcpu=x86_64 -Dgit_commit=$(git rev-parse --short ${{ github.sha }})
|
||||
run: zig build -Doptimize=ReleaseSafe
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -70,7 +63,7 @@ jobs:
|
||||
needs: zig-build-release
|
||||
|
||||
env:
|
||||
MAX_MEMORY: 30000
|
||||
MAX_MEMORY: 29000
|
||||
MAX_AVG_DURATION: 24
|
||||
LIGHTPANDA_DISABLE_TELEMETRY: true
|
||||
|
||||
@@ -142,103 +135,3 @@ jobs:
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
go run runner/main.go --verbose
|
||||
kill `cat LPD.pid`
|
||||
|
||||
cdp-and-hyperfine-bench:
|
||||
name: cdp-and-hyperfine-bench
|
||||
needs: zig-build-release
|
||||
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
# use a self host runner.
|
||||
runs-on: lpd-bench-hetzner
|
||||
timeout-minutes: 15
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'lightpanda-io/demo'
|
||||
fetch-depth: 0
|
||||
|
||||
- run: npm install
|
||||
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: lightpanda-build-release
|
||||
|
||||
- run: chmod a+x ./lightpanda
|
||||
|
||||
- name: start http
|
||||
run: |
|
||||
go run ws/main.go & echo $! > WS.pid
|
||||
sleep 2
|
||||
|
||||
- name: run puppeteer
|
||||
run: |
|
||||
./lightpanda serve & echo $! > LPD.pid
|
||||
sleep 2
|
||||
RUNS=100 npm run bench-puppeteer-cdp > puppeteer.out || exit 1
|
||||
cat /proc/`cat LPD.pid`/status |grep VmHWM|grep -oP '\d+' > LPD.VmHWM
|
||||
kill `cat LPD.pid`
|
||||
|
||||
- name: puppeteer result
|
||||
run: cat puppeteer.out
|
||||
|
||||
- name: json output
|
||||
run: |
|
||||
export AVG_DURATION=`cat puppeteer.out|grep 'avg run'|sed 's/avg run duration (ms) //'`
|
||||
export TOTAL_DURATION=`cat puppeteer.out|grep 'total duration'|sed 's/total duration (ms) //'`
|
||||
export LPD_VmHWM=`cat LPD.VmHWM`
|
||||
echo "{\"duration_total\":${TOTAL_DURATION},\"duration_avg\":${AVG_DURATION},\"mem_peak\":${LPD_VmHWM}}" > bench.json
|
||||
cat bench.json
|
||||
|
||||
- name: run hyperfine
|
||||
run: |
|
||||
hyperfine --export-json=hyperfine.json --warmup 3 --runs 20 --shell=none "./lightpanda --dump http://127.0.0.1:1234/campfire-commerce/"
|
||||
|
||||
- name: stop http
|
||||
run: kill `cat WS.pid`
|
||||
|
||||
- name: write commit
|
||||
run: |
|
||||
echo "${{github.sha}}" > commit.txt
|
||||
|
||||
- name: upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bench-results
|
||||
path: |
|
||||
bench.json
|
||||
hyperfine.json
|
||||
commit.txt
|
||||
retention-days: 10
|
||||
|
||||
|
||||
perf-fmt:
|
||||
name: perf-fmt
|
||||
needs: cdp-and-hyperfine-bench
|
||||
|
||||
# Don't execute on PR
|
||||
if: github.event_name != 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
|
||||
container:
|
||||
image: ghcr.io/lightpanda-io/perf-fmt:latest
|
||||
credentials:
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
steps:
|
||||
- name: download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bench-results
|
||||
|
||||
- name: format and send json result
|
||||
run: /perf-fmt cdp ${{ github.sha }} bench.json
|
||||
|
||||
- name: format and send json result
|
||||
run: /perf-fmt hyperfine ${{ github.sha }} hyperfine.json
|
||||
|
||||
.github/workflows/zig-fmt.yml (vendored): 4 lines changed

@@ -1,7 +1,7 @@
name: zig-fmt

env:
  ZIG_VERSION: 0.14.1
  ZIG_VERSION: 0.14.0

on:
  pull_request:
@@ -32,7 +32,7 @@ jobs:
    timeout-minutes: 15

    steps:
      - uses: mlugg/setup-zig@v2
      - uses: mlugg/setup-zig@v1
        with:
          version: ${{ env.ZIG_VERSION }}

Dockerfile: 18 lines changed
@@ -1,10 +1,10 @@
|
||||
FROM ubuntu:24.04
|
||||
|
||||
ARG MINISIG=0.12
|
||||
ARG ZIG=0.14.1
|
||||
ARG ZIG=0.14.0
|
||||
ARG ZIG_MINISIG=RWSGOq2NVecA2UPNdBUZykf1CCb147pkmdtYxgb3Ti+JO/wCYvhbAb/U
|
||||
ARG ARCH=x86_64
|
||||
ARG V8=13.6.233.8
|
||||
ARG V8=11.1.134
|
||||
ARG ZIG_V8=v0.1.24
|
||||
|
||||
RUN apt-get update -yq && \
|
||||
@@ -20,21 +20,21 @@ RUN curl --fail -L -O https://github.com/jedisct1/minisign/releases/download/${M
|
||||
tar xvzf minisign-${MINISIG}-linux.tar.gz
|
||||
|
||||
# install zig
|
||||
RUN curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz
|
||||
RUN curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-${ARCH}-linux-${ZIG}.tar.xz.minisig
|
||||
RUN curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-linux-${ARCH}-${ZIG}.tar.xz
|
||||
RUN curl --fail -L -O https://ziglang.org/download/${ZIG}/zig-linux-${ARCH}-${ZIG}.tar.xz.minisig
|
||||
|
||||
RUN minisign-linux/${ARCH}/minisign -Vm zig-${ARCH}-linux-${ZIG}.tar.xz -P ${ZIG_MINISIG}
|
||||
RUN minisign-linux/${ARCH}/minisign -Vm zig-linux-${ARCH}-${ZIG}.tar.xz -P ${ZIG_MINISIG}
|
||||
|
||||
# clean minisign
|
||||
RUN rm -fr minisign-0.11-linux.tar.gz minisign-linux
|
||||
|
||||
# install zig
|
||||
RUN tar xvf zig-${ARCH}-linux-${ZIG}.tar.xz && \
|
||||
mv zig-${ARCH}-linux-${ZIG} /usr/local/lib && \
|
||||
ln -s /usr/local/lib/zig-${ARCH}-linux-${ZIG}/zig /usr/local/bin/zig
|
||||
RUN tar xvf zig-linux-${ARCH}-${ZIG}.tar.xz && \
|
||||
mv zig-linux-${ARCH}-${ZIG} /usr/local/lib && \
|
||||
ln -s /usr/local/lib/zig-linux-${ARCH}-${ZIG}/zig /usr/local/bin/zig
|
||||
|
||||
# clean up zig install
|
||||
RUN rm -fr zig-${ARCH}-linux-${ZIG}.tar.xz zig-${ARCH}-linux-${ZIG}.tar.xz.minisig
|
||||
RUN rm -fr zig-linux-${ARCH}-${ZIG}.tar.xz zig-linux-${ARCH}-${ZIG}.tar.xz.minisig
|
||||
|
||||
# force use of http instead of ssh with github
|
||||
RUN cat <<EOF > /root/.gitconfig
|
||||
|
||||
Makefile: 2 lines changed

@@ -69,7 +69,7 @@ build:
## Build in debug mode
build-dev:
	@printf "\e[36mBuilding (debug)...\e[0m\n"
	@$(ZIG) build -Dgit_commit=$$(git rev-parse --short HEAD) || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
	@$(ZIG) build -Dgit_commit=$$(git rev-parse --short HEAD) -Dlog_level=debug || (printf "\e[33mBuild ERROR\e[0m\n"; exit 1;)
	@printf "\e[33mBuild OK\e[0m\n"

## Run the server in release mode

README.md:

@@ -148,7 +148,7 @@ You can also follow the progress of our Javascript support in our dedicated [zig

### Prerequisites

Lightpanda is written with [Zig](https://ziglang.org/) `0.14.1`. You have to
Lightpanda is written with [Zig](https://ziglang.org/) `0.14.0`. You have to
install it with the right version in order to build the project.

Lightpanda also depends on

build.zig: 14 lines changed

@@ -21,7 +21,7 @@ const builtin = @import("builtin");

/// Do not rename this constant. It is scanned by some scripts to determine
/// which zig version to install.
const recommended_zig_version = "0.14.1";
const recommended_zig_version = "0.14.0";

pub fn build(b: *std.Build) !void {
    switch (comptime builtin.zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
@@ -44,6 +44,18 @@ pub fn build(b: *std.Build) !void {
        b.option([]const u8, "git_commit", "Current git commit") orelse "dev",
    );

    opts.addOption(
        std.log.Level,
        "log_level",
        b.option(std.log.Level, "log_level", "The log level") orelse std.log.Level.info,
    );

    opts.addOption(
        bool,
        "log_unknown_properties",
        b.option(bool, "log_unknown_properties", "Log access to unknown properties") orelse false,
    );

    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

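The hunk above introduces, on one side of this diff, a `log_level` build option of type std.log.Level. As a minimal sketch of how such an option is typically consumed, assuming the generated options module is imported as `build_config` (that name and its wiring into the executable are assumptions, not taken from this repository):

    const std = @import("std");
    // Assumed import name: it depends on how build.zig attaches the options
    // module to the executable's root module.
    const build_config = @import("build_config");

    // The root module can pick the runtime log filter used by std.log and by
    // std.log.scoped(...) call sites, such as the .app and .console scopes
    // seen elsewhere in this diff.
    pub const std_options: std.Options = .{
        .log_level = build_config.log_level,
    };

The Makefile change above, on the same side of the diff, passes -Dlog_level=debug to zig build, which is how this option would be set for a debug build.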
build.zig.zon:

@@ -13,8 +13,8 @@
        .hash = "tigerbeetle_io-0.0.0-ViLgxpyRBAB5BMfIcj3KMXfbJzwARs9uSl8aRy2OXULd",
    },
    .v8 = .{
        .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/1d25fcf3ced688adca3c7a95a138771e4ebba692.tar.gz",
        .hash = "v8-0.0.0-xddH61eyAwDICIkLAkfQcxsX4TMCKY80QiSUgNBQqx-u",
        .url = "https://github.com/lightpanda-io/zig-v8-fork/archive/e38cb27ddb044c6afbf8a938b293721b9804405e.tar.gz",
        .hash = "v8-0.0.0-xddH6_GzAwCaz83JWuw3sepOGq0I7C_CmfOwA1Gb9q3y",
    },
    //.v8 = .{ .path = "../zig-v8-fork" },
    //.tigerbeetle_io = .{ .path = "../tigerbeetle-io" },

flake.lock (generated): 151 lines changed
@@ -1,5 +1,21 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1733328505,
|
||||
"narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
@@ -18,18 +34,92 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"inputs": {
|
||||
"systems": "systems_2"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_3": {
|
||||
"inputs": {
|
||||
"systems": "systems_3"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"iguana": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils_2",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"zigPkgs": "zigPkgs"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1746539192,
|
||||
"narHash": "sha256-32nN8JlRqNuCFfrDooyre+gDSnxZuCtK/qaHhRmGMhg=",
|
||||
"owner": "mookums",
|
||||
"repo": "iguana",
|
||||
"rev": "5569f95694edf59803429400ff6cb1c7522da801",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "mookums",
|
||||
"repo": "iguana",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1748964450,
|
||||
"narHash": "sha256-ZouDiXkUk8mkMnah10QcoQ9Nu6UW6AFAHLScS3En6aI=",
|
||||
"lastModified": 1746397377,
|
||||
"narHash": "sha256-5oLdRa3vWSRbuqPIFFmQBGGUqaYZBxX+GGtN9f/n4lU=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "ed30f8aba41605e3ab46421e3dcb4510ec560ff8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1746481231,
|
||||
"narHash": "sha256-U3VKPi5D2oLBFzaMI0jJLJp8J64ZLjz+EwodUS//QWc=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "9ff500cd9e123f46c55855eca64beccead29b152",
|
||||
"rev": "c6aca34d2ca2ce9e20b722f54e684cda64b275c2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nixos",
|
||||
"ref": "release-25.05",
|
||||
"ref": "release-24.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@@ -37,7 +127,8 @@
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs"
|
||||
"iguana": "iguana",
|
||||
"nixpkgs": "nixpkgs_2"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
@@ -54,6 +145,56 @@
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems_2": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems_3": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"zigPkgs": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils_3",
|
||||
"nixpkgs": "nixpkgs"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1746475050,
|
||||
"narHash": "sha256-KJC7BNY+NPCc1I+quGkWtoHXOMvFVEyer8Y0haOtTCA=",
|
||||
"owner": "mookums",
|
||||
"repo": "zig-overlay",
|
||||
"rev": "dfa488aa462932e46f44fddf6677ff22f1244c22",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "mookums",
|
||||
"repo": "zig-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
|
||||
flake.nix: 39 lines changed
@@ -2,53 +2,56 @@
|
||||
description = "headless browser designed for AI and automation";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/release-25.05";
|
||||
nixpkgs.url = "github:nixos/nixpkgs/release-24.11";
|
||||
|
||||
iguana.url = "github:mookums/iguana";
|
||||
iguana.inputs.nixpkgs.follows = "nixpkgs";
|
||||
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
};
|
||||
|
||||
outputs =
|
||||
{
|
||||
nixpkgs,
|
||||
iguana,
|
||||
flake-utils,
|
||||
...
|
||||
}:
|
||||
flake-utils.lib.eachDefaultSystem (
|
||||
system:
|
||||
let
|
||||
zigVersion = "0_14_0";
|
||||
iguanaLib = iguana.lib.${system};
|
||||
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [
|
||||
(iguanaLib.mkZigOverlay zigVersion)
|
||||
(iguanaLib.mkZlsOverlay zigVersion)
|
||||
];
|
||||
};
|
||||
|
||||
# We need crtbeginS.o for building.
|
||||
crtFiles = pkgs.runCommand "crt-files" { } ''
|
||||
mkdir -p $out/lib
|
||||
cp -r ${pkgs.gcc.cc}/lib/gcc $out/lib/gcc
|
||||
'';
|
||||
|
||||
# This build pipeline is very unhappy without an FHS-compliant env.
|
||||
fhs = pkgs.buildFHSEnv {
|
||||
fhs = pkgs.buildFHSUserEnv {
|
||||
name = "fhs-shell";
|
||||
multiArch = true;
|
||||
targetPkgs =
|
||||
pkgs: with pkgs; [
|
||||
# Build Tools
|
||||
zig
|
||||
zls
|
||||
python3
|
||||
pkg-config
|
||||
cmake
|
||||
gperf
|
||||
|
||||
# GCC
|
||||
gcc
|
||||
gcc.cc.lib
|
||||
crtFiles
|
||||
|
||||
# Libraries
|
||||
expat.dev
|
||||
python3
|
||||
glib.dev
|
||||
glibc.dev
|
||||
zlib
|
||||
ninja
|
||||
gn
|
||||
gcc-unwrapped
|
||||
binutils
|
||||
clang
|
||||
clang-tools
|
||||
];
|
||||
};
|
||||
in
|
||||
|
||||
src/app.zig: 10 lines changed
@@ -1,12 +1,13 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("log.zig");
|
||||
const Loop = @import("runtime/loop.zig").Loop;
|
||||
const HttpClient = @import("http/client.zig").Client;
|
||||
const Telemetry = @import("telemetry/telemetry.zig").Telemetry;
|
||||
const Notification = @import("notification.zig").Notification;
|
||||
|
||||
const log = std.log.scoped(.app);
|
||||
|
||||
// Container for global state / objects that various parts of the system
|
||||
// might need.
|
||||
pub const App = struct {
|
||||
@@ -52,8 +53,7 @@ pub const App = struct {
|
||||
.telemetry = undefined,
|
||||
.app_dir_path = app_dir_path,
|
||||
.notification = notification,
|
||||
.http_client = try HttpClient.init(allocator, .{
|
||||
.max_concurrent = 3,
|
||||
.http_client = try HttpClient.init(allocator, 5, .{
|
||||
.http_proxy = config.http_proxy,
|
||||
.tls_verify_host = config.tls_verify_host,
|
||||
}),
|
||||
@@ -84,7 +84,7 @@ fn getAndMakeAppDir(allocator: Allocator) ?[]const u8 {
|
||||
return allocator.dupe(u8, "/tmp") catch unreachable;
|
||||
}
|
||||
const app_dir_path = std.fs.getAppDataDir(allocator, "lightpanda") catch |err| {
|
||||
log.warn(.app, "get data dir", .{ .err = err });
|
||||
log.warn("failed to get lightpanda data dir: {}", .{err});
|
||||
return null;
|
||||
};
|
||||
|
||||
@@ -92,7 +92,7 @@ fn getAndMakeAppDir(allocator: Allocator) ?[]const u8 {
|
||||
error.PathAlreadyExists => return app_dir_path,
|
||||
else => {
|
||||
allocator.free(app_dir_path);
|
||||
log.warn(.app, "create data dir", .{ .err = err, .path = app_dir_path });
|
||||
log.warn("failed to create lightpanda data dir: {}", .{err});
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// Sometimes we need to extend libdom. For example, its HTMLDocument doesn't
|
||||
// have a readyState. We have a couple different options, such as making the
|
||||
// correction in libdom directly. Another option stems from the fact that every
|
||||
// libdom node has an opaque embedder_data field. This is the struct that we
|
||||
// lazily load into that field.
|
||||
//
|
||||
// It didn't originally start off as a collection of every single extension, but
|
||||
// this quickly proved necessary, since different fields are needed on the same
|
||||
// data at different levels of the prototype chain. This isn't memory efficient.
|
||||
|
||||
const Env = @import("env.zig").Env;
|
||||
const parser = @import("netsurf.zig");
|
||||
const CSSStyleDeclaration = @import("cssom/css_style_declaration.zig").CSSStyleDeclaration;
|
||||
|
||||
// for HTMLScript (but probably needs to be added to more)
|
||||
onload: ?Env.Function = null,
|
||||
onerror: ?Env.Function = null,
|
||||
|
||||
// for HTMLElement
|
||||
style: CSSStyleDeclaration = .empty,
|
||||
|
||||
// for html/document
|
||||
ready_state: ReadyState = .loading,
|
||||
|
||||
// for dom/document
|
||||
active_element: ?*parser.Element = null,
|
||||
|
||||
// for HTMLSelectElement
|
||||
// By default, if no option is explicitly selected, the first option should
|
||||
// be selected. However, libdom doesn't do this, and it sets the
|
||||
// selectedIndex to -1, which is a valid value for "nothing selected".
|
||||
// Therefore, when libdom says the selectedIndex == -1, we don't know if
|
||||
// it means that nothing is selected, or if the first option is selected by
|
||||
// default.
|
||||
// There are cases where this won't work, but when selectedIndex is
|
||||
// explicitly set, we set this boolean flag. Then, when we're getting then
|
||||
// selectedIndex, if this flag is == false, which is to say that if
|
||||
// selectedIndex hasn't been explicitly set AND if we have at least 1 option
|
||||
// AND if it isn't a multi select, we can make the 1st item selected by
|
||||
// default (by returning selectedIndex == 0).
|
||||
explicit_index_set: bool = false,
|
||||
|
||||
const ReadyState = enum {
|
||||
loading,
|
||||
interactive,
|
||||
complete,
|
||||
};
|
||||
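The long comment in the removed per-node state struct above (referenced elsewhere in this diff as State.zig) explains the HTMLSelectElement selectedIndex fallback driven by the explicit_index_set flag. A minimal sketch of that rule, assuming hypothetical helpers rawSelectedIndex, optionCount and isMultiple that do not appear in this diff:

    // Minimal sketch of the fallback described in the comment above; the
    // helpers rawSelectedIndex, optionCount and isMultiple are placeholders,
    // not real libdom or netsurf APIs.
    fn effectiveSelectedIndex(state: *const State, select: *parser.Element) i32 {
        const raw = rawSelectedIndex(select); // libdom reports -1 when nothing was selected
        if (raw != -1) return raw;

        // selectedIndex was never explicitly set: treat the first option as
        // selected by default, unless there are no options or it is a
        // multi-select.
        if (!state.explicit_index_set and optionCount(select) > 0 and !isMultiple(select)) {
            return 0;
        }
        return -1;
    }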
@@ -21,7 +21,6 @@ const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const State = @import("State.zig");
|
||||
const Env = @import("env.zig").Env;
|
||||
const App = @import("../app.zig").App;
|
||||
const Session = @import("session.zig").Session;
|
||||
@@ -42,7 +41,6 @@ pub const Browser = struct {
|
||||
session_arena: ArenaAllocator,
|
||||
transfer_arena: ArenaAllocator,
|
||||
notification: *Notification,
|
||||
state_pool: std.heap.MemoryPool(State),
|
||||
|
||||
pub fn init(app: *App) !Browser {
|
||||
const allocator = app.allocator;
|
||||
@@ -63,7 +61,6 @@ pub const Browser = struct {
|
||||
.page_arena = ArenaAllocator.init(allocator),
|
||||
.session_arena = ArenaAllocator.init(allocator),
|
||||
.transfer_arena = ArenaAllocator.init(allocator),
|
||||
.state_pool = std.heap.MemoryPool(State).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -74,7 +71,6 @@ pub const Browser = struct {
|
||||
self.session_arena.deinit();
|
||||
self.transfer_arena.deinit();
|
||||
self.notification.deinit();
|
||||
self.state_pool.deinit();
|
||||
}
|
||||
|
||||
pub fn newSession(self: *Browser) !*Session {
|
||||
|
||||
@@ -19,97 +19,86 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const JsObject = @import("../env.zig").Env.JsObject;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const log = if (builtin.is_test) &test_capture else @import("../../log.zig");
|
||||
const log = if (builtin.is_test) &test_capture else std.log.scoped(.console);
|
||||
|
||||
pub const Console = struct {
|
||||
// TODO: configurable writer
|
||||
timers: std.StringHashMapUnmanaged(u32) = .{},
|
||||
counts: std.StringHashMapUnmanaged(u32) = .{},
|
||||
|
||||
pub fn static_lp(values: []JsObject, page: *Page) !void {
|
||||
pub fn _log(_: *const Console, values: []JsObject, state: *SessionState) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.fatal(.console, "lightpanda", .{ .args = try serializeValues(values, page) });
|
||||
log.info("{s}", .{try serializeValues(values, state)});
|
||||
}
|
||||
|
||||
pub fn static_log(values: []JsObject, page: *Page) !void {
|
||||
pub fn _info(console: *const Console, values: []JsObject, state: *SessionState) !void {
|
||||
return console._log(values, state);
|
||||
}
|
||||
|
||||
pub fn _debug(_: *const Console, values: []JsObject, state: *SessionState) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.info(.console, "info", .{ .args = try serializeValues(values, page) });
|
||||
log.debug("{s}", .{try serializeValues(values, state)});
|
||||
}
|
||||
|
||||
pub fn static_info(values: []JsObject, page: *Page) !void {
|
||||
return static_log(values, page);
|
||||
}
|
||||
|
||||
pub fn static_debug(values: []JsObject, page: *Page) !void {
|
||||
pub fn _warn(_: *const Console, values: []JsObject, state: *SessionState) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.debug(.console, "debug", .{ .args = try serializeValues(values, page) });
|
||||
log.warn("{s}", .{try serializeValues(values, state)});
|
||||
}
|
||||
|
||||
pub fn static_warn(values: []JsObject, page: *Page) !void {
|
||||
pub fn _error(_: *const Console, values: []JsObject, state: *SessionState) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
log.warn(.console, "warn", .{ .args = try serializeValues(values, page) });
|
||||
log.err("{s}", .{try serializeValues(values, state)});
|
||||
}
|
||||
|
||||
pub fn static_error(values: []JsObject, page: *Page) !void {
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
pub fn _clear(_: *const Console) void {}
|
||||
|
||||
log.info(.console, "error", .{
|
||||
.args = try serializeValues(values, page),
|
||||
.stack = page.stackTrace() catch "???",
|
||||
});
|
||||
}
|
||||
|
||||
pub fn static_clear() void {}
|
||||
|
||||
pub fn _count(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||
pub fn _count(self: *Console, label_: ?[]const u8, state: *SessionState) !void {
|
||||
const label = label_ orelse "default";
|
||||
const gop = try self.counts.getOrPut(page.arena, label);
|
||||
const gop = try self.counts.getOrPut(state.arena, label);
|
||||
|
||||
var current: u32 = 0;
|
||||
if (gop.found_existing) {
|
||||
current = gop.value_ptr.*;
|
||||
} else {
|
||||
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||
gop.key_ptr.* = try state.arena.dupe(u8, label);
|
||||
}
|
||||
|
||||
const count = current + 1;
|
||||
gop.value_ptr.* = count;
|
||||
|
||||
log.info(.console, "count", .{ .label = label, .count = count });
|
||||
log.info("{s}: {d}", .{ label, count });
|
||||
}
|
||||
|
||||
pub fn _countReset(self: *Console, label_: ?[]const u8) !void {
|
||||
const label = label_ orelse "default";
|
||||
const kv = self.counts.fetchRemove(label) orelse {
|
||||
log.info(.console, "invalid counter", .{ .label = label });
|
||||
log.warn("Counter \"{s}\" doesn't exist.", .{label});
|
||||
return;
|
||||
};
|
||||
log.info(.console, "count reset", .{ .label = label, .count = kv.value });
|
||||
|
||||
log.info("{s}: {d}", .{ label, kv.value });
|
||||
}
|
||||
|
||||
pub fn _time(self: *Console, label_: ?[]const u8, page: *Page) !void {
|
||||
pub fn _time(self: *Console, label_: ?[]const u8, state: *SessionState) !void {
|
||||
const label = label_ orelse "default";
|
||||
const gop = try self.timers.getOrPut(page.arena, label);
|
||||
const gop = try self.timers.getOrPut(state.arena, label);
|
||||
|
||||
if (gop.found_existing) {
|
||||
log.info(.console, "duplicate timer", .{ .label = label });
|
||||
log.warn("Timer \"{s}\" already exists.", .{label});
|
||||
return;
|
||||
}
|
||||
gop.key_ptr.* = try page.arena.dupe(u8, label);
|
||||
gop.key_ptr.* = try state.arena.dupe(u8, label);
|
||||
gop.value_ptr.* = timestamp();
|
||||
}
|
||||
|
||||
@@ -117,48 +106,42 @@ pub const Console = struct {
|
||||
const elapsed = timestamp();
|
||||
const label = label_ orelse "default";
|
||||
const start = self.timers.get(label) orelse {
|
||||
log.info(.console, "invalid timer", .{ .label = label });
|
||||
log.warn("Timer \"{s}\" doesn't exist.", .{label});
|
||||
return;
|
||||
};
|
||||
log.info(.console, "timer", .{ .label = label, .elapsed = elapsed - start });
|
||||
|
||||
log.info("\"{s}\": {d}ms", .{ label, elapsed - start });
|
||||
}
|
||||
|
||||
pub fn _timeStop(self: *Console, label_: ?[]const u8) void {
|
||||
const elapsed = timestamp();
|
||||
const label = label_ orelse "default";
|
||||
const kv = self.timers.fetchRemove(label) orelse {
|
||||
log.info(.console, "invalid timer", .{ .label = label });
|
||||
log.warn("Timer \"{s}\" doesn't exist.", .{label});
|
||||
return;
|
||||
};
|
||||
|
||||
log.warn(.console, "timer stop", .{ .label = label, .elapsed = elapsed - kv.value });
|
||||
log.info("\"{s}\": {d}ms - timer ended", .{ label, elapsed - kv.value });
|
||||
}
|
||||
|
||||
pub fn static_assert(assertion: JsObject, values: []JsObject, page: *Page) !void {
|
||||
pub fn _assert(_: *Console, assertion: JsObject, values: []JsObject, state: *SessionState) !void {
|
||||
if (assertion.isTruthy()) {
|
||||
return;
|
||||
}
|
||||
var serialized_values: []const u8 = "";
|
||||
if (values.len > 0) {
|
||||
serialized_values = try serializeValues(values, page);
|
||||
serialized_values = try serializeValues(values, state);
|
||||
}
|
||||
log.info(.console, "assertion failed", .{ .values = serialized_values });
|
||||
log.err("Assertion failed: {s}", .{serialized_values});
|
||||
}
|
||||
|
||||
fn serializeValues(values: []JsObject, page: *Page) ![]const u8 {
|
||||
if (values.len == 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
const arena = page.call_arena;
|
||||
const separator = log.separator();
|
||||
fn serializeValues(values: []JsObject, state: *SessionState) ![]const u8 {
|
||||
const arena = state.call_arena;
|
||||
var arr: std.ArrayListUnmanaged(u8) = .{};
|
||||
|
||||
for (values, 1..) |value, i| {
|
||||
try arr.appendSlice(arena, separator);
|
||||
try arr.writer(arena).print("{d}: ", .{i});
|
||||
const serialized = if (builtin.mode == .Debug) value.toDetailString() else value.toString();
|
||||
try arr.appendSlice(arena, try serialized);
|
||||
try arr.appendSlice(arena, try values[0].toString());
|
||||
for (values[1..]) |value| {
|
||||
try arr.append(arena, ' ');
|
||||
try arr.appendSlice(arena, try value.toString());
|
||||
}
|
||||
return arr.items;
|
||||
}
|
||||
@@ -172,11 +155,11 @@ fn timestamp() u32 {
|
||||
var test_capture = TestCapture{};
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser.Console" {
|
||||
defer testing.reset();
|
||||
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
defer testing.reset();
|
||||
|
||||
{
|
||||
try runner.testCases(&.{
|
||||
.{ "console.log('a')", "undefined" },
|
||||
@@ -184,8 +167,8 @@ test "Browser.Console" {
|
||||
}, .{});
|
||||
|
||||
const captured = test_capture.captured.items;
|
||||
try testing.expectEqual("[info] args= 1: a", captured[0]);
|
||||
try testing.expectEqual("[warn] args= 1: hello world 2: 23 3: true 4: #<Object>", captured[1]);
|
||||
try testing.expectEqual("a", captured[0]);
|
||||
try testing.expectEqual("hello world 23 true [object Object]", captured[1]);
|
||||
}
|
||||
|
||||
{
|
||||
@@ -203,15 +186,15 @@ test "Browser.Console" {
|
||||
}, .{});
|
||||
|
||||
const captured = test_capture.captured.items;
|
||||
try testing.expectEqual("[invalid counter] label=default", captured[0]);
|
||||
try testing.expectEqual("[count] label=default count=1", captured[1]);
|
||||
try testing.expectEqual("[count] label=teg count=1", captured[2]);
|
||||
try testing.expectEqual("[count] label=teg count=2", captured[3]);
|
||||
try testing.expectEqual("[count] label=teg count=3", captured[4]);
|
||||
try testing.expectEqual("[count] label=default count=2", captured[5]);
|
||||
try testing.expectEqual("[count reset] label=teg count=3", captured[6]);
|
||||
try testing.expectEqual("[count reset] label=default count=2", captured[7]);
|
||||
try testing.expectEqual("[count] label=default count=1", captured[8]);
|
||||
try testing.expectEqual("Counter \"default\" doesn't exist.", captured[0]);
|
||||
try testing.expectEqual("default: 1", captured[1]);
|
||||
try testing.expectEqual("teg: 1", captured[2]);
|
||||
try testing.expectEqual("teg: 2", captured[3]);
|
||||
try testing.expectEqual("teg: 3", captured[4]);
|
||||
try testing.expectEqual("default: 2", captured[5]);
|
||||
try testing.expectEqual("teg: 3", captured[6]);
|
||||
try testing.expectEqual("default: 2", captured[7]);
|
||||
try testing.expectEqual("default: 1", captured[8]);
|
||||
}
|
||||
|
||||
{
|
||||
@@ -225,105 +208,33 @@ test "Browser.Console" {
|
||||
}, .{});
|
||||
|
||||
const captured = test_capture.captured.items;
|
||||
try testing.expectEqual("[assertion failed] values=", captured[0]);
|
||||
try testing.expectEqual("[assertion failed] values= 1: x 2: true", captured[1]);
|
||||
try testing.expectEqual("[assertion failed] values= 1: x", captured[2]);
|
||||
}
|
||||
|
||||
{
|
||||
test_capture.reset();
|
||||
try runner.testCases(&.{
|
||||
.{ "[1].forEach(console.log)", null },
|
||||
}, .{});
|
||||
|
||||
const captured = test_capture.captured.items;
|
||||
try testing.expectEqual("[info] args= 1: 1 2: 0 3: [1]", captured[0]);
|
||||
try testing.expectEqual("Assertion failed: ", captured[0]);
|
||||
try testing.expectEqual("Assertion failed: x true", captured[1]);
|
||||
try testing.expectEqual("Assertion failed: x", captured[2]);
|
||||
}
|
||||
}
|
||||
|
||||
const TestCapture = struct {
|
||||
captured: std.ArrayListUnmanaged([]const u8) = .{},
|
||||
|
||||
fn separator(_: *const TestCapture) []const u8 {
|
||||
return " ";
|
||||
}
|
||||
|
||||
fn reset(self: *TestCapture) void {
|
||||
self.captured = .{};
|
||||
}
|
||||
|
||||
fn debug(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self.capture(scope, msg, args);
|
||||
fn debug(self: *TestCapture, comptime fmt: []const u8, args: anytype) void {
|
||||
const str = std.fmt.allocPrint(testing.arena_allocator, fmt, args) catch unreachable;
|
||||
self.captured.append(testing.arena_allocator, str) catch unreachable;
|
||||
}
|
||||
|
||||
fn info(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self.capture(scope, msg, args);
|
||||
fn info(self: *TestCapture, comptime fmt: []const u8, args: anytype) void {
|
||||
self.debug(fmt, args);
|
||||
}
|
||||
|
||||
fn warn(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self.capture(scope, msg, args);
|
||||
fn warn(self: *TestCapture, comptime fmt: []const u8, args: anytype) void {
|
||||
self.debug(fmt, args);
|
||||
}
|
||||
|
||||
fn err(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self.capture(scope, msg, args);
|
||||
}
|
||||
|
||||
fn fatal(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self.capture(scope, msg, args);
|
||||
}
|
||||
|
||||
fn capture(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
self._capture(scope, msg, args) catch unreachable;
|
||||
}
|
||||
|
||||
fn _capture(
|
||||
self: *TestCapture,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime msg: []const u8,
|
||||
args: anytype,
|
||||
) !void {
|
||||
std.debug.assert(scope == .console);
|
||||
|
||||
const allocator = testing.arena_allocator;
|
||||
var buf: std.ArrayListUnmanaged(u8) = .empty;
|
||||
try buf.appendSlice(allocator, "[" ++ msg ++ "] ");
|
||||
|
||||
inline for (@typeInfo(@TypeOf(args)).@"struct".fields) |f| {
|
||||
try buf.appendSlice(allocator, f.name);
|
||||
try buf.append(allocator, '=');
|
||||
try @import("../../log.zig").writeValue(.pretty, @field(args, f.name), buf.writer(allocator));
|
||||
try buf.append(allocator, ' ');
|
||||
}
|
||||
self.captured.append(testing.arena_allocator, std.mem.trimRight(u8, buf.items, " ")) catch unreachable;
|
||||
fn err(self: *TestCapture, comptime fmt: []const u8, args: anytype) void {
|
||||
self.debug(fmt, args);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -43,7 +43,6 @@ const Matcher = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const Elements = @import("../html/elements.zig");
|
||||
test "matchFirst" {
|
||||
const alloc = std.testing.allocator;
|
||||
|
||||
@@ -162,7 +161,7 @@ test "matchFirst" {
|
||||
for (testcases) |tc| {
|
||||
matcher.reset();
|
||||
|
||||
const doc = try parser.documentHTMLParseFromStr(tc.html, &Elements.createElement);
|
||||
const doc = try parser.documentHTMLParseFromStr(tc.html);
|
||||
defer parser.documentHTMLClose(doc) catch {};
|
||||
|
||||
const s = css.parse(alloc, tc.q, .{}) catch |e| {
|
||||
@@ -303,7 +302,7 @@ test "matchAll" {
|
||||
for (testcases) |tc| {
|
||||
matcher.reset();
|
||||
|
||||
const doc = try parser.documentHTMLParseFromStr(tc.html, &Elements.createElement);
|
||||
const doc = try parser.documentHTMLParseFromStr(tc.html);
|
||||
defer parser.documentHTMLClose(doc) catch {};
|
||||
|
||||
const s = css.parse(alloc, tc.q, .{}) catch |e| {
|
||||
|
||||
@@ -20,7 +20,7 @@ const std = @import("std");
|
||||
|
||||
const CSSParser = @import("./css_parser.zig").CSSParser;
|
||||
const CSSValueAnalyzer = @import("./css_value_analyzer.zig").CSSValueAnalyzer;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
pub const Interfaces = .{
|
||||
CSSStyleDeclaration,
|
||||
@@ -33,11 +33,6 @@ pub const CSSStyleDeclaration = struct {
|
||||
store: std.StringHashMapUnmanaged(Property),
|
||||
order: std.ArrayListUnmanaged([]const u8),
|
||||
|
||||
pub const empty: CSSStyleDeclaration = .{
|
||||
.store = .empty,
|
||||
.order = .empty,
|
||||
};
|
||||
|
||||
const Property = struct {
|
||||
value: []const u8,
|
||||
priority: bool,
|
||||
@@ -47,17 +42,17 @@ pub const CSSStyleDeclaration = struct {
|
||||
return self._getPropertyValue("float");
|
||||
}
|
||||
|
||||
pub fn set_cssFloat(self: *CSSStyleDeclaration, value: ?[]const u8, page: *Page) !void {
|
||||
pub fn set_cssFloat(self: *CSSStyleDeclaration, value: ?[]const u8, state: *SessionState) !void {
|
||||
const final_value = value orelse "";
|
||||
return self._setProperty("float", final_value, null, page);
|
||||
return self._setProperty("float", final_value, null, state);
|
||||
}
|
||||
|
||||
pub fn get_cssText(self: *const CSSStyleDeclaration, page: *Page) ![]const u8 {
|
||||
pub fn get_cssText(self: *const CSSStyleDeclaration, state: *SessionState) ![]const u8 {
|
||||
var buffer: std.ArrayListUnmanaged(u8) = .empty;
|
||||
const writer = buffer.writer(page.call_arena);
|
||||
const writer = buffer.writer(state.call_arena);
|
||||
for (self.order.items) |name| {
|
||||
const prop = self.store.get(name).?;
|
||||
const escaped = try CSSValueAnalyzer.escapeCSSValue(page.call_arena, prop.value);
|
||||
const escaped = try CSSValueAnalyzer.escapeCSSValue(state.call_arena, prop.value);
|
||||
try writer.print("{s}: {s}", .{ name, escaped });
|
||||
if (prop.priority) try writer.writeAll(" !important");
|
||||
try writer.writeAll("; ");
|
||||
@@ -66,18 +61,18 @@ pub const CSSStyleDeclaration = struct {
|
||||
}
|
||||
|
||||
// TODO Propagate also upward to parent node
|
||||
pub fn set_cssText(self: *CSSStyleDeclaration, text: []const u8, page: *Page) !void {
|
||||
pub fn set_cssText(self: *CSSStyleDeclaration, text: []const u8, state: *SessionState) !void {
|
||||
self.store.clearRetainingCapacity();
|
||||
self.order.clearRetainingCapacity();
|
||||
|
||||
// call_arena is safe here, because _setProperty will dupe the name
|
||||
// using the page's longer-living arena.
|
||||
const declarations = try CSSParser.parseDeclarations(page.call_arena, text);
|
||||
// using the state's longer-living arena.
|
||||
const declarations = try CSSParser.parseDeclarations(state.call_arena, text);
|
||||
|
||||
for (declarations) |decl| {
|
||||
if (!CSSValueAnalyzer.isValidPropertyName(decl.name)) continue;
|
||||
const priority: ?[]const u8 = if (decl.is_important) "important" else null;
|
||||
try self._setProperty(decl.name, decl.value, priority, page);
|
||||
try self._setProperty(decl.name, decl.value, priority, state);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -95,16 +90,7 @@ pub const CSSStyleDeclaration = struct {
|
||||
|
||||
// TODO should handle properly shorthand properties and canonical forms
|
||||
pub fn _getPropertyValue(self: *const CSSStyleDeclaration, name: []const u8) []const u8 {
|
||||
if (self.store.get(name)) |prop| {
|
||||
return prop.value;
|
||||
}
|
||||
|
||||
// default to everything being visible (unless it's been explicitly set)
|
||||
if (std.mem.eql(u8, name, "visibility")) {
|
||||
return "visible";
|
||||
}
|
||||
|
||||
return "";
|
||||
return if (self.store.get(name)) |prop| prop.value else "";
|
||||
}
|
||||
|
||||
pub fn _item(self: *const CSSStyleDeclaration, index: usize) []const u8 {
|
||||
@@ -119,27 +105,23 @@ pub const CSSStyleDeclaration = struct {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// safe to return, since it's in our page.arena
|
||||
// safe to return, since it's in our state.arena
|
||||
return prop.value.value;
|
||||
}
|
||||
|
||||
pub fn _setProperty(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, priority: ?[]const u8, page: *Page) !void {
|
||||
const owned_value = try page.arena.dupe(u8, value);
|
||||
pub fn _setProperty(self: *CSSStyleDeclaration, name: []const u8, value: []const u8, priority: ?[]const u8, state: *SessionState) !void {
|
||||
const owned_value = try state.arena.dupe(u8, value);
|
||||
const is_important = priority != null and std.ascii.eqlIgnoreCase(priority.?, "important");
|
||||
|
||||
const gop = try self.store.getOrPut(page.arena, name);
|
||||
const gop = try self.store.getOrPut(state.arena, name);
|
||||
if (!gop.found_existing) {
|
||||
const owned_name = try page.arena.dupe(u8, name);
|
||||
const owned_name = try state.arena.dupe(u8, name);
|
||||
gop.key_ptr.* = owned_name;
|
||||
try self.order.append(page.arena, owned_name);
|
||||
try self.order.append(state.arena, owned_name);
|
||||
}
|
||||
|
||||
gop.value_ptr.* = .{ .value = owned_value, .priority = is_important };
|
||||
}
|
||||
|
||||
pub fn named_get(self: *const CSSStyleDeclaration, name: []const u8, _: *bool) []const u8 {
|
||||
return self._getPropertyValue(name);
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
@@ -177,7 +159,6 @@ test "CSSOM.CSSStyleDeclaration" {
|
||||
.{ "style.setProperty('color', 'green')", "undefined" },
|
||||
.{ "style.getPropertyValue('color')", "green" },
|
||||
.{ "style.length", "4" },
|
||||
.{ "style.color", "green" },
|
||||
|
||||
.{ "style.setProperty('padding', '10px', 'important')", "undefined" },
|
||||
.{ "style.getPropertyValue('padding')", "10px" },
|
||||
@@ -239,9 +220,4 @@ test "CSSOM.CSSStyleDeclaration" {
|
||||
.{ "style.setProperty('border-bottom-left-radius', '5px')", "undefined" },
|
||||
.{ "style.getPropertyValue('border-bottom-left-radius')", "5px" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "style.visibility", "visible" },
|
||||
.{ "style.getPropertyValue('visibility')", "visible" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -101,7 +101,7 @@ pub const CharacterData = struct {
|
||||
// netsurf's CharacterData (text, comment) doesn't implement the
|
||||
// dom_node_get_attributes and thus will crash if we try to call nodeIsEqualNode.
|
||||
pub fn _isEqualNode(self: *parser.CharacterData, other_node: *parser.Node) !bool {
|
||||
if (try parser.nodeType(@alignCast(@ptrCast(self))) != try parser.nodeType(other_node)) {
|
||||
if (try parser.nodeType(@ptrCast(self)) != try parser.nodeType(other_node)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ const parser = @import("../netsurf.zig");
|
||||
|
||||
const CharacterData = @import("character_data.zig").CharacterData;
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
// https://dom.spec.whatwg.org/#interface-comment
|
||||
pub const Comment = struct {
|
||||
@@ -28,9 +28,9 @@ pub const Comment = struct {
|
||||
pub const prototype = *CharacterData;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn constructor(data: ?[]const u8, page: *const Page) !*parser.Comment {
|
||||
pub fn constructor(data: ?[]const u8, state: *const SessionState) !*parser.Comment {
|
||||
return parser.documentCreateComment(
|
||||
parser.documentHTMLToDocument(page.window.document),
|
||||
parser.documentHTMLToDocument(state.window.document),
|
||||
data orelse "",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const Node = @import("node.zig").Node;
|
||||
const NodeList = @import("nodelist.zig").NodeList;
|
||||
@@ -30,7 +30,6 @@ const css = @import("css.zig");
|
||||
|
||||
const Element = @import("element.zig").Element;
|
||||
const ElementUnion = @import("element.zig").Union;
|
||||
const Elements = @import("../html/elements.zig");
|
||||
const TreeWalker = @import("tree_walker.zig").TreeWalker;
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
@@ -43,15 +42,14 @@ pub const Document = struct {
|
||||
pub const prototype = *Node;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn constructor(page: *const Page) !*parser.DocumentHTML {
|
||||
pub fn constructor(state: *const SessionState) !*parser.DocumentHTML {
|
||||
const doc = try parser.documentCreateDocument(
|
||||
try parser.documentHTMLGetTitle(page.window.document),
|
||||
&Elements.createElement,
|
||||
try parser.documentHTMLGetTitle(state.window.document),
|
||||
);
|
||||
|
||||
// we have to work w/ document instead of html document.
|
||||
const ddoc = parser.documentHTMLToDocument(doc);
|
||||
const ccur = parser.documentHTMLToDocument(page.window.document);
|
||||
const ccur = parser.documentHTMLToDocument(state.window.document);
|
||||
try parser.documentSetDocumentURI(ddoc, try parser.documentGetDocumentURI(ccur));
|
||||
try parser.documentSetInputEncoding(ddoc, try parser.documentGetInputEncoding(ccur));
|
||||
|
||||
@@ -143,17 +141,18 @@ pub const Document = struct {
|
||||
pub fn _getElementsByTagName(
|
||||
self: *parser.Document,
|
||||
tag_name: []const u8,
|
||||
page: *Page,
|
||||
state: *SessionState,
|
||||
) !collection.HTMLCollection {
|
||||
return try collection.HTMLCollectionByTagName(page.arena, parser.documentToNode(self), tag_name, true);
|
||||
return try collection.HTMLCollectionByTagName(state.arena, parser.documentToNode(self), tag_name, true);
|
||||
}
|
||||
|
||||
pub fn _getElementsByClassName(
|
||||
self: *parser.Document,
|
||||
classNames: []const u8,
|
||||
page: *Page,
|
||||
state: *SessionState,
|
||||
) !collection.HTMLCollection {
|
||||
return try collection.HTMLCollectionByClassName(page.arena, parser.documentToNode(self), classNames, true);
|
||||
const allocator = state.arena;
|
||||
return try collection.HTMLCollectionByClassName(allocator, parser.documentToNode(self), classNames, true);
|
||||
}
|
||||
|
||||
pub fn _createDocumentFragment(self: *parser.Document) !*parser.DocumentFragment {
|
||||
@@ -215,18 +214,20 @@ pub const Document = struct {
|
||||
return 1;
|
||||
}
|
||||
|
||||
pub fn _querySelector(self: *parser.Document, selector: []const u8, page: *Page) !?ElementUnion {
|
||||
pub fn _querySelector(self: *parser.Document, selector: []const u8, state: *SessionState) !?ElementUnion {
|
||||
if (selector.len == 0) return null;
|
||||
|
||||
const n = try css.querySelector(page.arena, parser.documentToNode(self), selector);
|
||||
const allocator = state.arena;
|
||||
const n = try css.querySelector(allocator, parser.documentToNode(self), selector);
|
||||
|
||||
if (n == null) return null;
|
||||
|
||||
return try Element.toInterface(parser.nodeToElement(n.?));
|
||||
}
|
||||
|
||||
pub fn _querySelectorAll(self: *parser.Document, selector: []const u8, page: *Page) !NodeList {
|
||||
return css.querySelectorAll(page.arena, parser.documentToNode(self), selector);
|
||||
pub fn _querySelectorAll(self: *parser.Document, selector: []const u8, state: *SessionState) !NodeList {
|
||||
const allocator = state.arena;
|
||||
return css.querySelectorAll(allocator, parser.documentToNode(self), selector);
|
||||
}
|
||||
|
||||
pub fn _prepend(self: *parser.Document, nodes: []const Node.NodeOrText) !void {
|
||||
@@ -244,41 +245,11 @@ pub const Document = struct {
|
||||
pub fn _createTreeWalker(_: *parser.Document, root: *parser.Node, what_to_show: ?u32, filter: ?TreeWalker.TreeWalkerOpts) !TreeWalker {
|
||||
return try TreeWalker.init(root, what_to_show, filter);
|
||||
}
|
||||
|
||||
pub fn getActiveElement(self: *parser.Document, page: *Page) !?*parser.Element {
|
||||
if (page.getNodeState(@alignCast(@ptrCast(self)))) |state| {
|
||||
if (state.active_element) |ae| {
|
||||
return ae;
|
||||
}
|
||||
}
|
||||
|
||||
if (try parser.documentHTMLBody(page.window.document)) |body| {
|
||||
return @alignCast(@ptrCast(body));
|
||||
}
|
||||
|
||||
return try parser.documentGetDocumentElement(self);
|
||||
}
|
||||
|
||||
pub fn get_activeElement(self: *parser.Document, page: *Page) !?ElementUnion {
|
||||
const ae = (try getActiveElement(self, page)) orelse return null;
|
||||
return try Element.toInterface(ae);
|
||||
}
|
||||
|
||||
// TODO: some elements can't be focused, like if they're disabled
|
||||
// but there doesn't seem to be a generic way to check this. For example
|
||||
// we could look for the "disabled" attribute, but that's only meaningful
|
||||
// on certain types, and libdom's vtable doesn't seem to expose this.
|
||||
pub fn setFocus(self: *parser.Document, e: *parser.ElementHTML, page: *Page) !void {
|
||||
const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
|
||||
state.active_element = @ptrCast(e);
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser.DOM.Document" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{
|
||||
.url = "about:blank",
|
||||
});
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
@@ -442,12 +413,6 @@ test "Browser.DOM.Document" {
|
||||
},
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "document.activeElement === document.body", "true" },
|
||||
.{ "document.getElementById('link').focus()", "undefined" },
|
||||
.{ "document.activeElement === document.getElementById('link')", "true" },
|
||||
}, .{});
|
||||
|
||||
// this test breaks the doc structure, keep it at the end of the test
|
||||
// suite.
|
||||
try runner.testCases(&.{
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const Node = @import("node.zig").Node;
|
||||
|
||||
@@ -27,9 +27,9 @@ pub const DocumentFragment = struct {
|
||||
pub const prototype = *Node;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn constructor(page: *const Page) !*parser.DocumentFragment {
|
||||
pub fn constructor(state: *const SessionState) !*parser.DocumentFragment {
|
||||
return parser.documentCreateDocumentFragment(
|
||||
parser.documentHTMLToDocument(page.window.document),
|
||||
parser.documentHTMLToDocument(state.window.document),
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,6 @@ pub const Interfaces = .{
|
||||
EventTarget,
|
||||
DOMImplementation,
|
||||
NamedNodeMap,
|
||||
NamedNodeMap.Iterator,
|
||||
DOMTokenList.Interfaces,
|
||||
NodeList.Interfaces,
|
||||
Node.Node,
|
||||
|
||||
@@ -30,8 +30,8 @@ pub const DOMParser = struct {
|
||||
// TODO: Support XML
|
||||
return error.TypeError;
|
||||
}
|
||||
const Elements = @import("../html/elements.zig");
|
||||
return try parser.documentHTMLParseFromStr(string, &Elements.createElement);
|
||||
|
||||
return try parser.documentHTMLParseFromStr(string);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -19,12 +19,11 @@
|
||||
const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const css = @import("css.zig");
|
||||
const log = @import("../../log.zig");
|
||||
const dump = @import("../dump.zig");
|
||||
const collection = @import("html_collection.zig");
|
||||
const dump = @import("../dump.zig");
|
||||
const css = @import("css.zig");
|
||||
|
||||
const Node = @import("node.zig").Node;
|
||||
const Walker = @import("walker.zig").WalkerDepthFirst;
|
||||
@@ -32,6 +31,8 @@ const NodeList = @import("nodelist.zig").NodeList;
|
||||
const HTMLElem = @import("../html/elements.zig");
|
||||
pub const Union = @import("../html/elements.zig").Union;
|
||||
|
||||
const log = std.log.scoped(.element);
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#element
|
||||
pub const Element = struct {
|
||||
pub const Self = parser.Element;
|
||||
@@ -102,14 +103,14 @@ pub const Element = struct {
|
||||
return try parser.nodeGetAttributes(parser.elementToNode(self)) orelse unreachable;
|
||||
}
|
||||
|
||||
pub fn get_innerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(page.arena);
|
||||
pub fn get_innerHTML(self: *parser.Element, state: *SessionState) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(state.arena);
|
||||
try dump.writeChildren(parser.elementToNode(self), buf.writer());
|
||||
return buf.items;
|
||||
}
|
||||
|
||||
pub fn get_outerHTML(self: *parser.Element, page: *Page) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(page.arena);
|
||||
pub fn get_outerHTML(self: *parser.Element, state: *SessionState) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(state.arena);
|
||||
try dump.writeNode(parser.elementToNode(self), buf.writer());
|
||||
return buf.items;
|
||||
}
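As a hedged illustration only (the element, markup, and expected strings here are assumptions, not cases from the diff), the innerHTML/outerHTML split above could be exercised in the runner's test style used elsewhere in this file:

    // Hypothetical cases: innerHTML serializes only the children (writeChildren),
    // while outerHTML serializes the element itself as well (writeNode).
    try runner.testCases(&.{
        .{ "let el2 = document.createElement('p'); el2.innerHTML = '<b>hi</b>'; el2.innerHTML", "<b>hi</b>" },
        .{ "el2.outerHTML", "<p><b>hi</b></p>" },
    }, .{});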
|
||||
@@ -128,26 +129,26 @@ pub const Element = struct {
|
||||
|
||||
// append children to the node
|
||||
const ln = try parser.nodeListLength(children);
|
||||
for (0..ln) |_| {
|
||||
// always index 0, because nodeAppendChild moves the node out of
// the nodeList and into the new tree
|
||||
const child = try parser.nodeListItem(children, 0) orelse continue;
|
||||
var i: u32 = 0;
|
||||
while (i < ln) {
|
||||
defer i += 1;
|
||||
const child = try parser.nodeListItem(children, i) orelse continue;
|
||||
_ = try parser.nodeAppendChild(node, child);
|
||||
}
|
||||
}
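A minimal sketch (not taken from either branch; it assumes the parser.* helpers behave as in the code above) of why one variant keeps reading index 0: when the source NodeList is live, appendChild reparents the child, the child drops out of the list, and the next remaining child shifts down to position 0.

    // Sketch only: drain a live NodeList by always taking the first remaining item.
    fn moveAllChildren(children: *parser.NodeList, node: *parser.Node) !void {
        const ln = try parser.nodeListLength(children);
        for (0..ln) |_| {
            // With a live list, item(0) is always the next remaining child.
            const child = try parser.nodeListItem(children, 0) orelse continue;
            _ = try parser.nodeAppendChild(node, child);
        }
    }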
|
||||
|
||||
// The closest() method of the Element interface traverses the element and its parents (heading toward the document root) until it finds a node that matches the specified CSS selector.
|
||||
// Returns the closest ancestor Element (or the element itself) that matches the selectors, or null if there is no such element.
|
||||
pub fn _closest(self: *parser.Element, selector: []const u8, page: *Page) !?*parser.Element {
|
||||
pub fn _closest(self: *parser.Element, selector: []const u8, state: *SessionState) !?*parser.Element {
|
||||
const cssParse = @import("../css/css.zig").parse;
|
||||
const CssNodeWrap = @import("../css/libdom.zig").Node;
|
||||
const select = try cssParse(page.call_arena, selector, .{});
|
||||
const select = try cssParse(state.call_arena, selector, .{});
|
||||
|
||||
var current: CssNodeWrap = .{ .node = parser.elementToNode(self) };
|
||||
while (true) {
|
||||
if (try select.match(current)) {
|
||||
if (!current.isElement()) {
|
||||
log.err(.browser, "closest invalid type", .{ .type = try current.tag() });
|
||||
log.err("closest: is not an element: {s}", .{try current.tag()});
|
||||
return null;
|
||||
}
|
||||
return parser.nodeToElement(current.node);
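A hypothetical usage in the runner's test style (the id and selectors are assumptions, not taken from the test document):

    // closest() checks the element itself first, then walks ancestors; no match gives null.
    try runner.testCases(&.{
        .{ "document.getElementById('content').closest('div') !== null", "true" },
        .{ "document.getElementById('content').closest('.does-not-exist')", "null" },
    }, .{});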
|
||||
@@ -249,10 +250,10 @@ pub const Element = struct {
|
||||
pub fn _getElementsByTagName(
|
||||
self: *parser.Element,
|
||||
tag_name: []const u8,
|
||||
page: *Page,
|
||||
state: *SessionState,
|
||||
) !collection.HTMLCollection {
|
||||
return try collection.HTMLCollectionByTagName(
|
||||
page.arena,
|
||||
state.arena,
|
||||
parser.elementToNode(self),
|
||||
tag_name,
|
||||
false,
|
||||
@@ -262,10 +263,10 @@ pub const Element = struct {
|
||||
pub fn _getElementsByClassName(
|
||||
self: *parser.Element,
|
||||
classNames: []const u8,
|
||||
page: *Page,
|
||||
state: *SessionState,
|
||||
) !collection.HTMLCollection {
|
||||
return try collection.HTMLCollectionByClassName(
|
||||
page.arena,
|
||||
state.arena,
|
||||
parser.elementToNode(self),
|
||||
classNames,
|
||||
false,
|
||||
@@ -328,18 +329,18 @@ pub const Element = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _querySelector(self: *parser.Element, selector: []const u8, page: *Page) !?Union {
|
||||
pub fn _querySelector(self: *parser.Element, selector: []const u8, state: *SessionState) !?Union {
|
||||
if (selector.len == 0) return null;
|
||||
|
||||
const n = try css.querySelector(page.arena, parser.elementToNode(self), selector);
|
||||
const n = try css.querySelector(state.arena, parser.elementToNode(self), selector);
|
||||
|
||||
if (n == null) return null;
|
||||
|
||||
return try toInterface(parser.nodeToElement(n.?));
|
||||
}
|
||||
|
||||
pub fn _querySelectorAll(self: *parser.Element, selector: []const u8, page: *Page) !NodeList {
|
||||
return css.querySelectorAll(page.arena, parser.elementToNode(self), selector);
|
||||
pub fn _querySelectorAll(self: *parser.Element, selector: []const u8, state: *SessionState) !NodeList {
|
||||
return css.querySelectorAll(state.arena, parser.elementToNode(self), selector);
|
||||
}
|
||||
|
||||
pub fn _prepend(self: *parser.Element, nodes: []const Node.NodeOrText) !void {
|
||||
@@ -366,56 +367,46 @@ pub const Element = struct {
|
||||
|
||||
// A DOMRect object providing information about the size of an element and its position relative to the viewport.
|
||||
// Returns a zeroed DOMRect object if the element ends up detached from the main window
|
||||
pub fn _getBoundingClientRect(self: *parser.Element, page: *Page) !DOMRect {
|
||||
pub fn _getBoundingClientRect(self: *parser.Element, state: *SessionState) !DOMRect {
|
||||
// Since we render lazily, we need to do this check. We could store the renderer in a viewport so it could cache these, but that would require tracking changes.
|
||||
if (!try page.isNodeAttached(parser.elementToNode(self))) {
|
||||
const root = try parser.nodeGetRootNode(parser.elementToNode(self));
|
||||
if (root != parser.documentToNode(parser.documentHTMLToDocument(state.window.document))) {
|
||||
return DOMRect{ .x = 0, .y = 0, .width = 0, .height = 0 };
|
||||
}
|
||||
return page.renderer.getRect(self);
|
||||
return state.renderer.getRect(self);
|
||||
}
|
||||
|
||||
// Returns a collection of DOMRect objects that indicate the bounding rectangles for each CSS border box in a client.
|
||||
// We do not render, so this always returns only the element's bounding rect.
|
||||
// Returns an empty array if the element ends up detached from the main window
|
||||
pub fn _getClientRects(self: *parser.Element, page: *Page) ![]DOMRect {
|
||||
if (!try page.isNodeAttached(parser.elementToNode(self))) {
|
||||
pub fn _getClientRects(self: *parser.Element, state: *SessionState) ![]DOMRect {
|
||||
const root = try parser.nodeGetRootNode(parser.elementToNode(self));
|
||||
if (root != parser.documentToNode(parser.documentHTMLToDocument(state.window.document))) {
|
||||
return &.{};
|
||||
}
|
||||
const heap_ptr = try page.call_arena.create(DOMRect);
|
||||
heap_ptr.* = try page.renderer.getRect(self);
|
||||
const heap_ptr = try state.call_arena.create(DOMRect);
|
||||
heap_ptr.* = try state.renderer.getRect(self);
|
||||
return heap_ptr[0..1];
|
||||
}
|
||||
|
||||
pub fn get_clientWidth(_: *parser.Element, page: *Page) u32 {
|
||||
return page.renderer.width();
|
||||
pub fn get_clientWidth(_: *parser.Element, state: *SessionState) u32 {
|
||||
return state.renderer.width();
|
||||
}
|
||||
|
||||
pub fn get_clientHeight(_: *parser.Element, page: *Page) u32 {
|
||||
return page.renderer.height();
|
||||
pub fn get_clientHeight(_: *parser.Element, state: *SessionState) u32 {
|
||||
return state.renderer.height();
|
||||
}
|
||||
|
||||
pub fn _matches(self: *parser.Element, selectors: []const u8, page: *Page) !bool {
|
||||
pub fn _matches(self: *parser.Element, selectors: []const u8, state: *SessionState) !bool {
|
||||
const cssParse = @import("../css/css.zig").parse;
|
||||
const CssNodeWrap = @import("../css/libdom.zig").Node;
|
||||
const s = try cssParse(page.call_arena, selectors, .{});
|
||||
const s = try cssParse(state.call_arena, selectors, .{});
|
||||
return s.match(CssNodeWrap{ .node = parser.elementToNode(self) });
|
||||
}
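Again purely as a hedged illustration (selector support and the id are assumptions):

    // matches() parses the selector with the same css parse path as closest(), but
    // only tests the element itself.
    try runner.testCases(&.{
        .{ "document.getElementById('content').matches('#content')", "true" },
        .{ "document.getElementById('content').matches('.nope')", "false" },
    }, .{});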
|
||||
|
||||
pub fn _scrollIntoViewIfNeeded(_: *parser.Element, center_if_needed: ?bool) void {
|
||||
_ = center_if_needed;
|
||||
}
|
||||
|
||||
const CheckVisibilityOpts = struct {
|
||||
contentVisibilityAuto: bool,
|
||||
opacityProperty: bool,
|
||||
visibilityProperty: bool,
|
||||
};
|
||||
|
||||
pub fn _checkVisibility(self: *parser.Element, opts: ?CheckVisibilityOpts) bool {
|
||||
_ = self;
|
||||
_ = opts;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
// Tests
|
||||
@@ -468,17 +459,8 @@ test "Browser.DOM.Element" {
|
||||
.{ "let a = document.getElementById('content')", "undefined" },
|
||||
.{ "a.hasAttributes()", "true" },
|
||||
.{ "a.attributes.length", "1" },
|
||||
|
||||
.{ "a.getAttribute('id')", "content" },
|
||||
.{ "a.attributes['id'].value", "content" },
|
||||
.{
|
||||
\\ let x = '';
|
||||
\\ for (const attr of a.attributes) {
|
||||
\\ x += attr.name + '=' + attr.value;
|
||||
\\ }
|
||||
\\ x;
|
||||
,
|
||||
"id=content",
|
||||
},
|
||||
|
||||
.{ "a.hasAttribute('foo')", "false" },
|
||||
.{ "a.getAttribute('foo')", "null" },
|
||||
@@ -660,10 +642,4 @@ test "Browser.DOM.Element" {
|
||||
.{ "a1.after('over 9000', a1_a);", "undefined" },
|
||||
.{ "after_container.innerHTML", "<div></div>over 9000<p></p>" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "var div1 = document.createElement('div');", null },
|
||||
.{ "div1.innerHTML = \" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>\"", null },
|
||||
.{ "div1.getElementsByTagName('a').length", "1" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const EventHandler = @import("../events/event.zig").EventHandler;
|
||||
|
||||
@@ -33,55 +33,53 @@ pub const EventTarget = struct {
|
||||
pub const Self = parser.EventTarget;
|
||||
pub const Exception = DOMException;
|
||||
|
||||
pub fn toInterface(et: *parser.EventTarget, page: *Page) !Union {
|
||||
// Not all targets are *parser.Nodes. page.zig emits a "load" event
|
||||
// where the target is a Window, which cannot be cast directly to a node.
|
||||
// Ideally, we'd remove this duality. Failing that, we'll need to embed
|
||||
// data into the *parser.EventTarget should we need this for other types.
|
||||
// For now, for the Window, which is a singleton, we can do this:
|
||||
if (@intFromPtr(et) == @intFromPtr(&page.window.base)) {
|
||||
return .{ .Window = &page.window };
|
||||
}
|
||||
pub fn toInterface(et: *parser.EventTarget) !Union {
|
||||
// NOTE: for now we assume that all EventTargets are Nodes
|
||||
// TODO: handle other types (eg. Window)
|
||||
return Nod.Node.toInterface(@as(*parser.Node, @ptrCast(et)));
|
||||
}
|
||||
|
||||
// JS funcs
|
||||
// --------
|
||||
pub fn _addEventListener(
|
||||
self: *parser.EventTarget,
|
||||
typ: []const u8,
|
||||
listener: EventHandler.Listener,
|
||||
opts: ?EventHandler.Opts,
|
||||
page: *Page,
|
||||
) !void {
|
||||
_ = try EventHandler.register(page.arena, self, typ, listener, opts);
|
||||
}
|
||||
|
||||
const RemoveEventListenerOpts = union(enum) {
|
||||
const AddEventListenerOpts = union(enum) {
|
||||
opts: Opts,
|
||||
capture: bool,
|
||||
|
||||
const Opts = struct {
|
||||
capture: ?bool,
|
||||
once: ?bool, // currently does nothing
|
||||
passive: ?bool, // currently does nothing
|
||||
signal: ?bool, // currently does nothing
|
||||
};
|
||||
};
|
||||
|
||||
pub fn _removeEventListener(
|
||||
pub fn _addEventListener(
|
||||
self: *parser.EventTarget,
|
||||
typ: []const u8,
|
||||
listener: EventHandler.Listener,
|
||||
opts_: ?RemoveEventListenerOpts,
|
||||
cbk: Env.Function,
|
||||
opts_: ?AddEventListenerOpts,
|
||||
state: *SessionState,
|
||||
) !void {
|
||||
var capture = false;
|
||||
if (opts_) |opts| {
|
||||
capture = switch (opts) {
|
||||
.capture => |c| c,
|
||||
.opts => |o| o.capture orelse false,
|
||||
};
|
||||
switch (opts) {
|
||||
.capture => |c| capture = c,
|
||||
.opts => |o| {
|
||||
// Done this way so that, for common cases that _only_ set
|
||||
// capture, i.e. {capture: true}, it works.
// But for any case that sets any of the other flags, we
|
||||
// error. If we don't error, this function call would succeed
|
||||
// but the behavior might be wrong. At this point, it's
|
||||
// better to be explicit and error.
|
||||
if (o.once orelse false) return error.NotImplemented;
|
||||
if (o.signal orelse false) return error.NotImplemented;
|
||||
if (o.passive orelse false) return error.NotImplemented;
|
||||
capture = o.capture orelse false;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const cbk = (try listener.callback(self)) orelse return;
|
||||
|
||||
// check if event target has already this listener
|
||||
const lst = try parser.eventTargetHasListener(
|
||||
self,
|
||||
@@ -89,6 +87,35 @@ pub const EventTarget = struct {
|
||||
capture,
|
||||
cbk.id,
|
||||
);
|
||||
if (lst != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const eh = try EventHandler.init(state.arena, try cbk.withThis(self));
|
||||
|
||||
try parser.eventTargetAddEventListener(
|
||||
self,
|
||||
typ,
|
||||
&eh.node,
|
||||
capture,
|
||||
);
|
||||
}
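The options handling above mirrors the DOM signature in which addEventListener's third argument is either a plain boolean or an options object. A standalone sketch of that shape (written for illustration; it is not code from either branch):

    // A union(enum) lets the binding accept both `true` and `{capture: true, ...}`.
    const ListenerOpts = union(enum) {
        capture: bool,
        opts: struct { capture: ?bool = null, once: ?bool = null },

        fn wantsCapture(self: @This()) bool {
            return switch (self) {
                .capture => |c| c,
                .opts => |o| o.capture orelse false,
            };
        }
    };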
|
||||
|
||||
pub fn _removeEventListener(
|
||||
self: *parser.EventTarget,
|
||||
typ: []const u8,
|
||||
cbk: Env.Function,
|
||||
capture: ?bool,
|
||||
// TODO: handle EventListenerOptions
|
||||
// see #https://github.com/lightpanda-io/jsruntime-lib/issues/114
|
||||
) !void {
|
||||
// check if event target has already this listener
|
||||
const lst = try parser.eventTargetHasListener(
|
||||
self,
|
||||
typ,
|
||||
capture orelse false,
|
||||
cbk.id,
|
||||
);
|
||||
if (lst == null) {
|
||||
return;
|
||||
}
|
||||
@@ -98,13 +125,17 @@ pub const EventTarget = struct {
|
||||
self,
|
||||
typ,
|
||||
lst.?,
|
||||
capture,
|
||||
capture orelse false,
|
||||
);
|
||||
}
|
||||
|
||||
pub fn _dispatchEvent(self: *parser.EventTarget, event: *parser.Event) !bool {
|
||||
return try parser.eventTargetDispatchEvent(self, event);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *parser.EventTarget, state: *SessionState) void {
|
||||
parser.eventTargetRemoveAllEventListeners(self, state.arena) catch unreachable;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
@@ -217,21 +248,4 @@ test "Browser.DOM.EventTarget" {
|
||||
.{ "phase", "3" },
|
||||
.{ "cur.getAttribute('id')", "content" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "const obj1 = {calls: 0, handleEvent: function() { this.calls += 1; } };", null },
|
||||
.{ "content.addEventListener('he', obj1);", null },
|
||||
.{ "content.dispatchEvent(new Event('he'));", null },
|
||||
.{ "obj1.calls", "1" },
|
||||
|
||||
.{ "content.removeEventListener('he', obj1);", null },
|
||||
.{ "content.dispatchEvent(new Event('he'));", null },
|
||||
.{ "obj1.calls", "1" },
|
||||
}, .{});
|
||||
|
||||
// doesn't crash on null receiver
|
||||
try runner.testCases(&.{
|
||||
.{ "content.addEventListener('he2', null);", null },
|
||||
.{ "content.dispatchEvent(new Event('he2'));", null },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -432,8 +432,7 @@ pub const HTMLCollection = struct {
|
||||
for (0..len) |i| {
|
||||
const node = try self.item(@intCast(i)) orelse unreachable;
|
||||
const e = @as(*parser.Element, @ptrCast(node));
|
||||
const as_interface = try Element.toInterface(e);
|
||||
try js_this.setIndex(@intCast(i), as_interface, .{});
|
||||
try js_this.setIndex(@intCast(i), e, .{});
|
||||
|
||||
if (try item_name(e)) |name| {
|
||||
// Even though an entry might have an empty id, the spec says
|
||||
@@ -441,7 +440,7 @@ pub const HTMLCollection = struct {
|
||||
if (name.len > 0) {
|
||||
// Named fields should not be enumerable (it is defined with
|
||||
// the LegacyUnenumerableNamedProperties flag.)
|
||||
try js_this.set(name, as_interface, .{ .DONT_ENUM = true });
|
||||
try js_this.set(name, e, .{ .DONT_ENUM = true });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,8 +42,7 @@ pub const DOMImplementation = struct {
|
||||
}
|
||||
|
||||
pub fn _createHTMLDocument(_: *DOMImplementation, title: ?[]const u8) !*parser.DocumentHTML {
|
||||
const Elements = @import("../html/elements.zig");
|
||||
return try parser.domImplementationCreateHTMLDocument(title, &Elements.createElement);
|
||||
return try parser.domImplementationCreateHTMLDocument(title);
|
||||
}
|
||||
|
||||
pub fn _hasFeature(_: *DOMImplementation) bool {
|
||||
|
||||
@@ -18,9 +18,8 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Element = @import("element.zig").Element;
|
||||
@@ -30,6 +29,8 @@ pub const Interfaces = .{
|
||||
IntersectionObserverEntry,
|
||||
};
|
||||
|
||||
const log = std.log.scoped(.events);
|
||||
|
||||
// This is supposed to listen to change between the root and observation targets.
|
||||
// However, our renderer stores everything as 1-pixel-sized boxes in a long row that never changes.
|
||||
// As such, there are no changes to intersections between the root and any target.
|
||||
@@ -39,19 +40,19 @@ pub const Interfaces = .{
|
||||
// The returned Entries are phony, they always indicate full intersection.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserver
|
||||
pub const IntersectionObserver = struct {
|
||||
page: *Page,
|
||||
callback: Env.Function,
|
||||
options: IntersectionObserverOptions,
|
||||
state: *SessionState,
|
||||
|
||||
observed_entries: std.ArrayListUnmanaged(IntersectionObserverEntry),
|
||||
|
||||
// new IntersectionObserver(callback)
|
||||
// new IntersectionObserver(callback, options) [not supported yet]
|
||||
pub fn constructor(callback: Env.Function, options_: ?IntersectionObserverOptions, page: *Page) !IntersectionObserver {
|
||||
pub fn constructor(callback: Env.Function, options_: ?IntersectionObserverOptions, state: *SessionState) !IntersectionObserver {
|
||||
var options = IntersectionObserverOptions{
|
||||
.root = parser.documentToNode(parser.documentHTMLToDocument(page.window.document)),
|
||||
.root = parser.documentToNode(parser.documentHTMLToDocument(state.window.document)),
|
||||
.rootMargin = "0px 0px 0px 0px",
|
||||
.threshold = .{ .single = 0.0 },
|
||||
.threshold = &.{0.0},
|
||||
};
|
||||
if (options_) |*o| {
|
||||
if (o.root) |root| {
|
||||
@@ -60,9 +61,9 @@ pub const IntersectionObserver = struct {
|
||||
}
|
||||
|
||||
return .{
|
||||
.page = page,
|
||||
.callback = callback,
|
||||
.options = options,
|
||||
.state = state,
|
||||
.observed_entries = .{},
|
||||
};
|
||||
}
|
||||
@@ -78,19 +79,16 @@ pub const IntersectionObserver = struct {
|
||||
}
|
||||
}
|
||||
|
||||
try self.observed_entries.append(self.page.arena, .{
|
||||
.page = self.page,
|
||||
try self.observed_entries.append(self.state.arena, .{
|
||||
.state = self.state,
|
||||
.target = target_element,
|
||||
.options = &self.options,
|
||||
});
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
self.callback.tryCall(void, .{self.observed_entries.items}, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "intersection observer",
|
||||
});
|
||||
log.err("intersection observer callback error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
}
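Because the callback is invoked synchronously from observe() above, a hypothetical test (the id and the runner behavior are assumptions) could see the phony always-intersecting entry immediately:

    // observe() appends one entry and calls the callback right away.
    try runner.testCases(&.{
        .{ "var ioHits = 0; new IntersectionObserver((es) => { ioHits = es.length; }).observe(document.getElementById('content'))", null },
        .{ "ioHits", "1" },
    }, .{});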
|
||||
|
||||
@@ -111,24 +109,19 @@ pub const IntersectionObserver = struct {
|
||||
const IntersectionObserverOptions = struct {
|
||||
root: ?*parser.Node, // Element or Document
|
||||
rootMargin: ?[]const u8,
|
||||
threshold: ?Threshold,
|
||||
|
||||
const Threshold = union(enum) {
|
||||
single: f32,
|
||||
list: []const f32,
|
||||
};
|
||||
threshold: ?[]const f32,
|
||||
};
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserverEntry
|
||||
// https://w3c.github.io/IntersectionObserver/#intersection-observer-entry
|
||||
pub const IntersectionObserverEntry = struct {
|
||||
page: *Page,
|
||||
state: *SessionState,
|
||||
target: *parser.Element,
|
||||
options: *IntersectionObserverOptions,
|
||||
|
||||
// Returns the bounds rectangle of the target element as a DOMRectReadOnly. The bounds are computed as described in the documentation for Element.getBoundingClientRect().
|
||||
pub fn get_boundingClientRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
|
||||
return Element._getBoundingClientRect(self.target, self.page);
|
||||
return Element._getBoundingClientRect(self.target, self.state);
|
||||
}
|
||||
|
||||
// Returns the ratio of the intersectionRect to the boundingClientRect.
|
||||
@@ -138,14 +131,10 @@ pub const IntersectionObserverEntry = struct {
|
||||
|
||||
// Returns a DOMRectReadOnly representing the target's visible area.
|
||||
pub fn get_intersectionRect(self: *const IntersectionObserverEntry) !Element.DOMRect {
|
||||
return Element._getBoundingClientRect(self.target, self.page);
|
||||
return Element._getBoundingClientRect(self.target, self.state);
|
||||
}
|
||||
|
||||
// A Boolean value which is true if the target element intersects with the
|
||||
// intersection observer's root. If this is true, then, the
|
||||
// IntersectionObserverEntry describes a transition into a state of
|
||||
// intersection; if it's false, then you know the transition is from
|
||||
// intersecting to not-intersecting.
|
||||
// A Boolean value which is true if the target element intersects with the intersection observer's root. If this is true, then, the IntersectionObserverEntry describes a transition into a state of intersection; if it's false, then you know the transition is from intersecting to not-intersecting.
|
||||
pub fn get_isIntersecting(_: *const IntersectionObserverEntry) bool {
|
||||
return true;
|
||||
}
|
||||
@@ -153,8 +142,8 @@ pub const IntersectionObserverEntry = struct {
|
||||
// Returns a DOMRectReadOnly for the intersection observer's root.
|
||||
pub fn get_rootBounds(self: *const IntersectionObserverEntry) !Element.DOMRect {
|
||||
const root = self.options.root.?;
|
||||
if (@intFromPtr(root) == @intFromPtr(self.page.window.document)) {
|
||||
return self.page.renderer.boundingRect();
|
||||
if (@intFromPtr(root) == @intFromPtr(self.state.window.document)) {
|
||||
return self.state.renderer.boundingRect();
|
||||
}
|
||||
|
||||
const root_type = try parser.nodeType(root);
|
||||
@@ -169,7 +158,7 @@ pub const IntersectionObserverEntry = struct {
|
||||
else => return error.InvalidState,
|
||||
}
|
||||
|
||||
return Element._getBoundingClientRect(element, self.page);
|
||||
return Element._getBoundingClientRect(element, self.state);
|
||||
}
|
||||
|
||||
// The Element whose intersection with the root changed.
|
||||
|
||||
@@ -19,9 +19,8 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const NodeList = @import("nodelist.zig").NodeList;
|
||||
@@ -33,20 +32,22 @@ pub const Interfaces = .{
|
||||
|
||||
const Walker = @import("../dom/walker.zig").WalkerChildren;
|
||||
|
||||
const log = std.log.scoped(.events);
|
||||
|
||||
// WEB IDL https://dom.spec.whatwg.org/#interface-mutationobserver
|
||||
pub const MutationObserver = struct {
|
||||
cbk: Env.Function,
|
||||
arena: Allocator,
|
||||
|
||||
// List of records which were observed. When the call scope ends, we need to
|
||||
// List of records which were observed. When the scope ends, we need to
|
||||
// execute our callback with it.
|
||||
observed: std.ArrayListUnmanaged(*MutationRecord),
|
||||
|
||||
pub fn constructor(cbk: Env.Function, page: *Page) !MutationObserver {
|
||||
pub fn constructor(cbk: Env.Function, state: *SessionState) !MutationObserver {
|
||||
return .{
|
||||
.cbk = cbk,
|
||||
.observed = .{},
|
||||
.arena = page.arena,
|
||||
.arena = state.arena,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -63,13 +64,13 @@ pub const MutationObserver = struct {
|
||||
|
||||
// register node's events
|
||||
if (options.childList or options.subtree) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMNodeInserted",
|
||||
&observer.event_node,
|
||||
false,
|
||||
);
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMNodeRemoved",
|
||||
&observer.event_node,
|
||||
@@ -77,7 +78,7 @@ pub const MutationObserver = struct {
|
||||
);
|
||||
}
|
||||
if (options.attr()) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMAttrModified",
|
||||
&observer.event_node,
|
||||
@@ -85,7 +86,7 @@ pub const MutationObserver = struct {
|
||||
);
|
||||
}
|
||||
if (options.cdata()) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMCharacterDataModified",
|
||||
&observer.event_node,
|
||||
@@ -93,7 +94,7 @@ pub const MutationObserver = struct {
|
||||
);
|
||||
}
|
||||
if (options.subtree) {
|
||||
_ = try parser.eventTargetAddEventListener(
|
||||
try parser.eventTargetAddEventListener(
|
||||
parser.toEventTarget(parser.Node, node),
|
||||
"DOMSubtreeModified",
|
||||
&observer.event_node,
|
||||
@@ -114,11 +115,8 @@ pub const MutationObserver = struct {
|
||||
const records = [_]MutationRecord{r.*};
|
||||
var result: Env.Function.Result = undefined;
|
||||
self.cbk.tryCall(void, .{records}, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "mutation observer",
|
||||
});
|
||||
log.err("mutation observer callback error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -245,16 +243,15 @@ const Observer = struct {
|
||||
|
||||
fn handle(en: *parser.EventNode, event: *parser.Event) void {
|
||||
const self: *Observer = @fieldParentPtr("event_node", en);
|
||||
self._handle(event) catch |err| {
|
||||
log.err(.web_api, "handle error", .{ .err = err, .source = "mutation observer" });
|
||||
};
|
||||
}
|
||||
|
||||
fn _handle(self: *Observer, event: *parser.Event) !void {
|
||||
var mutation_observer = self.mutation_observer;
|
||||
|
||||
const node = blk: {
|
||||
const event_target = try parser.eventTarget(event) orelse return;
|
||||
const event_target = parser.eventTarget(event) catch |e| {
|
||||
log.err("mutation observer event target: {any}", .{e});
|
||||
return;
|
||||
} orelse return;
|
||||
|
||||
break :blk parser.eventTargetToNode(event_target);
|
||||
};
|
||||
|
||||
@@ -263,7 +260,10 @@ const Observer = struct {
|
||||
}
|
||||
|
||||
const event_type = blk: {
|
||||
const t = try parser.eventType(event);
|
||||
const t = parser.eventType(event) catch |e| {
|
||||
log.err("mutation observer event type: {any}", .{e});
|
||||
return;
|
||||
};
|
||||
break :blk std.meta.stringToEnum(MutationEventType, t) orelse return;
|
||||
};
|
||||
|
||||
@@ -273,7 +273,9 @@ const Observer = struct {
|
||||
.target = self.node,
|
||||
.type = event_type.recordType(),
|
||||
};
|
||||
try mutation_observer.observed.append(arena, &self.record.?);
|
||||
mutation_observer.observed.append(arena, &self.record.?) catch |err| {
|
||||
log.err("mutation_observer append: {}", .{err});
|
||||
};
|
||||
}
|
||||
|
||||
var record = &self.record.?;
|
||||
@@ -293,12 +295,18 @@ const Observer = struct {
|
||||
},
|
||||
.DOMNodeInserted => {
|
||||
if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
|
||||
try record.added_nodes.append(arena, related_node);
|
||||
record.added_nodes.append(arena, related_node) catch |e| {
|
||||
log.err("mutation event handler error: {any}", .{e});
|
||||
return;
|
||||
};
|
||||
}
|
||||
},
|
||||
.DOMNodeRemoved => {
|
||||
if (parser.mutationEventRelatedNode(mutation_event) catch null) |related_node| {
|
||||
try record.removed_nodes.append(arena, related_node);
|
||||
record.removed_nodes.append(arena, related_node) catch |e| {
|
||||
log.err("mutation event handler error: {any}", .{e});
|
||||
return;
|
||||
};
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ pub const NamedNodeMap = struct {
|
||||
pub const Self = parser.NamedNodeMap;
|
||||
|
||||
pub const Exception = DOMException;
|
||||
pub const Iterator = NamedNodeMapIterator;
|
||||
|
||||
// TODO implement LegacyUnenumerableNamedProperties.
|
||||
// https://webidl.spec.whatwg.org/#LegacyUnenumerableNamedProperties
|
||||
@@ -71,48 +70,11 @@ pub const NamedNodeMap = struct {
|
||||
}
|
||||
|
||||
pub fn indexed_get(self: *parser.NamedNodeMap, index: u32, has_value: *bool) !*parser.Attribute {
|
||||
return (try _item(self, index)) orelse {
|
||||
return (try NamedNodeMap._item(self, index)) orelse {
|
||||
has_value.* = false;
|
||||
return undefined;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn named_get(self: *parser.NamedNodeMap, name: []const u8, has_value: *bool) !*parser.Attribute {
|
||||
return (try _getNamedItem(self, name)) orelse {
|
||||
has_value.* = false;
|
||||
return undefined;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn _symbol_iterator(self: *parser.NamedNodeMap) NamedNodeMapIterator {
|
||||
return .{ .map = self };
|
||||
}
|
||||
};
|
||||
|
||||
pub const NamedNodeMapIterator = struct {
|
||||
index: u32 = 0,
|
||||
map: *parser.NamedNodeMap,
|
||||
|
||||
pub const Return = struct {
|
||||
done: bool,
|
||||
value: ?*parser.Attribute,
|
||||
};
|
||||
|
||||
pub fn _next(self: *NamedNodeMapIterator) !Return {
|
||||
const e = try NamedNodeMap._item(self.map, self.index);
|
||||
if (e == null) {
|
||||
return .{
|
||||
.value = null,
|
||||
.done = true,
|
||||
};
|
||||
}
|
||||
|
||||
self.index += 1;
|
||||
return .{
|
||||
.value = e,
|
||||
.done = false,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
// Tests
|
||||
@@ -131,8 +93,5 @@ test "Browser.DOM.NamedNodeMap" {
|
||||
.{ "a.getNamedItem('id')", "[object Attr]" },
|
||||
.{ "a.getNamedItem('foo')", "null" },
|
||||
.{ "a.setNamedItem(a.getNamedItem('id'))", "[object Attr]" },
|
||||
.{ "a['id'].name", "id" },
|
||||
.{ "a['id'].value", "content" },
|
||||
.{ "a['other']", "undefined" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -18,11 +18,10 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const generate = @import("../../runtime/generate.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
const EventTarget = @import("event_target.zig").EventTarget;
|
||||
|
||||
// DOM
|
||||
@@ -42,6 +41,8 @@ const Walker = @import("walker.zig").WalkerDepthFirst;
|
||||
const HTML = @import("../html/html.zig");
|
||||
const HTMLElem = @import("../html/elements.zig");
|
||||
|
||||
const log = std.log.scoped(.node);
|
||||
|
||||
// Node interfaces
|
||||
pub const Interfaces = .{
|
||||
Attr,
|
||||
@@ -269,7 +270,7 @@ pub const Node = struct {
|
||||
// - An Element that is not attached to a document or a shadow tree will return the root of the DOM tree it belongs to
|
||||
pub fn _getRootNode(self: *parser.Node, options: ?struct { composed: bool = false }) !Union {
|
||||
if (options) |options_| if (options_.composed) {
|
||||
log.warn(.web_api, "not implemented", .{ .feature = "getRootNode composed" });
|
||||
log.warn("getRootNode composed is not implemented yet", .{});
|
||||
};
|
||||
return try Node.toInterface(try parser.nodeGetRootNode(self));
|
||||
}
|
||||
@@ -278,8 +279,8 @@ pub const Node = struct {
|
||||
return try parser.nodeHasChildNodes(self);
|
||||
}
|
||||
|
||||
pub fn get_childNodes(self: *parser.Node, page: *Page) !NodeList {
|
||||
const allocator = page.arena;
|
||||
pub fn get_childNodes(self: *parser.Node, state: *SessionState) !NodeList {
|
||||
const allocator = state.arena;
|
||||
var list: NodeList = .{};
|
||||
|
||||
var n = try parser.nodeFirstChild(self) orelse return list;
|
||||
@@ -289,11 +290,8 @@ pub const Node = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _insertBefore(self: *parser.Node, new_node: *parser.Node, ref_node_: ?*parser.Node) !Union {
|
||||
if (ref_node_) |ref_node| {
|
||||
return Node.toInterface(try parser.nodeInsertBefore(self, new_node, ref_node));
|
||||
}
|
||||
return _appendChild(self, new_node);
|
||||
pub fn _insertBefore(self: *parser.Node, new_node: *parser.Node, ref_node: *parser.Node) !*parser.Node {
|
||||
return try parser.nodeInsertBefore(self, new_node, ref_node);
|
||||
}
|
||||
|
||||
pub fn _isDefaultNamespace(self: *parser.Node, namespace: ?[]const u8) !bool {
|
||||
@@ -496,7 +494,7 @@ pub const Node = struct {
|
||||
fn toNode(self: NodeOrText, doc: *parser.Document) !*parser.Node {
|
||||
return switch (self) {
|
||||
.node => |n| n,
|
||||
.text => |txt| @alignCast(@ptrCast(try parser.documentCreateTextNode(doc, txt))),
|
||||
.text => |txt| @ptrCast(try parser.documentCreateTextNode(doc, txt)),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -665,10 +663,6 @@ test "Browser.DOM.node" {
|
||||
.{ "let insertBefore = document.createElement('a')", "undefined" },
|
||||
.{ "link.insertBefore(insertBefore, text) !== undefined", "true" },
|
||||
.{ "link.firstChild.localName === 'a'", "true" },
|
||||
|
||||
.{ "let insertBefore2 = document.createElement('b')", null },
|
||||
.{ "link.insertBefore(insertBefore2, null).localName", "b" },
|
||||
.{ "link.childNodes[link.childNodes.length - 1].localName", "b" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
|
||||
const JsThis = @import("../env.zig").JsThis;
|
||||
@@ -29,6 +28,8 @@ const Node = @import("node.zig").Node;
|
||||
|
||||
const U32Iterator = @import("../iterator/iterator.zig").U32Iterator;
|
||||
|
||||
const log = std.log.scoped(.nodelist);
|
||||
|
||||
const DOMException = @import("exceptions.zig").DOMException;
|
||||
|
||||
pub const Interfaces = .{
|
||||
@@ -145,7 +146,8 @@ pub const NodeList = struct {
|
||||
const ii: u32 = @intCast(i);
|
||||
var result: Function.Result = undefined;
|
||||
cbk.tryCall(void, .{ n, ii, self }, &result) catch {
|
||||
log.debug(.user_script, "forEach callback", .{ .err = result.exception, .stack = result.stack });
|
||||
log.err("callback error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Node = @import("node.zig").Node;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
// https://dom.spec.whatwg.org/#processinginstruction
|
||||
pub const ProcessingInstruction = struct {
|
||||
@@ -39,9 +39,9 @@ pub const ProcessingInstruction = struct {
|
||||
// There's something wrong when we try to clone a ProcessingInstruction normally.
|
||||
// The resulting object can't be cast back into a node (it crashes). This is
|
||||
// a simple workaround.
|
||||
pub fn _cloneNode(self: *parser.ProcessingInstruction, _: ?bool, page: *Page) !*parser.ProcessingInstruction {
|
||||
pub fn _cloneNode(self: *parser.ProcessingInstruction, _: ?bool, state: *SessionState) !*parser.ProcessingInstruction {
|
||||
return try parser.documentCreateProcessingInstruction(
|
||||
@ptrCast(page.window.document),
|
||||
@ptrCast(state.window.document),
|
||||
try get_target(self),
|
||||
(try get_data(self)) orelse "",
|
||||
);
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const CharacterData = @import("character_data.zig").CharacterData;
|
||||
const CDATASection = @import("cdata_section.zig").CDATASection;
|
||||
@@ -32,9 +32,9 @@ pub const Text = struct {
|
||||
pub const prototype = *CharacterData;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn constructor(data: ?[]const u8, page: *const Page) !*parser.Text {
|
||||
pub fn constructor(data: ?[]const u8, state: *const SessionState) !*parser.Text {
|
||||
return parser.documentCreateTextNode(
|
||||
parser.documentHTMLToDocument(page.window.document),
|
||||
parser.documentHTMLToDocument(state.window.document),
|
||||
data orelse "",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const iterator = @import("../iterator/iterator.zig");
|
||||
|
||||
@@ -26,6 +25,8 @@ const Function = @import("../env.zig").Function;
|
||||
const JsObject = @import("../env.zig").JsObject;
|
||||
const DOMException = @import("exceptions.zig").DOMException;
|
||||
|
||||
const log = std.log.scoped(.token_list);
|
||||
|
||||
pub const Interfaces = .{
|
||||
DOMTokenList,
|
||||
DOMTokenListIterable,
|
||||
@@ -142,11 +143,8 @@ pub const DOMTokenList = struct {
|
||||
while (try entries._next()) |entry| {
|
||||
var result: Function.Result = undefined;
|
||||
cbk.tryCallWithThis(void, this_arg, .{ entry.@"1", entry.@"0", self }, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "tokenList foreach",
|
||||
});
|
||||
log.err("callback error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ const parser = @import("../netsurf.zig");
|
||||
|
||||
const NodeFilter = @import("node_filter.zig").NodeFilter;
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/TreeWalker
|
||||
pub const TreeWalker = struct {
|
||||
|
||||
@@ -28,32 +28,6 @@ pub fn writeHTML(doc: *parser.Document, writer: anytype) !void {
|
||||
try writer.writeAll("\n");
|
||||
}
|
||||
|
||||
// Spec: https://www.w3.org/TR/xml/#sec-prolog-dtd
|
||||
pub fn writeDocType(doc_type: *parser.DocumentType, writer: anytype) !void {
|
||||
try writer.writeAll("<!DOCTYPE ");
|
||||
try writer.writeAll(try parser.documentTypeGetName(doc_type));
|
||||
|
||||
const public_id = try parser.documentTypeGetPublicId(doc_type);
|
||||
const system_id = try parser.documentTypeGetSystemId(doc_type);
|
||||
if (public_id.len != 0 and system_id.len != 0) {
|
||||
try writer.writeAll(" PUBLIC \"");
|
||||
try writeEscapedAttributeValue(writer, public_id);
|
||||
try writer.writeAll("\" \"");
|
||||
try writeEscapedAttributeValue(writer, system_id);
|
||||
try writer.writeAll("\"");
|
||||
} else if (public_id.len != 0) {
|
||||
try writer.writeAll(" PUBLIC \"");
|
||||
try writeEscapedAttributeValue(writer, public_id);
|
||||
try writer.writeAll("\"");
|
||||
} else if (system_id.len != 0) {
|
||||
try writer.writeAll(" SYSTEM \"");
|
||||
try writeEscapedAttributeValue(writer, system_id);
|
||||
try writer.writeAll("\"");
|
||||
}
|
||||
// Internal subset is not implemented
|
||||
try writer.writeAll(">");
|
||||
}
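A short worked example of the serialization above (derived from the branches shown, not extra code in the diff):

    // A DocumentType named "html" with publicId "-//W3C//DTD HTML 4.01//EN" and
    // systemId "http://www.w3.org/TR/html4/strict.dtd" is written as:
    //   <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
    // A doctype whose publicId and systemId are both empty is written as just:
    //   <!DOCTYPE html>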
|
||||
|
||||
pub fn writeNode(node: *parser.Node, writer: anytype) anyerror!void {
|
||||
switch (try parser.nodeType(node)) {
|
||||
.element => {
|
||||
@@ -114,7 +88,7 @@ pub fn writeNode(node: *parser.Node, writer: anytype) anyerror!void {
|
||||
.document_fragment => return,
|
||||
// document will never be called, but required for completeness.
|
||||
.document => return,
|
||||
// done globally instead, but required for completeness. Only the outer DOCTYPE should be written
|
||||
// done globally instead, but required for completeness.
|
||||
.document_type => return,
|
||||
// deprecated
|
||||
.attribute => return,
|
||||
@@ -182,9 +156,6 @@ fn writeEscapedAttributeValue(writer: anytype, value: []const u8) !void {
|
||||
|
||||
const testing = std.testing;
|
||||
test "dump.writeHTML" {
|
||||
try parser.init(testing.allocator);
|
||||
defer parser.deinit();
|
||||
|
||||
try testWriteHTML(
|
||||
"<div id=\"content\">Over 9000!</div>",
|
||||
"<div id=\"content\">Over 9000!</div>",
|
||||
@@ -225,8 +196,7 @@ fn testWriteFullHTML(comptime expected: []const u8, src: []const u8) !void {
|
||||
var buf = std.ArrayListUnmanaged(u8){};
|
||||
defer buf.deinit(testing.allocator);
|
||||
|
||||
const Elements = @import("html/elements.zig");
|
||||
const doc_html = try parser.documentHTMLParseFromStr(src, &Elements.createElement);
|
||||
const doc_html = try parser.documentHTMLParseFromStr(src);
|
||||
defer parser.documentHTMLClose(doc_html) catch {};
|
||||
|
||||
const doc = parser.documentHTMLToDocument(doc_html);
|
||||
|
||||
@@ -1,24 +1,29 @@
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("page.zig").Page;
|
||||
const parser = @import("netsurf.zig");
|
||||
const URL = @import("../url.zig").URL;
|
||||
const js = @import("../runtime/js.zig");
|
||||
const storage = @import("storage/storage.zig");
|
||||
const generate = @import("../runtime/generate.zig");
|
||||
const Renderer = @import("renderer.zig").Renderer;
|
||||
const Loop = @import("../runtime/loop.zig").Loop;
|
||||
const RequestFactory = @import("../http/client.zig").RequestFactory;
|
||||
|
||||
const WebApis = struct {
|
||||
// Wrapped like this for debug ergonomics.
|
||||
// When we create our Env, a few lines down, we define it as:
|
||||
// pub const Env = js.Env(*Page, WebApis);
|
||||
// pub const Env = js.Env(*SessionState, WebApis);
|
||||
//
|
||||
// If there's a compile time error with the Env, its type will be readable,
// i.e.: runtime.js.Env(*browser.env.Page, browser.env.WebApis)
|
||||
// i.e.: runtime.js.Env(*browser.env.SessionState, browser.env.WebApis)
|
||||
//
|
||||
// But if we didn't wrap it in the struct, like we once didn't, and defined
|
||||
// env as:
|
||||
// pub const Env = js.Env(*Page, Interfaces);
|
||||
// pub const Env = js.Env(*SessionState, Interfaces);
|
||||
//
|
||||
// Because Interfaces is an anonymous type, it doesn't have a friendly name
|
||||
// and errors would be something like:
|
||||
// runtime.js.Env(*browser.Page, .{...A HUNDRED TYPES...})
|
||||
// runtime.js.Env(*browser.env.SessionState, .{...A HUNDRED TYPES...})
|
||||
pub const Interfaces = generate.Tuple(.{
|
||||
@import("crypto/crypto.zig").Crypto,
|
||||
@import("console/console.zig").Console,
|
||||
@@ -39,5 +44,41 @@ const WebApis = struct {
|
||||
pub const JsThis = Env.JsThis;
|
||||
pub const JsObject = Env.JsObject;
|
||||
pub const Function = Env.Function;
|
||||
pub const Env = js.Env(*Page, WebApis);
|
||||
pub const Global = @import("html/window.zig").Window;
|
||||
pub const Env = js.Env(*SessionState, WebApis);
|
||||
|
||||
const Window = @import("html/window.zig").Window;
|
||||
pub const Global = Window;
|
||||
|
||||
pub const SessionState = struct {
|
||||
loop: *Loop,
|
||||
url: *const URL,
|
||||
window: *Window,
|
||||
renderer: *Renderer,
|
||||
arena: std.mem.Allocator,
|
||||
cookie_jar: *storage.CookieJar,
|
||||
request_factory: RequestFactory,
|
||||
|
||||
// dangerous, but set by the JS framework
|
||||
// shorter-lived than the arena above, which
|
||||
// exists for the entire rendering of the page
|
||||
call_arena: std.mem.Allocator = undefined,
|
||||
|
||||
pub fn getOrCreateNodeWrapper(self: *SessionState, comptime T: type, node: *parser.Node) !*T {
|
||||
if (try self.getNodeWrapper(T, node)) |wrap| {
|
||||
return wrap;
|
||||
}
|
||||
|
||||
const wrap = try self.arena.create(T);
|
||||
wrap.* = T{};
|
||||
|
||||
parser.nodeSetEmbedderData(node, wrap);
|
||||
return wrap;
|
||||
}
|
||||
|
||||
pub fn getNodeWrapper(_: *SessionState, comptime T: type, node: *parser.Node) !?*T {
|
||||
if (parser.nodeGetEmbedderData(node)) |wrap| {
|
||||
return @alignCast(@ptrCast(wrap));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
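A minimal sketch (an assumption for illustration, relying on the declarations above) of how a binding could use this cache to keep per-node state that survives across JS calls:

    // Hypothetical wrapper type: the first call allocates it in the session arena
    // and stores it via nodeSetEmbedderData; later calls return the same pointer.
    const LinkState = struct { clicked: bool = false };

    fn markClicked(state: *SessionState, node: *parser.Node) !void {
        const ls = try state.getOrCreateNodeWrapper(LinkState, node);
        ls.clicked = true;
    }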
|
||||
|
||||
@@ -23,7 +23,6 @@ const JsObject = @import("../env.zig").JsObject;
|
||||
// https://dom.spec.whatwg.org/#interface-customevent
|
||||
pub const CustomEvent = struct {
|
||||
pub const prototype = *Event;
|
||||
pub const union_make_copy = true;
|
||||
|
||||
proto: parser.Event,
|
||||
detail: ?JsObject,
|
||||
|
||||
@@ -19,25 +19,24 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Function = @import("../env.zig").Function;
|
||||
const generate = @import("../../runtime/generate.zig");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const DOMException = @import("../dom/exceptions.zig").DOMException;
|
||||
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||
const EventTargetUnion = @import("../dom/event_target.zig").Union;
|
||||
|
||||
const CustomEvent = @import("custom_event.zig").CustomEvent;
|
||||
const ProgressEvent = @import("../xhr/progress_event.zig").ProgressEvent;
|
||||
const MouseEvent = @import("mouse_event.zig").MouseEvent;
|
||||
|
||||
const log = std.log.scoped(.events);
|
||||
|
||||
// Event interfaces
|
||||
pub const Interfaces = .{
|
||||
Event,
|
||||
CustomEvent,
|
||||
ProgressEvent,
|
||||
MouseEvent,
|
||||
};
|
||||
|
||||
pub const Union = generate.Union(Interfaces);
|
||||
@@ -61,7 +60,6 @@ pub const Event = struct {
|
||||
.event => .{ .Event = evt },
|
||||
.custom_event => .{ .CustomEvent = @as(*CustomEvent, @ptrCast(evt)).* },
|
||||
.progress_event => .{ .ProgressEvent = @as(*ProgressEvent, @ptrCast(evt)).* },
|
||||
.mouse_event => .{ .MouseEvent = @as(*parser.MouseEvent, @ptrCast(evt)) },
|
||||
};
|
||||
}
|
||||
|
||||
@@ -77,16 +75,16 @@ pub const Event = struct {
|
||||
return try parser.eventType(self);
|
||||
}
|
||||
|
||||
pub fn get_target(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||
pub fn get_target(self: *parser.Event) !?EventTargetUnion {
|
||||
const et = try parser.eventTarget(self);
|
||||
if (et == null) return null;
|
||||
return try EventTarget.toInterface(et.?, page);
|
||||
return try EventTarget.toInterface(et.?);
|
||||
}
|
||||
|
||||
pub fn get_currentTarget(self: *parser.Event, page: *Page) !?EventTargetUnion {
|
||||
pub fn get_currentTarget(self: *parser.Event) !?EventTargetUnion {
|
||||
const et = try parser.eventCurrentTarget(self);
|
||||
if (et == null) return null;
|
||||
return try EventTarget.toInterface(et.?, page);
|
||||
return try EventTarget.toInterface(et.?);
|
||||
}
|
||||
|
||||
pub fn get_eventPhase(self: *parser.Event) !u8 {
|
||||
@@ -142,126 +140,33 @@ pub const Event = struct {
|
||||
};
|
||||
|
||||
pub const EventHandler = struct {
|
||||
once: bool,
|
||||
capture: bool,
|
||||
callback: Function,
|
||||
node: parser.EventNode,
|
||||
listener: *parser.EventListener,
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Function = Env.Function;
|
||||
|
||||
pub const Listener = union(enum) {
|
||||
function: Function,
|
||||
object: Env.JsObject,
|
||||
|
||||
pub fn callback(self: Listener, target: *parser.EventTarget) !?Function {
|
||||
return switch (self) {
|
||||
.function => |func| try func.withThis(target),
|
||||
.object => |obj| blk: {
|
||||
const func = (try obj.getFunction("handleEvent")) orelse return null;
|
||||
break :blk try func.withThis(try obj.persist());
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const Opts = union(enum) {
|
||||
flags: Flags,
|
||||
capture: bool,
|
||||
|
||||
const Flags = struct {
|
||||
once: ?bool,
|
||||
capture: ?bool,
|
||||
// We ignore this property. It seems to be largely used to help the
|
||||
// browser make certain performance tweaks (i.e. the browser knows
|
||||
// that the listener won't call preventDefault() and thus can safely
|
||||
// run the default as needed).
|
||||
passive: ?bool,
|
||||
signal: ?bool, // currently does nothing
|
||||
};
|
||||
};
|
||||
|
||||
pub fn register(
|
||||
allocator: Allocator,
|
||||
target: *parser.EventTarget,
|
||||
typ: []const u8,
|
||||
listener: Listener,
|
||||
opts_: ?Opts,
|
||||
) !?*EventHandler {
|
||||
var once = false;
|
||||
var capture = false;
|
||||
if (opts_) |opts| {
|
||||
switch (opts) {
|
||||
.capture => |c| capture = c,
|
||||
.flags => |f| {
|
||||
// Done this way so that, for common cases that _only_ set
|
||||
// capture, i.e. {capture: true}, it works.
|
||||
// But for any case that sets any of the other flags, we
|
||||
// error. If we don't error, this function call would succeed
|
||||
// but the behavior might be wrong. At this point, it's
|
||||
// better to be explicit and error.
|
||||
if (f.signal orelse false) return error.NotImplemented;
|
||||
once = f.once orelse false;
|
||||
capture = f.capture orelse false;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const callback = (try listener.callback(target)) orelse return null;
|
||||
|
||||
// check if event target has already this listener
|
||||
if (try parser.eventTargetHasListener(target, typ, capture, callback.id) != null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn init(allocator: Allocator, callback: Function) !*EventHandler {
|
||||
const eh = try allocator.create(EventHandler);
|
||||
eh.* = .{
|
||||
.once = once,
|
||||
.capture = capture,
|
||||
.callback = callback,
|
||||
.node = .{
|
||||
.id = callback.id,
|
||||
.func = handle,
|
||||
},
|
||||
.listener = undefined,
|
||||
};
|
||||
|
||||
eh.listener = try parser.eventTargetAddEventListener(
|
||||
target,
|
||||
typ,
|
||||
&eh.node,
|
||||
capture,
|
||||
);
|
||||
return eh;
|
||||
}
|
||||
|
||||
fn handle(node: *parser.EventNode, event: *parser.Event) void {
|
||||
const ievent = Event.toInterface(event) catch |err| {
|
||||
log.err(.app, "toInterface error", .{ .err = err });
|
||||
log.err("Event.toInterface: {}", .{err});
|
||||
return;
|
||||
};
|
||||
|
||||
const self: *EventHandler = @fieldParentPtr("node", node);
|
||||
var result: Function.Result = undefined;
|
||||
self.callback.tryCall(void, .{ievent}, &result) catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "event handler",
|
||||
});
|
||||
log.err("event handler error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
|
||||
if (self.once) {
|
||||
const target = (parser.eventTarget(event) catch return).?;
|
||||
const typ = parser.eventType(event) catch return;
|
||||
parser.eventTargetRemoveEventListener(
|
||||
target,
|
||||
typ,
|
||||
self.listener,
|
||||
self.capture,
|
||||
) catch {};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -362,13 +267,4 @@ test "Browser.Event" {
|
||||
.{ "document.dispatchEvent(new Event('count'))", "true" },
|
||||
.{ "nb", "0" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "nb = 0; function cbk(event) { nb ++; }", null },
|
||||
.{ "document.addEventListener('count', cbk, {once: true})", null },
|
||||
.{ "document.dispatchEvent(new Event('count'))", "true" },
|
||||
.{ "document.dispatchEvent(new Event('count'))", "true" },
|
||||
.{ "document.dispatchEvent(new Event('count'))", "true" },
|
||||
.{ "nb", "1" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -1,140 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = std.log.scoped(.mouse_event);
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Event = @import("event.zig").Event;
|
||||
const JsObject = @import("../env.zig").JsObject;
|
||||
|
||||
// TODO: We currently don't have a UIEvent interface so we skip it in the prototype chain.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/UIEvent
|
||||
const UIEvent = Event;
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent
|
||||
pub const MouseEvent = struct {
|
||||
pub const Self = parser.MouseEvent;
|
||||
pub const prototype = *UIEvent;
|
||||
|
||||
const MouseButton = enum(u16) {
|
||||
main_button = 0,
|
||||
auxillary_button = 1,
|
||||
secondary_button = 2,
|
||||
fourth_button = 3,
|
||||
fifth_button = 4,
|
||||
};
|
||||
|
||||
const MouseEventInit = struct {
|
||||
screenX: i32 = 0,
|
||||
screenY: i32 = 0,
|
||||
clientX: i32 = 0,
|
||||
clientY: i32 = 0,
|
||||
ctrlKey: bool = false,
|
||||
shiftKey: bool = false,
|
||||
altKey: bool = false,
|
||||
metaKey: bool = false,
|
||||
button: MouseButton = .main_button,
|
||||
};
|
||||
|
||||
pub fn constructor(event_type: []const u8, opts_: ?MouseEventInit) !*parser.MouseEvent {
|
||||
const opts = opts_ orelse MouseEventInit{};
|
||||
|
||||
var mouse_event = try parser.mouseEventCreate();
|
||||
try parser.eventSetInternalType(@ptrCast(&mouse_event), .mouse_event);
|
||||
|
||||
try parser.mouseEventInit(mouse_event, event_type, .{
|
||||
.x = opts.clientX,
|
||||
.y = opts.clientY,
|
||||
.ctrl = opts.ctrlKey,
|
||||
.shift = opts.shiftKey,
|
||||
.alt = opts.altKey,
|
||||
.meta = opts.metaKey,
|
||||
.button = @intFromEnum(opts.button),
|
||||
});
|
||||
|
||||
if (!std.mem.eql(u8, event_type, "click")) {
|
||||
log.warn("MouseEvent currently only supports listeners for 'click' events!", .{});
|
||||
}
|
||||
|
||||
return mouse_event;
|
||||
}
|
||||
|
||||
pub fn get_button(self: *parser.MouseEvent) u16 {
|
||||
return self.button;
|
||||
}
|
||||
|
||||
// This is just an alias for clientX.
pub fn get_x(self: *parser.MouseEvent) i32 {
|
||||
return self.cx;
|
||||
}
|
||||
|
||||
// This is just an alias for clientY.
|
||||
pub fn get_y(self: *parser.MouseEvent) i32 {
|
||||
return self.cy;
|
||||
}
|
||||
|
||||
pub fn get_clientX(self: *parser.MouseEvent) i32 {
|
||||
return self.cx;
|
||||
}
|
||||
|
||||
pub fn get_clientY(self: *parser.MouseEvent) i32 {
|
||||
return self.cy;
|
||||
}
|
||||
|
||||
pub fn get_screenX(self: *parser.MouseEvent) i32 {
|
||||
return self.sx;
|
||||
}
|
||||
|
||||
pub fn get_screenY(self: *parser.MouseEvent) i32 {
|
||||
return self.sy;
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser.MouseEvent" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
// Default MouseEvent
|
||||
.{ "let event = new MouseEvent('click')", "undefined" },
|
||||
.{ "event.type", "click" },
|
||||
.{ "event instanceof MouseEvent", "true" },
|
||||
.{ "event instanceof Event", "true" },
|
||||
.{ "event.clientX", "0" },
|
||||
.{ "event.clientY", "0" },
|
||||
.{ "event.screenX", "0" },
|
||||
.{ "event.screenY", "0" },
|
||||
// MouseEvent with parameters
|
||||
.{ "let new_event = new MouseEvent('click', { 'button': 0, 'clientX': 10, 'clientY': 20 })", "undefined" },
|
||||
.{ "new_event.button", "0" },
|
||||
.{ "new_event.x", "10" },
|
||||
.{ "new_event.y", "20" },
|
||||
.{ "new_event.screenX", "10" },
|
||||
.{ "new_event.screenY", "20" },
|
||||
// MouseEvent Listener
|
||||
.{ "let me = new MouseEvent('click')", "undefined" },
|
||||
.{ "me instanceof Event", "true" },
|
||||
.{ "var eevt = null; function ccbk(event) { eevt = event; }", "undefined" },
|
||||
.{ "document.addEventListener('click', ccbk)", "undefined" },
|
||||
.{ "document.dispatchEvent(me)", "true" },
|
||||
.{ "eevt.type", "click" },
|
||||
.{ "eevt instanceof MouseEvent", "true" },
|
||||
}, .{});
|
||||
}
|
||||
@@ -18,9 +18,8 @@

const std = @import("std");

const log = @import("../../log.zig");
const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const SessionState = @import("../env.zig").SessionState;

const Window = @import("window.zig").Window;
const Element = @import("../dom/element.zig").Element;
@@ -39,6 +38,14 @@ pub const HTMLDocument = struct {
    pub const prototype = *Document;
    pub const subtype = .node;

    ready_state: ReadyState = .loading,

    const ReadyState = enum {
        loading,
        interactive,
        complete,
    };

    // JS funcs
    // --------

@@ -79,18 +86,18 @@ pub const HTMLDocument = struct {
        }
    }

    pub fn get_cookie(_: *parser.DocumentHTML, page: *Page) ![]const u8 {
    pub fn get_cookie(_: *parser.DocumentHTML, state: *SessionState) ![]const u8 {
        var buf: std.ArrayListUnmanaged(u8) = .{};
        try page.cookie_jar.forRequest(&page.url.uri, buf.writer(page.arena), .{ .navigation = true });
        try state.cookie_jar.forRequest(&state.url.uri, buf.writer(state.arena), .{ .navigation = true });
        return buf.items;
    }

    pub fn set_cookie(_: *parser.DocumentHTML, cookie_str: []const u8, page: *Page) ![]const u8 {
    pub fn set_cookie(_: *parser.DocumentHTML, cookie_str: []const u8, state: *SessionState) ![]const u8 {
        // we use the cookie jar's allocator to parse the cookie because it
        // outlives the page's arena.
        const c = try Cookie.parse(page.cookie_jar.allocator, &page.url.uri, cookie_str);
        const c = try Cookie.parse(state.cookie_jar.allocator, &state.url.uri, cookie_str);
        errdefer c.deinit();
        try page.cookie_jar.add(c, std.time.timestamp());
        try state.cookie_jar.add(c, std.time.timestamp());
        return cookie_str;
    }

@@ -103,8 +110,8 @@ pub const HTMLDocument = struct {
        return v;
    }

    pub fn _getElementsByName(self: *parser.DocumentHTML, name: []const u8, page: *Page) !NodeList {
        const arena = page.arena;
    pub fn _getElementsByName(self: *parser.DocumentHTML, name: []const u8, state: *SessionState) !NodeList {
        const arena = state.arena;
        var list: NodeList = .{};

        if (name.len == 0) return list;
@@ -123,24 +130,24 @@ pub const HTMLDocument = struct {
        return list;
    }

    pub fn get_images(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "img", false);
    pub fn get_images(self: *parser.DocumentHTML, state: *SessionState) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(state.arena, parser.documentHTMLToNode(self), "img", false);
    }

    pub fn get_embeds(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "embed", false);
    pub fn get_embeds(self: *parser.DocumentHTML, state: *SessionState) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(state.arena, parser.documentHTMLToNode(self), "embed", false);
    }

    pub fn get_plugins(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
        return get_embeds(self, page);
    pub fn get_plugins(self: *parser.DocumentHTML, state: *SessionState) !collection.HTMLCollection {
        return get_embeds(self, state);
    }

    pub fn get_forms(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "form", false);
    pub fn get_forms(self: *parser.DocumentHTML, state: *SessionState) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(state.arena, parser.documentHTMLToNode(self), "form", false);
    }

    pub fn get_scripts(self: *parser.DocumentHTML, page: *Page) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(page.arena, parser.documentHTMLToNode(self), "script", false);
    pub fn get_scripts(self: *parser.DocumentHTML, state: *SessionState) !collection.HTMLCollection {
        return try collection.HTMLCollectionByTagName(state.arena, parser.documentHTMLToNode(self), "script", false);
    }

    pub fn get_applets(_: *parser.DocumentHTML) !collection.HTMLCollection {
@@ -167,10 +174,6 @@ pub const HTMLDocument = struct {
        return try parser.documentHTMLGetLocation(Location, self);
    }

    pub fn set_location(_: *const parser.DocumentHTML, url: []const u8, page: *Page) !void {
        return page.navigateFromWebAPI(url, .{ .reason = .script });
    }

    pub fn get_designMode(_: *parser.DocumentHTML) []const u8 {
        return "off";
    }
@@ -179,13 +182,13 @@ pub const HTMLDocument = struct {
        return "off";
    }

    pub fn get_defaultView(_: *parser.DocumentHTML, page: *Page) *Window {
        return &page.window;
    pub fn get_defaultView(_: *parser.DocumentHTML, state: *const SessionState) *Window {
        return state.window;
    }

    pub fn get_readyState(self: *parser.DocumentHTML, page: *Page) ![]const u8 {
        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
        return @tagName(state.ready_state);
    pub fn get_readyState(node: *parser.DocumentHTML, state: *SessionState) ![]const u8 {
        const self = try state.getOrCreateNodeWrapper(HTMLDocument, @ptrCast(node));
        return @tagName(self.ready_state);
    }

    // noop legacy functions
@@ -229,57 +232,53 @@ pub const HTMLDocument = struct {
    // Since LightPanda requires the client to know what they are clicking on, we do not return the underlying element at this moment.
    // This can currently only happen if the first pixel is clicked without having rendered any element. This will change when css properties are supported.
    // This returns an ElementUnion instead of a *parser.Element in case the element somehow hasn't passed through the js runtime yet.
    pub fn _elementFromPoint(_: *parser.DocumentHTML, x: f32, y: f32, page: *Page) !?ElementUnion {
    pub fn _elementFromPoint(_: *parser.DocumentHTML, x: f32, y: f32, state: *SessionState) !?ElementUnion {
        const ix: i32 = @intFromFloat(@floor(x));
        const iy: i32 = @intFromFloat(@floor(y));
        const element = page.renderer.getElementAtPosition(ix, iy) orelse return null;
        const element = state.renderer.getElementAtPosition(ix, iy) orelse return null;
        // TODO if pointer-events set to none the underlying element should be returned (parser.documentGetDocumentElement(self.document);?)
        return try Element.toInterface(element);
    }

    // Returns an array of all elements at the specified coordinates (relative to the viewport). The elements are ordered from the topmost to the bottommost box of the viewport.
    pub fn _elementsFromPoint(_: *parser.DocumentHTML, x: f32, y: f32, page: *Page) ![]ElementUnion {
    pub fn _elementsFromPoint(_: *parser.DocumentHTML, x: f32, y: f32, state: *SessionState) ![]ElementUnion {
        const ix: i32 = @intFromFloat(@floor(x));
        const iy: i32 = @intFromFloat(@floor(y));
        const element = page.renderer.getElementAtPosition(ix, iy) orelse return &.{};
        const element = state.renderer.getElementAtPosition(ix, iy) orelse return &.{};
        // TODO if pointer-events set to none the underlying element should be returned (parser.documentGetDocumentElement(self.document);?)

        var list: std.ArrayListUnmanaged(ElementUnion) = .empty;
        try list.ensureTotalCapacity(page.call_arena, 3);
        try list.ensureTotalCapacity(state.call_arena, 3);
        list.appendAssumeCapacity(try Element.toInterface(element));

        // Since we are using a flat renderer there is no hierarchy of elements. What we do know is that the element is part of the main document.
        // Thus we can add the HtmlHtmlElement and its child HTMLBodyElement to the returned list.
        // TBD Should we instead return every parent that is an element? Note that a child does not physically need to be overlapping the parent.
        // Should we do a render pass on demand?
        const doc_elem = try parser.documentGetDocumentElement(parser.documentHTMLToDocument(page.window.document)) orelse {
        const doc_elem = try parser.documentGetDocumentElement(parser.documentHTMLToDocument(state.window.document)) orelse {
            return list.items;
        };
        if (try parser.documentHTMLBody(page.window.document)) |body| {
        if (try parser.documentHTMLBody(state.window.document)) |body| {
            list.appendAssumeCapacity(try Element.toInterface(parser.bodyToElement(body)));
        }
        list.appendAssumeCapacity(try Element.toInterface(doc_elem));
        return list.items;
    }

    pub fn documentIsLoaded(self: *parser.DocumentHTML, page: *Page) !void {
        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
        state.ready_state = .interactive;
    pub fn documentIsLoaded(html_doc: *parser.DocumentHTML, state: *SessionState) !void {
        const self = try state.getOrCreateNodeWrapper(HTMLDocument, @ptrCast(html_doc));
        self.ready_state = .interactive;

        const evt = try parser.eventCreate();
        defer parser.eventDestroy(evt);

        log.debug(.script_event, "dispatch event", .{
            .type = "DOMContentLoaded",
            .source = "document",
        });
        try parser.eventInit(evt, "DOMContentLoaded", .{ .bubbles = true, .cancelable = true });
        _ = try parser.eventTargetDispatchEvent(parser.toEventTarget(parser.DocumentHTML, self), evt);
        _ = try parser.eventTargetDispatchEvent(parser.toEventTarget(parser.DocumentHTML, html_doc), evt);
    }

    pub fn documentIsComplete(self: *parser.DocumentHTML, page: *Page) !void {
        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
        state.ready_state = .complete;
    pub fn documentIsComplete(html_doc: *parser.DocumentHTML, state: *SessionState) !void {
        const self = try state.getOrCreateNodeWrapper(HTMLDocument, @ptrCast(html_doc));
        self.ready_state = .complete;
    }
};

@@ -384,12 +383,12 @@ test "Browser.HTML.Document" {
        .{ "document.readyState", "loading" },
    }, .{});

    try HTMLDocument.documentIsLoaded(runner.page.window.document, runner.page);
    try HTMLDocument.documentIsLoaded(runner.window.document, &runner.state);
    try runner.testCases(&.{
        .{ "document.readyState", "interactive" },
    }, .{});

    try HTMLDocument.documentIsComplete(runner.page.window.document, runner.page);
    try HTMLDocument.documentIsComplete(runner.window.document, &runner.state);
    try runner.testCases(&.{
        .{ "document.readyState", "complete" },
    }, .{});
@@ -19,14 +19,11 @@ const std = @import("std");
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const generate = @import("../../runtime/generate.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const urlStitch = @import("../../url.zig").URL.stitch;
|
||||
const URL = @import("../url/url.zig").URL;
|
||||
const Node = @import("../dom/node.zig").Node;
|
||||
const Element = @import("../dom/element.zig").Element;
|
||||
const State = @import("../State.zig");
|
||||
|
||||
const CSSStyleDeclaration = @import("../cssom/css_style_declaration.zig").CSSStyleDeclaration;
|
||||
|
||||
@@ -60,7 +57,6 @@ pub const Interfaces = .{
|
||||
HTMLHtmlElement,
|
||||
HTMLIFrameElement,
|
||||
HTMLImageElement,
|
||||
HTMLImageElement.Factory,
|
||||
HTMLInputElement,
|
||||
HTMLLIElement,
|
||||
HTMLLabelElement,
|
||||
@@ -113,9 +109,14 @@ pub const HTMLElement = struct {
|
||||
pub const prototype = *Element;
|
||||
pub const subtype = .node;
|
||||
|
||||
pub fn get_style(e: *parser.ElementHTML, page: *Page) !*CSSStyleDeclaration {
|
||||
const state = try page.getOrCreateNodeState(@ptrCast(e));
|
||||
return &state.style;
|
||||
style: CSSStyleDeclaration = .{
|
||||
.store = .{},
|
||||
.order = .{},
|
||||
},
|
||||
|
||||
pub fn get_style(e: *parser.ElementHTML, state: *SessionState) !*CSSStyleDeclaration {
|
||||
const self = try state.getOrCreateNodeWrapper(HTMLElement, @ptrCast(e));
|
||||
return &self.style;
|
||||
}
|
||||
|
||||
pub fn get_innerText(e: *parser.ElementHTML) ![]const u8 {
|
||||
@@ -134,7 +135,7 @@ pub const HTMLElement = struct {
|
||||
try Node.removeChildren(n);
|
||||
|
||||
// attach the text node.
|
||||
_ = try parser.nodeAppendChild(n, @as(*parser.Node, @alignCast(@ptrCast(t))));
|
||||
_ = try parser.nodeAppendChild(n, @as(*parser.Node, @ptrCast(t)));
|
||||
}
|
||||
|
||||
pub fn _click(e: *parser.ElementHTML) !void {
|
||||
@@ -148,20 +149,6 @@ pub const HTMLElement = struct {
|
||||
});
|
||||
_ = try parser.elementDispatchEvent(@ptrCast(e), @ptrCast(event));
|
||||
}
|
||||
|
||||
const FocusOpts = struct {
|
||||
preventScroll: bool,
|
||||
focusVisible: bool,
|
||||
};
|
||||
pub fn _focus(e: *parser.ElementHTML, _: ?FocusOpts, page: *Page) !void {
|
||||
if (!try page.isNodeAttached(@ptrCast(e))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const Document = @import("../dom/document.zig").Document;
|
||||
const root_node = try parser.nodeGetRootNode(@ptrCast(e));
|
||||
try Document.setFocus(@ptrCast(root_node), e, page);
|
||||
}
|
||||
};
|
||||
|
||||
// Deprecated HTMLElements in Chrome (2023/03/15)
|
||||
@@ -208,9 +195,8 @@ pub const HTMLAnchorElement = struct {
|
||||
return try parser.anchorGetHref(self);
|
||||
}
|
||||
|
||||
pub fn set_href(self: *parser.Anchor, href: []const u8, page: *const Page) !void {
|
||||
const full = try urlStitch(page.call_arena, href, page.url.raw, .{});
|
||||
return try parser.anchorSetHref(self, full);
|
||||
pub fn set_href(self: *parser.Anchor, href: []const u8) !void {
|
||||
return try parser.anchorSetHref(self, href);
|
||||
}
|
||||
|
||||
pub fn get_hreflang(self: *parser.Anchor) ![]const u8 {
|
||||
@@ -245,25 +231,26 @@ pub const HTMLAnchorElement = struct {
|
||||
return try parser.nodeSetTextContent(parser.anchorToNode(self), v);
|
||||
}
|
||||
|
||||
inline fn url(self: *parser.Anchor, page: *Page) !URL {
|
||||
return URL.constructor(.{ .element = @alignCast(@ptrCast(self)) }, null, page); // TODO inject base url
|
||||
inline fn url(self: *parser.Anchor, state: *SessionState) !URL {
|
||||
const href = try parser.anchorGetHref(self);
|
||||
return URL.constructor(href, null, state); // TODO inject base url
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_origin(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try u.get_origin(page);
|
||||
pub fn get_origin(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try u.get_origin(state);
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_protocol(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return u.get_protocol(page);
|
||||
pub fn get_protocol(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return u.get_protocol(state);
|
||||
}
|
||||
|
||||
pub fn set_protocol(self: *parser.Anchor, v: []const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_protocol(self: *parser.Anchor, v: []const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
u.uri.scheme = v;
|
||||
const href = try u.toString(arena);
|
||||
@@ -271,12 +258,12 @@ pub const HTMLAnchorElement = struct {
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_host(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try u.get_host(page);
|
||||
pub fn get_host(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try u.get_host(state);
|
||||
}
|
||||
|
||||
pub fn set_host(self: *parser.Anchor, v: []const u8, page: *Page) !void {
|
||||
pub fn set_host(self: *parser.Anchor, v: []const u8, state: *SessionState) !void {
|
||||
// search : separator
|
||||
var p: ?u16 = null;
|
||||
var h: []const u8 = undefined;
|
||||
@@ -288,8 +275,8 @@ pub const HTMLAnchorElement = struct {
|
||||
}
|
||||
}
|
||||
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (p) |pp| {
|
||||
u.uri.host = .{ .raw = h };
|
||||
@@ -303,28 +290,29 @@ pub const HTMLAnchorElement = struct {
|
||||
try parser.anchorSetHref(self, href);
|
||||
}
|
||||
|
||||
pub fn get_hostname(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return u.get_hostname();
|
||||
// TODO return a disposable string
|
||||
pub fn get_hostname(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try state.arena.dupe(u8, u.get_hostname());
|
||||
}
|
||||
|
||||
pub fn set_hostname(self: *parser.Anchor, v: []const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_hostname(self: *parser.Anchor, v: []const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
u.uri.host = .{ .raw = v };
|
||||
const href = try u.toString(arena);
|
||||
try parser.anchorSetHref(self, href);
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_port(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try u.get_port(page);
|
||||
pub fn get_port(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try u.get_port(state);
|
||||
}
|
||||
|
||||
pub fn set_port(self: *parser.Anchor, v: ?[]const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_port(self: *parser.Anchor, v: ?[]const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (v != null and v.?.len > 0) {
|
||||
u.uri.port = try std.fmt.parseInt(u16, v.?, 10);
|
||||
@@ -337,14 +325,14 @@ pub const HTMLAnchorElement = struct {
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_username(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return u.get_username();
|
||||
pub fn get_username(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try state.arena.dupe(u8, u.get_username());
|
||||
}
|
||||
|
||||
pub fn set_username(self: *parser.Anchor, v: ?[]const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_username(self: *parser.Anchor, v: ?[]const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (v) |vv| {
|
||||
u.uri.user = .{ .raw = vv };
|
||||
@@ -357,14 +345,14 @@ pub const HTMLAnchorElement = struct {
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_password(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try page.arena.dupe(u8, u.get_password());
|
||||
pub fn get_password(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try state.arena.dupe(u8, u.get_password());
|
||||
}
|
||||
|
||||
pub fn set_password(self: *parser.Anchor, v: ?[]const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_password(self: *parser.Anchor, v: ?[]const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (v) |vv| {
|
||||
u.uri.password = .{ .raw = vv };
|
||||
@@ -377,42 +365,49 @@ pub const HTMLAnchorElement = struct {
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_pathname(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return u.get_pathname();
|
||||
pub fn get_pathname(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try state.arena.dupe(u8, u.get_pathname());
|
||||
}
|
||||
|
||||
pub fn set_pathname(self: *parser.Anchor, v: []const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_pathname(self: *parser.Anchor, v: []const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
u.uri.path = .{ .raw = v };
|
||||
const href = try u.toString(arena);
|
||||
|
||||
try parser.anchorSetHref(self, href);
|
||||
}
|
||||
|
||||
pub fn get_search(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try u.get_search(page);
|
||||
// TODO return a disposable string
|
||||
pub fn get_search(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try u.get_search(state);
|
||||
}
|
||||
|
||||
pub fn set_search(self: *parser.Anchor, v: ?[]const u8, page: *Page) !void {
|
||||
var u = try url(self, page);
|
||||
try u.set_search(v, page);
|
||||
pub fn set_search(self: *parser.Anchor, v: ?[]const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (v) |vv| {
|
||||
u.uri.query = .{ .raw = vv };
|
||||
} else {
|
||||
u.uri.query = null;
|
||||
}
|
||||
const href = try u.toString(arena);
|
||||
|
||||
const href = try u.toString(page.call_arena);
|
||||
try parser.anchorSetHref(self, href);
|
||||
}
|
||||
|
||||
// TODO return a disposable string
|
||||
pub fn get_hash(self: *parser.Anchor, page: *Page) ![]const u8 {
|
||||
var u = try url(self, page);
|
||||
return try u.get_hash(page);
|
||||
pub fn get_hash(self: *parser.Anchor, state: *SessionState) ![]const u8 {
|
||||
var u = try url(self, state);
|
||||
return try u.get_hash(state);
|
||||
}
|
||||
|
||||
pub fn set_hash(self: *parser.Anchor, v: ?[]const u8, page: *Page) !void {
|
||||
const arena = page.arena;
|
||||
var u = try url(self, page);
|
||||
pub fn set_hash(self: *parser.Anchor, v: ?[]const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
var u = try url(self, state);
|
||||
|
||||
if (v) |vv| {
|
||||
u.uri.fragment = .{ .raw = vv };
|
||||
@@ -573,6 +568,15 @@ pub const HTMLImageElement = struct {
|
||||
pub const Self = parser.Image;
|
||||
pub const prototype = *HTMLElement;
|
||||
pub const subtype = .node;
|
||||
pub const js_name = "Image";
|
||||
|
||||
pub fn constructor(width: ?u32, height: ?u32, state: *const SessionState) !*parser.Image {
|
||||
const element = try parser.documentCreateElement(parser.documentHTMLToDocument(state.window.document), "img");
|
||||
const image: *parser.Image = @ptrCast(element);
|
||||
if (width) |width_| try parser.imageSetWidth(image, width_);
|
||||
if (height) |height_| try parser.imageSetHeight(image, height_);
|
||||
return image;
|
||||
}
|
||||
|
||||
pub fn get_alt(self: *parser.Image) ![]const u8 {
|
||||
return try parser.imageGetAlt(self);
|
||||
@@ -610,200 +614,12 @@ pub const HTMLImageElement = struct {
|
||||
pub fn set_isMap(self: *parser.Image, is_map: bool) !void {
|
||||
try parser.imageSetIsMap(self, is_map);
|
||||
}
|
||||
|
||||
pub const Factory = struct {
|
||||
pub const js_name = "Image";
|
||||
pub const subtype = .node;
|
||||
pub const js_legacy_factory = true;
|
||||
pub const prototype = *HTMLImageElement;
|
||||
|
||||
pub fn constructor(width: ?u32, height: ?u32, page: *const Page) !*parser.Image {
|
||||
const element = try parser.documentCreateElement(parser.documentHTMLToDocument(page.window.document), "img");
|
||||
const image: *parser.Image = @ptrCast(element);
|
||||
if (width) |width_| try parser.imageSetWidth(image, width_);
|
||||
if (height) |height_| try parser.imageSetHeight(image, height_);
|
||||
return image;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
pub fn createElement(params: [*c]parser.c.dom_html_element_create_params, elem: [*c][*c]parser.ElementHTML) callconv(.c) parser.c.dom_exception {
|
||||
const p: *parser.c.dom_html_element_create_params = @ptrCast(params);
|
||||
switch (p.type) {
|
||||
parser.c.DOM_HTML_ELEMENT_TYPE_INPUT => {
|
||||
return HTMLInputElement.dom_create(params, elem);
|
||||
},
|
||||
else => return parser.c.DOM_NO_ERR,
|
||||
}
|
||||
}
|
||||
|
||||
var input_protected_vtable: parser.c.dom_element_protected_vtable = .{
|
||||
.base = .{
|
||||
.destroy = HTMLInputElement.node_destroy,
|
||||
.copy = HTMLInputElement.node_copy,
|
||||
},
|
||||
.dom_element_parse_attribute = HTMLInputElement.element_parse_attribute,
|
||||
};
|
||||
|
||||
pub const HTMLInputElement = struct {
|
||||
pub const Self = parser.Input;
|
||||
pub const prototype = *HTMLElement;
|
||||
pub const subtype = .node;
|
||||
|
||||
base: parser.ElementHTML,
|
||||
|
||||
type: []const u8 = "text",
|
||||
|
||||
pub fn dom_create(params: *parser.c.dom_html_element_create_params, output: *?*parser.ElementHTML) parser.c.dom_exception {
|
||||
var self = parser.ARENA.?.create(HTMLInputElement) catch return parser.c.DOM_NO_MEM_ERR;
|
||||
output.* = &self.base; // Self can be recovered using @fieldParentPtr
|
||||
|
||||
self.base.base.base.base.vtable = &parser.c._dom_html_element_vtable; // TODO replace get/setAttribute
|
||||
self.base.base.base.vtable = &input_protected_vtable;
|
||||
|
||||
return self.dom_initialise(params);
|
||||
}
|
||||
// Initialise is separated from create so that the leaf type sets the vtable, then calls all the way up the prototype chain to init.
pub fn dom_initialise(self: *HTMLInputElement, params: *parser.c.dom_html_element_create_params) parser.c.dom_exception {
|
||||
return parser.c._dom_html_element_initialise(params, &self.base);
|
||||
}
|
||||
|
||||
// This should always be the same and we should not have cleanup for new zig implementation, hopefully
|
||||
pub fn node_destroy(node: [*c]parser.Node) callconv(.c) void {
|
||||
const elem = parser.nodeToHtmlElement(node);
|
||||
parser.c._dom_html_element_finalise(elem);
|
||||
}
|
||||
|
||||
pub fn node_copy(old: [*c]parser.Node, new: [*c][*c]parser.Node) callconv(.c) parser.c.dom_exception {
|
||||
const old_elem = parser.nodeToHtmlElement(old);
|
||||
const self = @as(*HTMLInputElement, @fieldParentPtr("base", old_elem));
|
||||
|
||||
var copy = parser.ARENA.?.create(HTMLInputElement) catch return parser.c.DOM_NO_MEM_ERR;
|
||||
copy.type = self.type;
|
||||
|
||||
const err = parser.c._dom_html_element_copy_internal(old_elem, &copy.base);
if (err != parser.c.DOM_NO_ERR) {
|
||||
return err;
|
||||
}
|
||||
|
||||
new.* = @ptrCast(copy);
|
||||
return parser.c.DOM_NO_ERR;
|
||||
}
|
||||
|
||||
// fn ([*c]cimport.struct_dom_element, [*c]cimport.struct_dom_string, [*c]cimport.struct_dom_string, [*c][*c]cimport.struct_dom_string) callconv(.c) c_uint
|
||||
pub fn element_parse_attribute(self: [*c]parser.Element, name: [*c]parser.c.dom_string, value: [*c]parser.c.dom_string, parsed: [*c][*c]parser.c.dom_string) callconv(.c) parser.c.dom_exception {
|
||||
_ = name;
|
||||
_ = self;
|
||||
parsed.* = value;
|
||||
_ = parser.c.dom_string_ref(value);
|
||||
|
||||
// TODO actual implementation
|
||||
// Probably should not use this and instead override the getAttribute setAttribute Element methods directly, perhaps other related functions.
|
||||
|
||||
// handle defaultValue likes
|
||||
// Call setter or store in general attribute store
|
||||
// increment domstring ref?
|
||||
return parser.c.DOM_NO_ERR;
|
||||
}
|
||||
|
||||
pub fn get_defaultValue(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetDefaultValue(self);
|
||||
}
|
||||
pub fn set_defaultValue(self: *parser.Input, default_value: []const u8) !void {
|
||||
try parser.inputSetDefaultValue(self, default_value);
|
||||
}
|
||||
pub fn get_defaultChecked(self: *parser.Input) !bool {
|
||||
return try parser.inputGetDefaultChecked(self);
|
||||
}
|
||||
pub fn set_defaultChecked(self: *parser.Input, default_checked: bool) !void {
|
||||
try parser.inputSetDefaultChecked(self, default_checked);
|
||||
}
|
||||
pub fn get_form(self: *parser.Input) !?*parser.Form {
|
||||
return try parser.inputGetForm(self);
|
||||
}
|
||||
pub fn get_accept(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetAccept(self);
|
||||
}
|
||||
pub fn set_accept(self: *parser.Input, accept: []const u8) !void {
|
||||
try parser.inputSetAccept(self, accept);
|
||||
}
|
||||
pub fn get_alt(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetAlt(self);
|
||||
}
|
||||
pub fn set_alt(self: *parser.Input, alt: []const u8) !void {
|
||||
try parser.inputSetAlt(self, alt);
|
||||
}
|
||||
pub fn get_checked(self: *parser.Input) !bool {
|
||||
return try parser.inputGetChecked(self);
|
||||
}
|
||||
pub fn set_checked(self: *parser.Input, checked: bool) !void {
|
||||
try parser.inputSetChecked(self, checked);
|
||||
}
|
||||
pub fn get_disabled(self: *parser.Input) !bool {
|
||||
return try parser.inputGetDisabled(self);
|
||||
}
|
||||
pub fn set_disabled(self: *parser.Input, disabled: bool) !void {
|
||||
try parser.inputSetDisabled(self, disabled);
|
||||
}
|
||||
pub fn get_maxLength(self: *parser.Input) !i32 {
|
||||
return try parser.inputGetMaxLength(self);
|
||||
}
|
||||
pub fn set_maxLength(self: *parser.Input, max_length: i32) !void {
|
||||
try parser.inputSetMaxLength(self, max_length);
|
||||
}
|
||||
pub fn get_name(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetName(self);
|
||||
}
|
||||
pub fn set_name(self: *parser.Input, name: []const u8) !void {
|
||||
try parser.inputSetName(self, name);
|
||||
}
|
||||
pub fn get_readOnly(self: *parser.Input) !bool {
|
||||
return try parser.inputGetReadOnly(self);
|
||||
}
|
||||
pub fn set_readOnly(self: *parser.Input, read_only: bool) !void {
|
||||
try parser.inputSetReadOnly(self, read_only);
|
||||
}
|
||||
pub fn get_size(self: *parser.Input) !u32 {
|
||||
return try parser.inputGetSize(self);
|
||||
}
|
||||
pub fn set_size(self: *parser.Input, size: i32) !void {
|
||||
try parser.inputSetSize(self, size);
|
||||
}
|
||||
pub fn get_src(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetSrc(self);
|
||||
}
|
||||
pub fn set_src(self: *parser.Input, src: []const u8, page: *Page) !void {
|
||||
const new_src = try urlStitch(page.call_arena, src, page.url.raw, .{ .alloc = .if_needed });
|
||||
try parser.inputSetSrc(self, new_src);
|
||||
}
|
||||
pub fn get_type(self: *parser.Input) ![]const u8 {
|
||||
const elem = parser.nodeToHtmlElement(@alignCast(@ptrCast(self)));
|
||||
const input = @as(*HTMLInputElement, @fieldParentPtr("base", elem));
|
||||
|
||||
return input.type;
|
||||
}
|
||||
pub fn set_type(self: *parser.Input, type_: []const u8) !void {
|
||||
const elem = parser.nodeToHtmlElement(@alignCast(@ptrCast(self)));
|
||||
const input = @as(*HTMLInputElement, @fieldParentPtr("base", elem));
|
||||
|
||||
const possible_values = [_][]const u8{ "text", "search", "tel", "url", "email", "password", "date", "month", "week", "time", "datetime-local", "number", "range", "color", "checkbox", "radio", "file", "hidden", "image", "button", "submit", "reset" };
|
||||
var found = false;
|
||||
for (possible_values) |item| {
|
||||
if (std.mem.eql(u8, type_, item)) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
input.type = if (found) type_ else "text";
|
||||
|
||||
// TODO DOM events
|
||||
}
|
||||
pub fn get_value(self: *parser.Input) ![]const u8 {
|
||||
return try parser.inputGetValue(self);
|
||||
}
|
||||
pub fn set_value(self: *parser.Input, value: []const u8) !void {
|
||||
try parser.inputSetValue(self, value);
|
||||
}
|
||||
};
|
||||
|
||||
pub const HTMLLIElement = struct {
|
||||
@@ -1034,26 +850,6 @@ pub const HTMLScriptElement = struct {
|
||||
|
||||
return try parser.elementRemoveAttribute(parser.scriptToElt(self), "nomodule");
|
||||
}
|
||||
|
||||
pub fn get_onload(self: *parser.Script, page: *Page) !?Env.Function {
|
||||
const state = page.getNodeState(@alignCast(@ptrCast(self))) orelse return null;
|
||||
return state.onload;
|
||||
}
|
||||
|
||||
pub fn set_onload(self: *parser.Script, function: ?Env.Function, page: *Page) !void {
|
||||
const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
|
||||
state.onload = function;
|
||||
}
|
||||
|
||||
pub fn get_onerror(self: *parser.Script, page: *Page) !?Env.Function {
|
||||
const state = page.getNodeState(@alignCast(@ptrCast(self))) orelse return null;
|
||||
return state.onerror;
|
||||
}
|
||||
|
||||
pub fn set_onerror(self: *parser.Script, function: ?Env.Function, page: *Page) !void {
|
||||
const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(self)));
|
||||
state.onerror = function;
|
||||
}
|
||||
};
|
||||
|
||||
pub const HTMLSourceElement = struct {
|
||||
@@ -1232,62 +1028,62 @@ test "Browser.HTML.Element" {
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "let link = document.getElementById('link')", "undefined" },
|
||||
.{ "link.target", "" },
|
||||
.{ "link.target = '_blank'", "_blank" },
|
||||
.{ "link.target", "_blank" },
|
||||
.{ "link.target = ''", "" },
|
||||
.{ "let a = document.getElementById('link')", "undefined" },
|
||||
.{ "a.target", "" },
|
||||
.{ "a.target = '_blank'", "_blank" },
|
||||
.{ "a.target", "_blank" },
|
||||
.{ "a.target = ''", "" },
|
||||
|
||||
.{ "link.href", "foo" },
|
||||
.{ "link.href = 'https://lightpanda.io/'", "https://lightpanda.io/" },
|
||||
.{ "link.href", "https://lightpanda.io/" },
|
||||
.{ "a.href", "foo" },
|
||||
.{ "a.href = 'https://lightpanda.io/'", "https://lightpanda.io/" },
|
||||
.{ "a.href", "https://lightpanda.io/" },
|
||||
|
||||
.{ "link.origin", "https://lightpanda.io" },
|
||||
.{ "a.origin", "https://lightpanda.io" },
|
||||
|
||||
.{ "link.host = 'lightpanda.io:443'", "lightpanda.io:443" },
|
||||
.{ "link.host", "lightpanda.io:443" },
|
||||
.{ "link.port", "443" },
|
||||
.{ "link.hostname", "lightpanda.io" },
|
||||
.{ "a.host = 'lightpanda.io:443'", "lightpanda.io:443" },
|
||||
.{ "a.host", "lightpanda.io:443" },
|
||||
.{ "a.port", "443" },
|
||||
.{ "a.hostname", "lightpanda.io" },
|
||||
|
||||
.{ "link.host = 'lightpanda.io'", "lightpanda.io" },
|
||||
.{ "link.host", "lightpanda.io" },
|
||||
.{ "link.port", "" },
|
||||
.{ "link.hostname", "lightpanda.io" },
|
||||
.{ "a.host = 'lightpanda.io'", "lightpanda.io" },
|
||||
.{ "a.host", "lightpanda.io" },
|
||||
.{ "a.port", "" },
|
||||
.{ "a.hostname", "lightpanda.io" },
|
||||
|
||||
.{ "link.host", "lightpanda.io" },
|
||||
.{ "link.hostname", "lightpanda.io" },
|
||||
.{ "link.hostname = 'foo.bar'", "foo.bar" },
|
||||
.{ "link.href", "https://foo.bar/" },
|
||||
.{ "a.host", "lightpanda.io" },
|
||||
.{ "a.hostname", "lightpanda.io" },
|
||||
.{ "a.hostname = 'foo.bar'", "foo.bar" },
|
||||
.{ "a.href", "https://foo.bar/" },
|
||||
|
||||
.{ "link.search", "" },
|
||||
.{ "link.search = 'q=bar'", "q=bar" },
|
||||
.{ "link.search", "?q=bar" },
|
||||
.{ "link.href", "https://foo.bar/?q=bar" },
|
||||
.{ "a.search", "" },
|
||||
.{ "a.search = 'q=bar'", "q=bar" },
|
||||
.{ "a.search", "?q=bar" },
|
||||
.{ "a.href", "https://foo.bar/?q=bar" },
|
||||
|
||||
.{ "link.hash", "" },
|
||||
.{ "link.hash = 'frag'", "frag" },
|
||||
.{ "link.hash", "#frag" },
|
||||
.{ "link.href", "https://foo.bar/?q=bar#frag" },
|
||||
.{ "a.hash", "" },
|
||||
.{ "a.hash = 'frag'", "frag" },
|
||||
.{ "a.hash", "#frag" },
|
||||
.{ "a.href", "https://foo.bar/?q=bar#frag" },
|
||||
|
||||
.{ "link.port", "" },
|
||||
.{ "link.port = '443'", "443" },
|
||||
.{ "link.host", "foo.bar:443" },
|
||||
.{ "link.hostname", "foo.bar" },
|
||||
.{ "link.href", "https://foo.bar:443/?q=bar#frag" },
|
||||
.{ "link.port = null", "null" },
|
||||
.{ "link.href", "https://foo.bar/?q=bar#frag" },
|
||||
.{ "a.port", "" },
|
||||
.{ "a.port = '443'", "443" },
|
||||
.{ "a.host", "foo.bar:443" },
|
||||
.{ "a.hostname", "foo.bar" },
|
||||
.{ "a.href", "https://foo.bar:443/?q=bar#frag" },
|
||||
.{ "a.port = null", "null" },
|
||||
.{ "a.href", "https://foo.bar/?q=bar#frag" },
|
||||
|
||||
.{ "link.href = 'foo'", "foo" },
|
||||
.{ "a.href = 'foo'", "foo" },
|
||||
|
||||
.{ "link.type", "" },
|
||||
.{ "link.type = 'text/html'", "text/html" },
|
||||
.{ "link.type", "text/html" },
|
||||
.{ "link.type = ''", "" },
|
||||
.{ "a.type", "" },
|
||||
.{ "a.type = 'text/html'", "text/html" },
|
||||
.{ "a.type", "text/html" },
|
||||
.{ "a.type = ''", "" },
|
||||
|
||||
.{ "link.text", "OK" },
|
||||
.{ "link.text = 'foo'", "foo" },
|
||||
.{ "link.text", "foo" },
|
||||
.{ "link.text = 'OK'", "OK" },
|
||||
.{ "a.text", "OK" },
|
||||
.{ "a.text = 'foo'", "foo" },
|
||||
.{ "a.text", "foo" },
|
||||
.{ "a.text = 'OK'", "OK" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
@@ -1350,151 +1146,4 @@ test "Browser.HTML.Element" {
|
||||
.{ "lyric.src = 15", "15" },
|
||||
.{ "lyric.src", "15" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "let a = document.createElement('a');", null },
|
||||
.{ "a.href = 'about'", null },
|
||||
.{ "a.href", "https://lightpanda.io/opensource-browser/about" },
|
||||
}, .{});
|
||||
|
||||
// detached node cannot be focused
|
||||
try runner.testCases(&.{
|
||||
.{ "const focused = document.activeElement", null },
|
||||
.{ "document.createElement('a').focus()", null },
|
||||
.{ "document.activeElement === focused", "true" },
|
||||
}, .{});
|
||||
}
|
||||
test "Browser.HTML.HtmlInputElement.propeties" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{ .url = "https://lightpanda.io/noslashattheend" });
|
||||
defer runner.deinit();
|
||||
var alloc = std.heap.ArenaAllocator.init(runner.app.allocator);
|
||||
defer alloc.deinit();
|
||||
const arena = alloc.allocator();
|
||||
|
||||
try runner.testCases(&.{.{ "let elem_input = document.createElement('input')", null }}, .{});
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.form", "null" }}, .{}); // Initial value
|
||||
// Valid input.form is tested separately: see Browser.HTML.HtmlInputElement.properties.form
try testProperty(arena, &runner, "elem_input.form", "null", &.{.{ .input = "'foo'" }}); // Invalid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.accept", "" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.accept", null, &str_valids); // Valid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.alt", "" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.alt", null, &str_valids); // Valid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.disabled", "false" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.disabled", null, &bool_valids); // Valid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.maxLength", "-1" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.maxLength", null, &.{.{ .input = "5" }}); // Valid
|
||||
try testProperty(arena, &runner, "elem_input.maxLength", "0", &.{.{ .input = "'banana'" }}); // Invalid
|
||||
try runner.testCases(&.{.{ "try { elem_input.maxLength = -45 } catch(e) {e}", "Error: NegativeValueNotAllowed" }}, .{}); // Error
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.name", "" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.name", null, &str_valids); // Valid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.readOnly", "false" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.readOnly", null, &bool_valids); // Valid
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.size", "20" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.size", null, &.{.{ .input = "5" }}); // Valid
|
||||
try testProperty(arena, &runner, "elem_input.size", "20", &.{.{ .input = "-26" }}); // Invalid
|
||||
try runner.testCases(&.{.{ "try { elem_input.size = 0 } catch(e) {e}", "Error: ZeroNotAllowed" }}, .{}); // Error
|
||||
try runner.testCases(&.{.{ "try { elem_input.size = 'banana' } catch(e) {e}", "Error: ZeroNotAllowed" }}, .{}); // Error
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.src", "" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.src", null, &.{
|
||||
.{ .input = "'foo'", .expected = "https://lightpanda.io/foo" }, // TODO stitch should work with spaces -> %20
|
||||
.{ .input = "-3", .expected = "https://lightpanda.io/-3" },
|
||||
.{ .input = "''", .expected = "https://lightpanda.io/noslashattheend" },
|
||||
});
|
||||
|
||||
try runner.testCases(&.{.{ "elem_input.type", "text" }}, .{}); // Initial value
|
||||
try testProperty(arena, &runner, "elem_input.type", null, &.{.{ .input = "'checkbox'", .expected = "checkbox" }}); // Valid
|
||||
try testProperty(arena, &runner, "elem_input.type", "text", &.{.{ .input = "'5'" }}); // Invalid
|
||||
|
||||
// Properties that are related
|
||||
try runner.testCases(&.{
|
||||
.{ "let input_checked = document.createElement('input')", null },
|
||||
.{ "input_checked.defaultChecked", "false" },
|
||||
.{ "input_checked.checked", "false" },
|
||||
|
||||
.{ "input_checked.defaultChecked = true", "true" },
|
||||
.{ "input_checked.defaultChecked", "true" },
|
||||
.{ "input_checked.checked", "true" }, // Also perceived as true
|
||||
|
||||
.{ "input_checked.checked = false", "false" },
|
||||
.{ "input_checked.defaultChecked", "true" },
|
||||
.{ "input_checked.checked", "false" },
|
||||
|
||||
.{ "input_checked.defaultChecked = true", "true" },
|
||||
.{ "input_checked.checked", "false" }, // Still false
|
||||
}, .{});
|
||||
try runner.testCases(&.{
|
||||
.{ "let input_value = document.createElement('input')", null },
|
||||
.{ "input_value.defaultValue", "" },
|
||||
.{ "input_value.value", "" },
|
||||
|
||||
.{ "input_value.defaultValue = 3.1", "3.1" },
|
||||
.{ "input_value.defaultValue", "3.1" },
|
||||
.{ "input_value.value", "3.1" }, // Also perceived as 3.1
|
||||
|
||||
.{ "input_value.value = 'mango'", "mango" },
|
||||
.{ "input_value.defaultValue", "3.1" },
|
||||
.{ "input_value.value", "mango" },
|
||||
|
||||
.{ "input_value.defaultValue = true", "true" },
|
||||
.{ "input_value.value", "mango" }, // Still mango
|
||||
}, .{});
|
||||
}
|
||||
test "Browser.HTML.HtmlInputElement.propeties.form" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{ .html =
|
||||
\\ <form action="test.php" target="_blank">
|
||||
\\ <p>
|
||||
\\ <label>First name: <input type="text" name="first-name" /></label>
|
||||
\\ </p>
|
||||
\\ </form>
|
||||
});
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "let elem_input = document.querySelector('input')", null },
|
||||
.{ "elem_input.form", "[object HTMLFormElement]" }, // Initial value
|
||||
.{ "elem_input.form = 'foo'", null },
|
||||
.{ "elem_input.form", "[object HTMLFormElement]" }, // Invalid
|
||||
}, .{});
|
||||
}
|
||||
|
||||
const Check = struct {
|
||||
input: []const u8,
|
||||
expected: ?[]const u8 = null, // Needed when input != expected
|
||||
};
|
||||
const bool_valids = [_]Check{
|
||||
.{ .input = "true" },
|
||||
.{ .input = "''", .expected = "false" },
|
||||
.{ .input = "13.5", .expected = "true" },
|
||||
};
|
||||
const str_valids = [_]Check{
|
||||
.{ .input = "'foo'", .expected = "foo" },
|
||||
.{ .input = "5", .expected = "5" },
|
||||
.{ .input = "''", .expected = "" },
|
||||
.{ .input = "document", .expected = "[object HTMLDocument]" },
|
||||
};
|
||||
|
||||
// .{ "elem.type = '5'", "5" },
|
||||
// .{ "elem.type", "text" },
|
||||
fn testProperty(
|
||||
arena: std.mem.Allocator,
|
||||
runner: *testing.JsRunner,
|
||||
elem_dot_prop: []const u8,
|
||||
always: ?[]const u8, // Ignores checks' expected if set
|
||||
checks: []const Check,
|
||||
) !void {
|
||||
for (checks) |check| {
|
||||
try runner.testCases(&.{
|
||||
.{ try std.mem.concat(arena, u8, &.{ elem_dot_prop, " = ", check.input }), null },
|
||||
.{ elem_dot_prop, always orelse check.expected orelse check.input },
|
||||
}, .{});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ const std = @import("std");
const Allocator = std.mem.Allocator;

const parser = @import("../netsurf.zig");
const Page = @import("../page.zig").Page;
const HTMLElement = @import("elements.zig").HTMLElement;
const FormData = @import("../xhr/form_data.zig").FormData;

@@ -28,11 +27,28 @@ pub const HTMLFormElement = struct {
    pub const prototype = *HTMLElement;
    pub const subtype = .node;

    pub fn _submit(self: *parser.Form, page: *Page) !void {
        return page.submitForm(self, null);
    pub fn _requestSubmit(self: *parser.Form) !void {
        try parser.formElementSubmit(self);
    }

    pub fn _reset(self: *parser.Form) !void {
        try parser.formElementReset(self);
    }
};

pub const Submission = struct {
    method: ?[]const u8,
    form_data: FormData,
};

pub fn processSubmission(arena: Allocator, form: *parser.Form) !?Submission {
    const form_element: *parser.Element = @ptrCast(form);
    const method = try parser.elementGetAttribute(form_element, "method");

    return .{
        .method = method,
        .form_data = try FormData.fromForm(arena, form),
    };
}

// Check xhr/form_data.zig for tests
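
A minimal usage sketch (an illustration, not part of this change) of the new processSubmission helper; the `arena` allocator and the `*parser.Form` value are assumed to be available from the surrounding page:

    if (try processSubmission(arena, form)) |submission| {
        // method is the raw value of the form's "method" attribute, if any
        const method = submission.method orelse "get";
        _ = method;
        // submission.form_data is a FormData built from the form's current fields
    }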
@@ -25,6 +25,7 @@ const History = @import("history.zig").History;
const Location = @import("location.zig").Location;
const MediaQueryList = @import("media_query_list.zig").MediaQueryList;
const Performance = @import("performance.zig").Performance;
const TrustedTypes = @import("trusted_types.zig");

pub const Interfaces = .{
    HTMLDocument,
@@ -38,4 +39,5 @@ pub const Interfaces = .{
    Location,
    MediaQueryList,
    Performance,
    TrustedTypes.Interfaces,
};

@@ -16,7 +16,7 @@
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const URL = @import("../url/url.zig").URL;
|
||||
|
||||
@@ -24,18 +24,18 @@ const URL = @import("../url/url.zig").URL;
|
||||
pub const Location = struct {
|
||||
url: ?URL = null,
|
||||
|
||||
pub fn get_href(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_href(page);
|
||||
pub fn get_href(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_href(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_protocol(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_protocol(page);
|
||||
pub fn get_protocol(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_protocol(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_host(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_host(page);
|
||||
pub fn get_host(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_host(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
@@ -44,8 +44,8 @@ pub const Location = struct {
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_port(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_port(page);
|
||||
pub fn get_port(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_port(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
@@ -54,35 +54,36 @@ pub const Location = struct {
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_search(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_search(page);
|
||||
pub fn get_search(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_search(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_hash(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_hash(page);
|
||||
pub fn get_hash(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_hash(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn get_origin(self: *Location, page: *Page) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_origin(page);
|
||||
pub fn get_origin(self: *Location, state: *SessionState) ![]const u8 {
|
||||
if (self.url) |*u| return u.get_origin(state);
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn _assign(_: *const Location, url: []const u8, page: *Page) !void {
|
||||
return page.navigateFromWebAPI(url, .{ .reason = .script });
|
||||
// TODO
|
||||
pub fn _assign(_: *Location, url: []const u8) !void {
|
||||
_ = url;
|
||||
}
|
||||
|
||||
pub fn _replace(_: *const Location, url: []const u8, page: *Page) !void {
|
||||
return page.navigateFromWebAPI(url, .{ .reason = .script });
|
||||
// TODO
|
||||
pub fn _replace(_: *Location, url: []const u8) !void {
|
||||
_ = url;
|
||||
}
|
||||
|
||||
pub fn _reload(_: *const Location, page: *Page) !void {
|
||||
return page.navigateFromWebAPI(page.url.raw, .{ .reason = .script });
|
||||
}
|
||||
// TODO
|
||||
pub fn _reload(_: *Location) !void {}
|
||||
|
||||
pub fn _toString(self: *Location, page: *Page) ![]const u8 {
|
||||
return try self.get_href(page);
|
||||
pub fn _toString(self: *Location, state: *SessionState) ![]const u8 {
|
||||
return try self.get_href(state);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -19,13 +19,27 @@ const std = @import("std");

const parser = @import("../netsurf.zig");
const HTMLElement = @import("elements.zig").HTMLElement;
const Page = @import("../page.zig").Page;
const SessionState = @import("../env.zig").SessionState;

pub const HTMLSelectElement = struct {
    pub const Self = parser.Select;
    pub const prototype = *HTMLElement;
    pub const subtype = .node;

    // By default, if no option is explicitly selected, the first option should
    // be selected. However, libdom doesn't do this: it sets the
    // selectedIndex to -1, which is a valid value for "nothing selected".
    // Therefore, when libdom says the selectedIndex == -1, we don't know if
    // it means that nothing is selected, or if the first option is selected by
    // default.
    // There are cases where this won't work, but when selectedIndex is
    // explicitly set, we set this boolean flag. Then, when we're getting the
    // selectedIndex, if this flag is false (which is to say that selectedIndex
    // hasn't been explicitly set) AND we have at least 1 option
    // AND it isn't a multi select, we can make the 1st item selected by
    // default (by returning selectedIndex == 0).
    explicit_index_set: bool = false,

    pub fn get_length(select: *parser.Select) !u32 {
        return parser.selectGetLength(select);
    }
@@ -55,12 +69,12 @@ pub const HTMLSelectElement = struct {
        return parser.selectSetMultiple(select, multiple);
    }

    pub fn get_selectedIndex(select: *parser.Select, page: *Page) !i32 {
        const state = try page.getOrCreateNodeState(@alignCast(@ptrCast(select)));
    pub fn get_selectedIndex(select: *parser.Select, state: *SessionState) !i32 {
        const self = try state.getOrCreateNodeWrapper(HTMLSelectElement, @ptrCast(select));
        const selected_index = try parser.selectGetSelectedIndex(select);

        // See the explicit_index_set field documentation
        if (!state.explicit_index_set) {
        if (!self.explicit_index_set) {
            if (selected_index == -1) {
                if (try parser.selectGetMultiple(select) == false) {
                    if (try get_length(select) > 0) {
@@ -74,9 +88,9 @@ pub const HTMLSelectElement = struct {

    // Libdom's dom_html_select_select_set_selected_index will crash if index
    // is out of range, and it doesn't properly unset options
    pub fn set_selectedIndex(select: *parser.Select, index: i32, page: *Page) !void {
        var state = try page.getOrCreateNodeState(@alignCast(@ptrCast(select)));
        state.explicit_index_set = true;
    pub fn set_selectedIndex(select: *parser.Select, index: i32, state: *SessionState) !void {
        var self = try state.getOrCreateNodeWrapper(HTMLSelectElement, @ptrCast(select));
        self.explicit_index_set = true;

        const options = try parser.selectGetOptions(select);
        const len = try parser.optionCollectionGetLength(options);

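A hedged illustration of the fallback described in the comment above. This sketch is not a test that exists in this change; it assumes the jsRunner used by the other tests in this file and the usual createElement/appendChild surface:

    try runner.testCases(&.{
        .{ "let sel = document.createElement('select')", null },
        .{ "sel.appendChild(document.createElement('option'))", null },
        // nothing was explicitly selected, so the first option is reported as selected
        .{ "sel.selectedIndex", "0" },
    }, .{});
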
149 src/browser/html/trusted_types.zig Normal file
@@ -0,0 +1,149 @@
|
||||
// Copyright (C) 2023-2025 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const Env = @import("../env.zig").Env;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const log = std.log.scoped(.trusted_types);
|
||||
|
||||
pub const Interfaces = .{
|
||||
TrustedTypePolicyFactory,
|
||||
TrustedTypePolicy,
|
||||
TrustedTypePolicyOptions,
|
||||
TrustedHTML,
|
||||
TrustedScript,
|
||||
TrustedScriptURL,
|
||||
};
|
||||
|
||||
const TrustedHTML = struct {
|
||||
value: []const u8,
|
||||
|
||||
// TODO _toJSON
|
||||
pub fn _toString(self: *const TrustedHTML) []const u8 {
|
||||
return self.value;
|
||||
}
|
||||
};
|
||||
const TrustedScript = struct {
|
||||
value: []const u8,
|
||||
|
||||
pub fn _toString(self: *const TrustedScript) []const u8 {
|
||||
return self.value;
|
||||
}
|
||||
};
|
||||
const TrustedScriptURL = struct {
|
||||
value: []const u8,
|
||||
|
||||
pub fn _toString(self: *const TrustedScriptURL) []const u8 {
|
||||
return self.value;
|
||||
}
|
||||
};
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/TrustedTypePolicyFactory
|
||||
pub const TrustedTypePolicyFactory = struct {
|
||||
// TBD innerHTML if set the default createHTML should be used when `element.innerHTML = userInput;` does v8 do that for us? Prob not.
|
||||
default_policy: ?TrustedTypePolicy = null, // The default policy, set by creating a policy with the name "default".
|
||||
created_policy_names: std.ArrayListUnmanaged([]const u8) = .empty,
|
||||
|
||||
pub fn _defaultPolicy(self: *TrustedTypePolicyFactory) ?TrustedTypePolicy {
|
||||
return self.default_policy;
|
||||
}
|
||||
|
||||
// https://w3c.github.io/trusted-types/dist/spec/#dom-trustedtypepolicyfactory-createpolicy
|
||||
// https://w3c.github.io/trusted-types/dist/spec/#abstract-opdef-create-a-trusted-type-policy
|
||||
pub fn _createPolicy(self: *TrustedTypePolicyFactory, name: []const u8, options: ?TrustedTypePolicyOptions, state: *SessionState) !TrustedTypePolicy {
|
||||
// TODO Throw TypeError if policy names are restricted by the Content Security Policy trusted-types directive and this name is not on the allowlist.
|
||||
// TODO Throw TypeError if the name is a duplicate and the Content Security Policy trusted-types directive is not using allow-duplicates
|
||||
|
||||
const policy = TrustedTypePolicy{
|
||||
.name = name,
|
||||
.options = options orelse TrustedTypePolicyOptions{},
|
||||
};
|
||||
|
||||
if (std.mem.eql(u8, name, "default")) {
|
||||
// TBD what if default_policy is already set?
|
||||
self.default_policy = policy;
|
||||
}
|
||||
try self.created_policy_names.append(state.arena, try state.arena.dupe(u8, name));
|
||||
|
||||
return policy;
|
||||
}
|
||||
};
|
||||
|
||||
pub const TrustedTypePolicyOptions = struct {
|
||||
createHTML: ?Env.Function = null, // (str, ..args) -> str
|
||||
createScript: ?Env.Function = null, // (str, ..args) -> str
|
||||
createScriptURL: ?Env.Function = null, // (str, ..args) -> str
|
||||
};
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/TrustedTypePolicy
|
||||
pub const TrustedTypePolicy = struct {
|
||||
name: []const u8,
|
||||
options: TrustedTypePolicyOptions,
|
||||
|
||||
pub fn get_name(self: *TrustedTypePolicy) []const u8 {
|
||||
return self.name;
|
||||
}
|
||||
|
||||
pub fn _createHTML(self: *TrustedTypePolicy, html: []const u8) !TrustedHTML {
|
||||
// TODO handle throwIfMissing
|
||||
const create = self.options.createHTML orelse return error.TypeError;
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
const out = try create.tryCall([]const u8, .{html}, &result); // TODO varargs
|
||||
return .{
|
||||
.value = out,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn _createScript(self: *TrustedTypePolicy, script: []const u8) !TrustedScript {
|
||||
// TODO handle throwIfMissing
|
||||
const create = self.options.createScript orelse return error.TypeError;
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
return try create.tryCall(TrustedScript, .{script}, &result); // TODO varargs
|
||||
}
|
||||
|
||||
pub fn _createScriptURL(self: *TrustedTypePolicy, url: []const u8) !TrustedScriptURL {
|
||||
// TODO handle throwIfMissing
|
||||
const create = self.options.createScriptURL orelse return error.TypeError;
|
||||
|
||||
var result: Env.Function.Result = undefined;
|
||||
return try create.tryCall(TrustedScriptURL, .{url}, &result); // TODO varargs
|
||||
}
|
||||
};
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser.TrustedTypes" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "trustedTypes", "[object TrustedTypePolicyFactory]" },
|
||||
.{
|
||||
\\ let escapeHTMLPolicy = trustedTypes.createPolicy('myEscapePolicy', {
|
||||
\\ createHTML: (string) => string.replace(/</g, "&lt;"),
|
||||
\\ });
|
||||
,
|
||||
null,
|
||||
},
|
||||
.{ "escapeHTMLPolicy.createHTML('<img src=x onerror=alert(1)>');", "<img src=x onerror=alert(1)>" },
|
||||
}, .{});
|
||||
}
|
||||
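A minimal usage sketch (not part of the diff) of how the factory tracks its default policy; `state` is assumed to be a valid *SessionState whose arena owns the duplicated policy names:

var factory = TrustedTypePolicyFactory{};
const policy = try factory._createPolicy("default", null, state);
// a policy named "default" is also recorded as the factory's default policy
std.debug.assert(factory._defaultPolicy() != null);
std.debug.assert(std.mem.eql(u8, policy.name, "default"));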
@@ -18,10 +18,9 @@
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Function = @import("../env.zig").Function;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
const Loop = @import("../../runtime/loop.zig").Loop;
|
||||
|
||||
const Navigator = @import("navigator.zig").Navigator;
|
||||
@@ -32,10 +31,12 @@ const Console = @import("../console/console.zig").Console;
|
||||
const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||
const MediaQueryList = @import("media_query_list.zig").MediaQueryList;
|
||||
const Performance = @import("performance.zig").Performance;
|
||||
const CSSStyleDeclaration = @import("../cssom/css_style_declaration.zig").CSSStyleDeclaration;
|
||||
const TrustedTypePolicyFactory = @import("trusted_types.zig").TrustedTypePolicyFactory;
|
||||
|
||||
const storage = @import("../storage/storage.zig");
|
||||
|
||||
const log = std.log.scoped(.window);
|
||||
|
||||
// https://dom.spec.whatwg.org/#interface-window-extensions
|
||||
// https://html.spec.whatwg.org/multipage/nav-history-apis.html#window
|
||||
pub const Window = struct {
|
||||
@@ -51,18 +52,18 @@ pub const Window = struct {
|
||||
storage_shelf: ?*storage.Shelf = null,
|
||||
|
||||
// counter for having unique timer ids
|
||||
timer_id: u30 = 0,
|
||||
timer_id: u31 = 0,
|
||||
timers: std.AutoHashMapUnmanaged(u32, *TimerCallback) = .{},
|
||||
|
||||
crypto: Crypto = .{},
|
||||
console: Console = .{},
|
||||
navigator: Navigator = .{},
|
||||
performance: Performance,
|
||||
trusted_types: TrustedTypePolicyFactory = .{},
|
||||
|
||||
pub fn create(target: ?[]const u8, navigator: ?Navigator) !Window {
|
||||
var fbs = std.io.fixedBufferStream("");
|
||||
const Elements = @import("../html/elements.zig");
|
||||
const html_doc = try parser.documentHTMLParse(fbs.reader(), "utf-8", &Elements.createElement);
|
||||
const html_doc = try parser.documentHTMLParse(fbs.reader(), "utf-8");
|
||||
const doc = parser.documentHTMLToDocument(html_doc);
|
||||
try parser.documentSetDocumentURI(doc, "about:blank");
|
||||
|
||||
@@ -101,10 +102,6 @@ pub const Window = struct {
|
||||
return &self.location;
|
||||
}
|
||||
|
||||
pub fn set_location(_: *const Window, url: []const u8, page: *Page) !void {
|
||||
return page.navigateFromWebAPI(url, .{ .reason = .script });
|
||||
}
|
||||
|
||||
pub fn get_console(self: *Window) *Console {
|
||||
return &self.console;
|
||||
}
|
||||
@@ -121,11 +118,6 @@ pub const Window = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
// TODO: frames
|
||||
pub fn get_top(self: *Window) *Window {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn get_document(self: *Window) ?*parser.DocumentHTML {
|
||||
return self.document;
|
||||
}
|
||||
@@ -135,15 +127,15 @@ pub const Window = struct {
|
||||
}
|
||||
|
||||
// The interior height of the window in pixels, including the height of the horizontal scroll bar, if present.
|
||||
pub fn get_innerHeight(_: *Window, page: *Page) u32 {
|
||||
pub fn get_innerHeight(_: *Window, state: *SessionState) u32 {
|
||||
// We do not have scrollbars or padding so this is the same as Element.clientHeight
|
||||
return page.renderer.height();
|
||||
return state.renderer.height();
|
||||
}
|
||||
|
||||
// The interior width of the window in pixels. That includes the width of the vertical scroll bar, if one is present.
|
||||
pub fn get_innerWidth(_: *Window, page: *Page) u32 {
|
||||
pub fn get_innerWidth(_: *Window, state: *SessionState) u32 {
|
||||
// We do not have scrollbars or padding so this is the same as Element.clientWidth
|
||||
return page.renderer.width();
|
||||
return state.renderer.width();
|
||||
}
|
||||
|
||||
pub fn get_name(self: *Window) []const u8 {
|
||||
@@ -164,63 +156,68 @@ pub const Window = struct {
|
||||
return &self.performance;
|
||||
}
|
||||
|
||||
pub fn _requestAnimationFrame(self: *Window, cbk: Function, page: *Page) !u32 {
|
||||
return self.createTimeout(cbk, 5, page, .{ .animation_frame = true });
|
||||
pub fn get_trustedTypes(self: *Window) !TrustedTypePolicyFactory {
|
||||
return self.trusted_types;
|
||||
}
|
||||
|
||||
pub fn _cancelAnimationFrame(self: *Window, id: u32, page: *Page) !void {
|
||||
const kv = self.timers.fetchRemove(id) orelse return;
|
||||
return page.loop.cancel(kv.value.loop_id);
|
||||
// Tells the browser you wish to perform an animation. It requests the browser to call a user-supplied callback function before the next repaint.
|
||||
// fn callback(timestamp: f64)
|
||||
// Returns the request ID, that uniquely identifies the entry in the callback list.
|
||||
pub fn _requestAnimationFrame(
|
||||
self: *Window,
|
||||
callback: Function,
|
||||
) !u32 {
|
||||
// We execute the callback immediately; this may not be correct (TBD).
|
||||
// Rationale: when multiple callbacks queued by requestAnimationFrame() begin to fire in a single frame, each receives the same timestamp even though time has passed while computing every previous callback's workload.
|
||||
var result: Function.Result = undefined;
|
||||
callback.tryCall(void, .{self.performance._now()}, &result) catch {
|
||||
log.err("Window.requestAnimationFrame(): {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
return 99; // not unique, but the user cannot make assumptions about it; cancelAnimationFrame would be too late anyway.
|
||||
}
|
||||
|
||||
// Cancels an animation frame request previously scheduled through requestAnimationFrame().
|
||||
// This is a no-op since _requestAnimationFrame immediately executes the callback.
|
||||
pub fn _cancelAnimationFrame(_: *Window, request_id: u32) void {
|
||||
_ = request_id;
|
||||
}
|
||||
|
||||
// TODO handle callback arguments.
|
||||
pub fn _setTimeout(self: *Window, cbk: Function, delay: ?u32, page: *Page) !u32 {
|
||||
return self.createTimeout(cbk, delay, page, .{});
|
||||
pub fn _setTimeout(self: *Window, cbk: Function, delay: ?u32, state: *SessionState) !u32 {
|
||||
return self.createTimeout(cbk, delay, state, false);
|
||||
}
|
||||
|
||||
// TODO handle callback arguments.
|
||||
pub fn _setInterval(self: *Window, cbk: Function, delay: ?u32, page: *Page) !u32 {
|
||||
return self.createTimeout(cbk, delay, page, .{ .repeat = true });
|
||||
pub fn _setInterval(self: *Window, cbk: Function, delay: ?u32, state: *SessionState) !u32 {
|
||||
return self.createTimeout(cbk, delay, state, true);
|
||||
}
|
||||
|
||||
pub fn _clearTimeout(self: *Window, id: u32, page: *Page) !void {
|
||||
pub fn _clearTimeout(self: *Window, id: u32, state: *SessionState) !void {
|
||||
const kv = self.timers.fetchRemove(id) orelse return;
|
||||
return page.loop.cancel(kv.value.loop_id);
|
||||
try state.loop.cancel(kv.value.loop_id);
|
||||
}
|
||||
|
||||
pub fn _clearInterval(self: *Window, id: u32, page: *Page) !void {
|
||||
pub fn _clearInterval(self: *Window, id: u32, state: *SessionState) !void {
|
||||
const kv = self.timers.fetchRemove(id) orelse return;
|
||||
return page.loop.cancel(kv.value.loop_id);
|
||||
try state.loop.cancel(kv.value.loop_id);
|
||||
}
|
||||
|
||||
pub fn _matchMedia(_: *const Window, media: []const u8, page: *Page) !MediaQueryList {
|
||||
pub fn _matchMedia(_: *const Window, media: []const u8, state: *SessionState) !MediaQueryList {
|
||||
return .{
|
||||
.matches = false, // TODO?
|
||||
.media = try page.arena.dupe(u8, media),
|
||||
.media = try state.arena.dupe(u8, media),
|
||||
};
|
||||
}
|
||||
|
||||
const CreateTimeoutOpts = struct {
|
||||
repeat: bool = false,
|
||||
animation_frame: bool = false,
|
||||
};
|
||||
fn createTimeout(self: *Window, cbk: Function, delay_: ?u32, page: *Page, comptime opts: CreateTimeoutOpts) !u32 {
|
||||
const delay = delay_ orelse 0;
|
||||
if (delay > 5000) {
|
||||
log.warn(.user_script, "long timeout ignored", .{ .delay = delay, .interval = opts.repeat });
|
||||
// self.timer_id is u30, so the largest value we can generate is
|
||||
// 1_073_741_824. Returning 2_000_000_000 makes sure that clients
|
||||
// can call cancelTimer/cancelInterval without breaking anything.
|
||||
return 2_000_000_000;
|
||||
}
|
||||
|
||||
fn createTimeout(self: *Window, cbk: Function, delay_: ?u32, state: *SessionState, comptime repeat: bool) !u32 {
|
||||
if (self.timers.count() > 512) {
|
||||
return error.TooManyTimeout;
|
||||
}
|
||||
const timer_id = self.timer_id +% 1;
|
||||
self.timer_id = timer_id;
|
||||
|
||||
const arena = page.arena;
|
||||
const arena = state.arena;
|
||||
|
||||
const gop = try self.timers.getOrPut(arena, timer_id);
|
||||
if (gop.found_existing) {
|
||||
@@ -229,7 +226,7 @@ pub const Window = struct {
|
||||
}
|
||||
errdefer _ = self.timers.remove(timer_id);
|
||||
|
||||
const delay_ms: u63 = @as(u63, delay) * std.time.ns_per_ms;
|
||||
const delay: u63 = @as(u63, (delay_ orelse 0)) * std.time.ns_per_ms;
|
||||
const callback = try arena.create(TimerCallback);
|
||||
|
||||
callback.* = .{
|
||||
@@ -238,37 +235,21 @@ pub const Window = struct {
|
||||
.window = self,
|
||||
.timer_id = timer_id,
|
||||
.node = .{ .func = TimerCallback.run },
|
||||
.repeat = if (opts.repeat) delay_ms else null,
|
||||
.animation_frame = opts.animation_frame,
|
||||
.repeat = if (repeat) delay else null,
|
||||
};
|
||||
callback.loop_id = try page.loop.timeout(delay_ms, &callback.node);
|
||||
callback.loop_id = try state.loop.timeout(delay, &callback.node);
|
||||
|
||||
gop.value_ptr.* = callback;
|
||||
return timer_id;
|
||||
}
|
||||
|
||||
// TODO: getComputedStyle should return a read-only CSSStyleDeclaration.
|
||||
// We currently don't have a read-only one, so we return a new instance on
|
||||
// each call.
|
||||
pub fn _getComputedStyle(_: *const Window, element: *parser.Element, pseudo_element: ?[]const u8) !CSSStyleDeclaration {
|
||||
// NOT IMPLEMENTED - This is a dummy implementation that always returns null to deter Playwright from using this path to solve click.js.
|
||||
// returns an object containing the values of all CSS properties of an element, after applying active stylesheets and resolving any basic computation those values may contain.
|
||||
pub fn _getComputedStyle(_: *Window, element: *parser.Element, pseudo_element: ?[]const u8) !?void {
|
||||
_ = element;
|
||||
_ = pseudo_element;
|
||||
return .empty;
|
||||
}
|
||||
|
||||
const ScrollToOpts = union(enum) {
|
||||
x: i32,
|
||||
opts: Opts,
|
||||
|
||||
const Opts = struct {
|
||||
top: i32,
|
||||
left: i32,
|
||||
behavior: []const u8,
|
||||
};
|
||||
};
|
||||
pub fn _scrollTo(_: *const Window, opts: ScrollToOpts, y: ?u32) void {
|
||||
_ = opts;
|
||||
_ = y;
|
||||
log.warn("Not implemented function getComputedStyle called, null returned", .{});
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -289,28 +270,15 @@ const TimerCallback = struct {
|
||||
// if the event should be repeated
|
||||
repeat: ?u63 = null,
|
||||
|
||||
animation_frame: bool = false,
|
||||
|
||||
window: *Window,
|
||||
|
||||
fn run(node: *Loop.CallbackNode, repeat_delay: *?u63) void {
|
||||
const self: *TimerCallback = @fieldParentPtr("node", node);
|
||||
|
||||
var result: Function.Result = undefined;
|
||||
|
||||
var call: anyerror!void = undefined;
|
||||
if (self.animation_frame) {
|
||||
call = self.cbk.tryCall(void, .{self.window.performance._now()}, &result);
|
||||
} else {
|
||||
call = self.cbk.tryCall(void, .{}, &result);
|
||||
}
|
||||
|
||||
call catch {
|
||||
log.debug(.user_script, "callback error", .{
|
||||
.err = result.exception,
|
||||
.stack = result.stack,
|
||||
.source = "window timeout",
|
||||
});
|
||||
self.cbk.tryCall(void, .{}, &result) catch {
|
||||
log.err("timeout callback error: {s}", .{result.exception});
|
||||
log.debug("stack:\n{s}", .{result.stack orelse "???"});
|
||||
};
|
||||
|
||||
if (self.repeat) |r| {
|
||||
@@ -329,11 +297,6 @@ test "Browser.HTML.Window" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "window.parent === window", "true" },
|
||||
.{ "window.top === window", "true" },
|
||||
}, .{});
|
||||
|
||||
// requestAnimationFrame should be able to wait by recursively calling itself
|
||||
// Note, however, that this test does not wait, since the request is just sent to the browser
|
||||
try runner.testCases(&.{
|
||||
@@ -380,11 +343,4 @@ test "Browser.HTML.Window" {
|
||||
.{ "innerHeight", "1" },
|
||||
.{ "innerWidth", "2" },
|
||||
}, .{});
|
||||
|
||||
// cancelAnimationFrame should be able to cancel a request with the given id
|
||||
try runner.testCases(&.{
|
||||
.{ "let longCall = false;", null },
|
||||
.{ "window.setTimeout(() => {longCall = true}, 5001);", null },
|
||||
.{ "longCall;", "false" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -1,284 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
// Used by FormData and URLSearchParams.
|
||||
//
|
||||
// We store the values in an ArrayList rather than a
|
||||
// StringArrayHashMap([]const u8) because of the way the iterators (i.e., keys(),
|
||||
// values() and entries()) work. The FormData can contain duplicate keys, and
|
||||
// each iteration yields 1 key=>value pair. So, given:
|
||||
//
|
||||
// let f = new FormData();
|
||||
// f.append('a', '1');
|
||||
// f.append('a', '2');
|
||||
//
|
||||
// Then we'd expect f.keys(), f.values() and f.entries() to yield 2 results:
|
||||
// ['a', '1']
|
||||
// ['a', '2']
|
||||
//
|
||||
// This is much easier to do with an ArrayList than a HashMap, especially given
|
||||
// that the FormData could be mutated while iterating.
|
||||
// The downside is that most of the normal operations are O(N).
|
||||
pub const List = struct {
|
||||
entries: std.ArrayListUnmanaged(KeyValue) = .{},
|
||||
|
||||
pub fn init(entries: std.ArrayListUnmanaged(KeyValue)) List {
|
||||
return .{ .entries = entries };
|
||||
}
|
||||
|
||||
pub fn clone(self: *const List, arena: Allocator) !List {
|
||||
const entries = self.entries.items;
|
||||
|
||||
var c: std.ArrayListUnmanaged(KeyValue) = .{};
|
||||
try c.ensureTotalCapacity(arena, entries.len);
|
||||
for (entries) |kv| {
|
||||
c.appendAssumeCapacity(kv);
|
||||
}
|
||||
|
||||
return .{ .entries = c };
|
||||
}
|
||||
|
||||
pub fn fromOwnedSlice(entries: []KeyValue) List {
|
||||
return .{
|
||||
.entries = std.ArrayListUnmanaged(KeyValue).fromOwnedSlice(entries),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn count(self: *const List) usize {
|
||||
return self.entries.items.len;
|
||||
}
|
||||
|
||||
pub fn get(self: *const List, key: []const u8) ?[]const u8 {
|
||||
const result = self.find(key) orelse return null;
|
||||
return result.entry.value;
|
||||
}
|
||||
|
||||
pub fn getAll(self: *const List, arena: Allocator, key: []const u8) ![]const []const u8 {
|
||||
var arr: std.ArrayListUnmanaged([]const u8) = .empty;
|
||||
for (self.entries.items) |entry| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
try arr.append(arena, entry.value);
|
||||
}
|
||||
}
|
||||
return arr.items;
|
||||
}
|
||||
|
||||
pub fn has(self: *const List, key: []const u8) bool {
|
||||
return self.find(key) != null;
|
||||
}
|
||||
|
||||
pub fn set(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
self.delete(key);
|
||||
return self.append(arena, key, value);
|
||||
}
|
||||
|
||||
pub fn append(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
return self.appendOwned(arena, try arena.dupe(u8, key), try arena.dupe(u8, value));
|
||||
}
|
||||
|
||||
pub fn appendOwned(self: *List, arena: Allocator, key: []const u8, value: []const u8) !void {
|
||||
return self.entries.append(arena, .{
|
||||
.key = key,
|
||||
.value = value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn appendOwnedAssumeCapacity(self: *List, key: []const u8, value: []const u8) void {
|
||||
self.entries.appendAssumeCapacity(.{
|
||||
.key = key,
|
||||
.value = value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn delete(self: *List, key: []const u8) void {
|
||||
var i: usize = 0;
|
||||
while (i < self.entries.items.len) {
|
||||
const entry = self.entries.items[i];
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
_ = self.entries.swapRemove(i);
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deleteKeyValue(self: *List, key: []const u8, value: []const u8) void {
|
||||
var i: usize = 0;
|
||||
while (i < self.entries.items.len) {
|
||||
const entry = self.entries.items[i];
|
||||
if (std.mem.eql(u8, key, entry.key) and std.mem.eql(u8, value, entry.value)) {
|
||||
_ = self.entries.swapRemove(i);
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn keyIterator(self: *const List) KeyIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn valueIterator(self: *const List) ValueIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn entryIterator(self: *const List) EntryIterator {
|
||||
return .{ .entries = &self.entries };
|
||||
}
|
||||
|
||||
pub fn ensureTotalCapacity(self: *List, arena: Allocator, len: usize) !void {
|
||||
return self.entries.ensureTotalCapacity(arena, len);
|
||||
}
|
||||
|
||||
const FindResult = struct {
|
||||
index: usize,
|
||||
entry: KeyValue,
|
||||
};
|
||||
|
||||
fn find(self: *const List, key: []const u8) ?FindResult {
|
||||
for (self.entries.items, 0..) |entry, i| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
return .{ .index = i, .entry = entry };
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
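A minimal usage sketch (not part of the diff), assuming `arena` is a std.mem.Allocator; it shows why duplicate keys must be preserved for the iterators:

var list = List{};
try list.append(arena, "a", "1");
try list.append(arena, "a", "2"); // duplicate key, kept as a separate entry
var it = list.entryIterator();
while (it._next()) |entry| {
    // yields ("a", "1") then ("a", "2"), matching FormData's entries()
    std.debug.print("{s}={s}\n", .{ entry[0], entry[1] });
}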
|
||||
pub const KeyValue = struct {
|
||||
key: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
pub const KeyIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *KeyIterator) ?[]const u8 {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return entries[index].key;
|
||||
}
|
||||
};
|
||||
|
||||
pub const ValueIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *ValueIterator) ?[]const u8 {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return entries[index].value;
|
||||
}
|
||||
};
|
||||
|
||||
pub const EntryIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(KeyValue),
|
||||
|
||||
pub fn _next(self: *EntryIterator) ?struct { []const u8, []const u8 } {
|
||||
const entries = self.entries.items;
|
||||
|
||||
const index = self.index;
|
||||
if (index == entries.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
const entry = entries[index];
|
||||
return .{ entry.key, entry.value };
|
||||
}
|
||||
};
|
||||
|
||||
const URLEncodeMode = enum {
|
||||
form,
|
||||
query,
|
||||
};
|
||||
|
||||
pub fn urlEncode(list: List, mode: URLEncodeMode, writer: anytype) !void {
|
||||
const entries = list.entries.items;
|
||||
if (entries.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try urlEncodeEntry(entries[0], mode, writer);
|
||||
for (entries[1..]) |entry| {
|
||||
try writer.writeByte('&');
|
||||
try urlEncodeEntry(entry, mode, writer);
|
||||
}
|
||||
}
|
||||
|
||||
fn urlEncodeEntry(entry: KeyValue, mode: URLEncodeMode, writer: anytype) !void {
|
||||
try urlEncodeValue(entry.key, mode, writer);
|
||||
|
||||
// for a form, for an empty value, we'll do "spice="
|
||||
// but for a query, we do "spice"
|
||||
if (mode == .query and entry.value.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try writer.writeByte('=');
|
||||
try urlEncodeValue(entry.value, mode, writer);
|
||||
}
|
||||
|
||||
fn urlEncodeValue(value: []const u8, mode: URLEncodeMode, writer: anytype) !void {
|
||||
if (!urlEncodeShouldEscape(value, mode)) {
|
||||
return writer.writeAll(value);
|
||||
}
|
||||
|
||||
for (value) |b| {
|
||||
if (urlEncodeUnreserved(b, mode)) {
|
||||
try writer.writeByte(b);
|
||||
} else if (b == ' ' and mode == .form) {
|
||||
// for form submission, space should be encoded as '+', not '%20'
|
||||
try writer.writeByte('+');
|
||||
} else {
|
||||
try writer.print("%{X:0>2}", .{b});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn urlEncodeShouldEscape(value: []const u8, mode: URLEncodeMode) bool {
|
||||
for (value) |b| {
|
||||
if (!urlEncodeUnreserved(b, mode)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn urlEncodeUnreserved(b: u8, mode: URLEncodeMode) bool {
|
||||
return switch (b) {
|
||||
'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_' => true,
|
||||
'~' => mode == .query,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
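A minimal sketch (not part of the diff) of the two encoding modes, assuming `arena` and `allocator` are valid std.mem.Allocator values:

var list = List{};
try list.append(arena, "sauce", "tomato basil");
try list.append(arena, "spice", "");

var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

try urlEncode(list, .form, buf.writer());
// form mode: "sauce=tomato+basil&spice=" (space -> '+', empty value keeps '=')

buf.clearRetainingCapacity();
try urlEncode(list, .query, buf.writer());
// query mode: "sauce=tomato%20basil&spice" (space -> %20, empty value drops '=')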
@@ -17,29 +17,23 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
pub const c = @cImport({
|
||||
const c = @cImport({
|
||||
@cInclude("dom/dom.h");
|
||||
@cInclude("core/pi.h");
|
||||
@cInclude("dom/bindings/hubbub/parser.h");
|
||||
@cInclude("events/event_target.h");
|
||||
@cInclude("events/event.h");
|
||||
@cInclude("events/mouse_event.h");
|
||||
@cInclude("events/keyboard_event.h");
|
||||
@cInclude("utils/validate.h");
|
||||
@cInclude("html/html_element.h");
|
||||
@cInclude("html/html_document.h");
|
||||
});
|
||||
|
||||
const mimalloc = @import("mimalloc.zig");
|
||||
pub var ARENA: ?Allocator = null;
|
||||
|
||||
// init initializes netsurf lib.
|
||||
// init starts a mimalloc heap arena for the netsurf session. The caller must
|
||||
// call deinit() to free the arena memory.
|
||||
pub fn init(allocator: Allocator) !void {
|
||||
ARENA = allocator;
|
||||
pub fn init() !void {
|
||||
try mimalloc.create();
|
||||
}
|
||||
|
||||
@@ -52,7 +46,6 @@ pub fn deinit() void {
|
||||
c.lwc_deinit_strings();
|
||||
|
||||
mimalloc.destroy();
|
||||
ARENA = null;
|
||||
}
|
||||
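A minimal usage sketch (not part of the diff), assuming the init variant that receives an Allocator and a valid `allocator`:

try init(allocator); // stores the allocator in ARENA and creates the mimalloc heap
defer deinit();      // deinits lwc strings, destroys the heap and clears ARENA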
|
||||
// Vtable
|
||||
@@ -528,7 +521,6 @@ pub const EventType = enum(u8) {
|
||||
event = 0,
|
||||
progress_event = 1,
|
||||
custom_event = 2,
|
||||
mouse_event = 3,
|
||||
};
|
||||
|
||||
pub const MutationEvent = c.dom_mutation_event;
|
||||
@@ -557,7 +549,7 @@ pub fn mutationEventRelatedNode(evt: *MutationEvent) !?*Node {
|
||||
const err = c._dom_mutation_event_get_related_node(evt, &n);
|
||||
try DOMErr(err);
|
||||
if (n == null) return null;
|
||||
return @as(*Node, @alignCast(@ptrCast(n)));
|
||||
return @as(*Node, @ptrCast(n));
|
||||
}
|
||||
|
||||
// EventListener
|
||||
@@ -572,7 +564,7 @@ fn eventListenerGetData(lst: *EventListener) ?*anyopaque {
|
||||
pub const EventTarget = c.dom_event_target;
|
||||
|
||||
pub fn eventTargetToNode(et: *EventTarget) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(et)));
|
||||
return @as(*Node, @ptrCast(et));
|
||||
}
|
||||
|
||||
fn eventTargetVtable(et: *EventTarget) c.dom_event_target_vtable {
|
||||
@@ -623,7 +615,7 @@ pub fn eventTargetAddEventListener(
|
||||
typ: []const u8,
|
||||
node: *EventNode,
|
||||
capture: bool,
|
||||
) !*EventListener {
|
||||
) !void {
|
||||
const event_handler = struct {
|
||||
fn handle(event_: ?*Event, ptr_: ?*anyopaque) callconv(.C) void {
|
||||
const ptr = ptr_ orelse return;
|
||||
@@ -642,8 +634,6 @@ pub fn eventTargetAddEventListener(
|
||||
const s = try strFromData(typ);
|
||||
const err = eventTargetVtable(et).add_event_listener.?(et, s, listener, capture);
|
||||
try DOMErr(err);
|
||||
|
||||
return listener.?;
|
||||
}
|
||||
|
||||
pub fn eventTargetHasListener(
|
||||
@@ -869,59 +859,6 @@ pub fn mouseEventDefaultPrevented(evt: *MouseEvent) !bool {
|
||||
return eventDefaultPrevented(@ptrCast(evt));
|
||||
}
|
||||
|
||||
// KeyboardEvent
|
||||
|
||||
pub const KeyboardEvent = c.dom_keyboard_event;
|
||||
|
||||
pub fn keyboardEventCreate() !*KeyboardEvent {
|
||||
var evt: ?*KeyboardEvent = undefined;
|
||||
const err = c._dom_keyboard_event_create(&evt);
|
||||
try DOMErr(err);
|
||||
return evt.?;
|
||||
}
|
||||
|
||||
pub fn keyboardEventDestroy(evt: *KeyboardEvent) void {
|
||||
c._dom_keyboard_event_destroy(evt);
|
||||
}
|
||||
|
||||
const KeyboardEventOpts = struct {
|
||||
key: []const u8,
|
||||
code: []const u8,
|
||||
bubbles: bool = false,
|
||||
cancelable: bool = false,
|
||||
ctrl: bool = false,
|
||||
alt: bool = false,
|
||||
shift: bool = false,
|
||||
meta: bool = false,
|
||||
};
|
||||
|
||||
pub fn keyboardEventInit(evt: *KeyboardEvent, typ: []const u8, opts: KeyboardEventOpts) !void {
|
||||
const s = try strFromData(typ);
|
||||
const err = c._dom_keyboard_event_init(
|
||||
evt,
|
||||
s,
|
||||
opts.bubbles,
|
||||
opts.cancelable,
|
||||
null, // dom_abstract_view* ?
|
||||
try strFromData(opts.key),
|
||||
try strFromData(opts.code),
|
||||
0, // location 0 == standard
|
||||
opts.ctrl,
|
||||
opts.shift,
|
||||
opts.alt,
|
||||
opts.meta,
|
||||
false, // repeat
|
||||
false, // is_composing
|
||||
);
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn keyboardEventGetKey(evt: *KeyboardEvent) ![]const u8 {
|
||||
var s: ?*String = undefined;
|
||||
_ = c._dom_keyboard_event_get_key(evt, &s);
|
||||
return strToData(s.?);
|
||||
}
|
||||
|
||||
// NodeType
|
||||
|
||||
pub const NodeType = enum(u4) {
|
||||
@@ -954,7 +891,7 @@ pub fn nodeListItem(nodeList: *NodeList, index: u32) !?*Node {
|
||||
const err = c._dom_nodelist_item(nodeList, index, &n);
|
||||
try DOMErr(err);
|
||||
if (n == null) return null;
|
||||
return @as(*Node, @alignCast(@ptrCast(n)));
|
||||
return @as(*Node, @ptrCast(n));
|
||||
}
|
||||
|
||||
// NodeExternal is the libdom public representation of a Node.
|
||||
@@ -1361,10 +1298,6 @@ pub inline fn nodeToElement(node: *Node) *Element {
|
||||
return @as(*Element, @ptrCast(node));
|
||||
}
|
||||
|
||||
pub inline fn nodeToHtmlElement(node: *Node) *ElementHTML {
|
||||
return @as(*ElementHTML, @alignCast(@ptrCast(node)));
|
||||
}
|
||||
|
||||
// nodeToDocument is a helper to convert a node to a document.
|
||||
pub inline fn nodeToDocument(node: *Node) *Document {
|
||||
return @as(*Document, @ptrCast(node));
|
||||
@@ -1387,7 +1320,7 @@ fn characterDataVtable(data: *CharacterData) c.dom_characterdata_vtable {
|
||||
}
|
||||
|
||||
pub inline fn characterDataToNode(cdata: *CharacterData) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(cdata)));
|
||||
return @as(*Node, @ptrCast(cdata));
|
||||
}
|
||||
|
||||
pub fn characterDataData(cdata: *CharacterData) ![]const u8 {
|
||||
@@ -1472,7 +1405,7 @@ pub const ProcessingInstruction = c.dom_processing_instruction;
|
||||
|
||||
// processingInstructionToNode is a helper to convert a ProcessingInstruction to a node.
|
||||
pub inline fn processingInstructionToNode(pi: *ProcessingInstruction) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(pi)));
|
||||
return @as(*Node, @ptrCast(pi));
|
||||
}
|
||||
|
||||
pub fn processInstructionCopy(pi: *ProcessingInstruction) !*ProcessingInstruction {
|
||||
@@ -1527,7 +1460,7 @@ pub fn attributeGetOwnerElement(a: *Attribute) !?*Element {
|
||||
|
||||
// attributeToNode is a helper to convert an attribute to a node.
|
||||
pub inline fn attributeToNode(a: *Attribute) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(a)));
|
||||
return @as(*Node, @ptrCast(a));
|
||||
}
|
||||
|
||||
// Element
|
||||
@@ -1665,7 +1598,7 @@ pub fn elementHasClass(elem: *Element, class: []const u8) !bool {
|
||||
|
||||
// elementToNode is a helper to convert an element to a node.
|
||||
pub inline fn elementToNode(e: *Element) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(e)));
|
||||
return @as(*Node, @ptrCast(e));
|
||||
}
|
||||
|
||||
// TokenList
|
||||
@@ -1749,14 +1682,14 @@ pub fn elementHTMLGetTagType(elem_html: *ElementHTML) !Tag {
|
||||
|
||||
// scriptToElt is a helper to convert a script to an element.
|
||||
pub inline fn scriptToElt(s: *Script) *Element {
|
||||
return @as(*Element, @alignCast(@ptrCast(s)));
|
||||
return @as(*Element, @ptrCast(s));
|
||||
}
|
||||
|
||||
// HTMLAnchorElement
|
||||
|
||||
// anchorToNode is a helper to convert an anchor to a node.
|
||||
pub inline fn anchorToNode(a: *Anchor) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(a)));
|
||||
return @as(*Node, @ptrCast(a));
|
||||
}
|
||||
|
||||
pub fn anchorGetTarget(a: *Anchor) ![]const u8 {
|
||||
@@ -1901,7 +1834,7 @@ pub const OptionCollection = c.dom_html_options_collection;
|
||||
pub const DocumentFragment = c.dom_document_fragment;
|
||||
|
||||
pub inline fn documentFragmentToNode(doc: *DocumentFragment) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(doc)));
|
||||
return @as(*Node, @ptrCast(doc));
|
||||
}
|
||||
|
||||
pub fn documentFragmentBodyChildren(doc: *DocumentFragment) !?*NodeList {
|
||||
@@ -1997,10 +1930,8 @@ pub inline fn domImplementationCreateDocumentType(
|
||||
return dt.?;
|
||||
}
|
||||
|
||||
pub const CreateElementFn = ?*const fn ([*c]c.dom_html_element_create_params, [*c][*c]ElementHTML) callconv(.c) c.dom_exception;
|
||||
|
||||
pub inline fn domImplementationCreateHTMLDocument(title: ?[]const u8, create_element: CreateElementFn) !*DocumentHTML {
|
||||
const doc_html = try documentCreateDocument(title, create_element);
|
||||
pub inline fn domImplementationCreateHTMLDocument(title: ?[]const u8) !*DocumentHTML {
|
||||
const doc_html = try documentCreateDocument(title);
|
||||
const doc = documentHTMLToDocument(doc_html);
|
||||
|
||||
// add hierarchy: html, head, body.
|
||||
@@ -2013,7 +1944,7 @@ pub inline fn domImplementationCreateHTMLDocument(title: ?[]const u8, create_ele
|
||||
if (title) |t| {
|
||||
const htitle = try documentCreateElement(doc, "title");
|
||||
const txt = try documentCreateTextNode(doc, t);
|
||||
_ = try nodeAppendChild(elementToNode(htitle), @as(*Node, @alignCast(@ptrCast(txt))));
|
||||
_ = try nodeAppendChild(elementToNode(htitle), @as(*Node, @ptrCast(txt)));
|
||||
_ = try nodeAppendChild(elementToNode(head), elementToNode(htitle));
|
||||
}
|
||||
|
||||
@@ -2031,7 +1962,7 @@ fn documentVtable(doc: *Document) c.dom_document_vtable {
|
||||
}
|
||||
|
||||
pub inline fn documentToNode(doc: *Document) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(doc)));
|
||||
return @as(*Node, @ptrCast(doc));
|
||||
}
|
||||
|
||||
pub inline fn documentGetElementById(doc: *Document, id: []const u8) !?*Element {
|
||||
@@ -2081,7 +2012,7 @@ pub inline fn documentSetInputEncoding(doc: *Document, enc: []const u8) !void {
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub inline fn documentCreateDocument(title: ?[]const u8, create_element: CreateElementFn) !*DocumentHTML {
|
||||
pub inline fn documentCreateDocument(title: ?[]const u8) !*DocumentHTML {
|
||||
var doc: ?*Document = undefined;
|
||||
const err = c.dom_implementation_create_document(
|
||||
c.DOM_IMPLEMENTATION_HTML,
|
||||
@@ -2095,9 +2026,6 @@ pub inline fn documentCreateDocument(title: ?[]const u8, create_element: CreateE
|
||||
try DOMErr(err);
|
||||
const doc_html = @as(*DocumentHTML, @ptrCast(doc.?));
|
||||
if (title) |t| try documentHTMLSetTitle(doc_html, t);
|
||||
|
||||
doc_html.create_element_external = create_element;
|
||||
|
||||
return doc_html;
|
||||
}
|
||||
|
||||
@@ -2172,7 +2100,7 @@ pub inline fn documentImportNode(doc: *Document, node: *Node, deep: bool) !*Node
|
||||
const nodeext = toNodeExternal(Node, node);
|
||||
const err = documentVtable(doc).dom_document_import_node.?(doc, nodeext, deep, &res);
|
||||
try DOMErr(err);
|
||||
return @as(*Node, @alignCast(@ptrCast(res)));
|
||||
return @as(*Node, @ptrCast(res));
|
||||
}
|
||||
|
||||
pub inline fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
|
||||
@@ -2180,7 +2108,7 @@ pub inline fn documentAdoptNode(doc: *Document, node: *Node) !*Node {
|
||||
const nodeext = toNodeExternal(Node, node);
|
||||
const err = documentVtable(doc).dom_document_adopt_node.?(doc, nodeext, &res);
|
||||
try DOMErr(err);
|
||||
return @as(*Node, @alignCast(@ptrCast(res)));
|
||||
return @as(*Node, @ptrCast(res));
|
||||
}
|
||||
|
||||
pub inline fn documentCreateAttribute(doc: *Document, name: []const u8) !*Attribute {
|
||||
@@ -2202,20 +2130,12 @@ pub inline fn documentCreateAttributeNS(doc: *Document, ns: []const u8, qname: [
|
||||
return attr.?;
|
||||
}
|
||||
|
||||
pub fn documentSetScriptAddedCallback(
|
||||
doc: *Document,
|
||||
ctx: *anyopaque,
|
||||
callback: c.dom_script_added_callback,
|
||||
) void {
|
||||
c._dom_document_set_script_added_callback(doc, ctx, callback);
|
||||
}
|
||||
|
||||
// DocumentHTML
|
||||
pub const DocumentHTML = c.dom_html_document;
|
||||
|
||||
// documentHTMLToNode is a helper to convert a documentHTML to a node.
|
||||
pub inline fn documentHTMLToNode(doc: *DocumentHTML) *Node {
|
||||
return @as(*Node, @alignCast(@ptrCast(doc)));
|
||||
return @as(*Node, @ptrCast(doc));
|
||||
}
|
||||
|
||||
fn documentHTMLVtable(doc_html: *DocumentHTML) c.dom_html_document_vtable {
|
||||
@@ -2261,26 +2181,24 @@ fn parserErr(err: HubbubErr) ParserError!void {
|
||||
|
||||
// documentHTMLParseFromStr parses the given HTML string.
|
||||
// The caller is responsible for closing the document.
|
||||
pub fn documentHTMLParseFromStr(str: []const u8, create_element: CreateElementFn) !*DocumentHTML {
|
||||
pub fn documentHTMLParseFromStr(str: []const u8) !*DocumentHTML {
|
||||
var fbs = std.io.fixedBufferStream(str);
|
||||
return try documentHTMLParse(fbs.reader(), "UTF-8", create_element);
|
||||
return try documentHTMLParse(fbs.reader(), "UTF-8");
|
||||
}
|
||||
|
||||
pub fn documentHTMLParse(reader: anytype, enc: ?[:0]const u8, create_element: CreateElementFn) !*DocumentHTML {
|
||||
pub fn documentHTMLParse(reader: anytype, enc: ?[:0]const u8) !*DocumentHTML {
|
||||
var parser: ?*c.dom_hubbub_parser = undefined;
|
||||
var doc: ?*c.dom_document = undefined;
|
||||
var err: c.hubbub_error = undefined;
|
||||
var params = parseParams(enc);
|
||||
|
||||
err = c.dom_hubbub_parser_create(¶ms, &parser, &doc);
|
||||
const result = @as(*DocumentHTML, @ptrCast(doc.?));
|
||||
result.create_element_external = create_element;
|
||||
try parserErr(err);
|
||||
defer c.dom_hubbub_parser_destroy(parser);
|
||||
|
||||
try parseData(parser.?, reader);
|
||||
|
||||
return result;
|
||||
return @as(*DocumentHTML, @ptrCast(doc.?));
|
||||
}
|
||||
|
||||
pub fn documentParseFragmentFromStr(self: *Document, str: []const u8) !*DocumentFragment {
|
||||
@@ -2362,7 +2280,7 @@ pub inline fn documentHTMLBody(doc_html: *DocumentHTML) !?*Body {
|
||||
}
|
||||
|
||||
pub inline fn bodyToElement(body: *Body) *Element {
|
||||
return @as(*Element, @alignCast(@ptrCast(body)));
|
||||
return @as(*Element, @ptrCast(body));
|
||||
}
|
||||
|
||||
pub inline fn documentHTMLSetBody(doc_html: *DocumentHTML, elt: ?*ElementHTML) !void {
|
||||
@@ -2401,7 +2319,7 @@ pub inline fn documentHTMLSetTitle(doc: *DocumentHTML, v: []const u8) !void {
|
||||
|
||||
pub fn documentHTMLSetCurrentScript(doc: *DocumentHTML, script: ?*Script) !void {
|
||||
var s: ?*ElementHTML = null;
|
||||
if (script != null) s = @alignCast(@ptrCast(script.?));
|
||||
if (script != null) s = @ptrCast(script.?);
|
||||
const err = documentHTMLVtable(doc).set_current_script.?(doc, s);
|
||||
try DOMErr(err);
|
||||
}
|
||||
@@ -2462,11 +2380,6 @@ pub fn textareaGetValue(textarea: *TextArea) ![]const u8 {
|
||||
return strToData(s);
|
||||
}
|
||||
|
||||
pub fn textareaSetValue(textarea: *TextArea, value: []const u8) !void {
|
||||
const err = c.dom_html_text_area_element_set_value(textarea, try strFromData(value));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
// Select
|
||||
pub fn selectGetOptions(select: *Select) !*OptionCollection {
|
||||
var collection: ?*OptionCollection = null;
|
||||
@@ -2574,6 +2487,14 @@ pub fn optionSetSelected(option: *Option, selected: bool) !void {
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
// Input
|
||||
pub fn inputGetChecked(input: *Input) !bool {
|
||||
var b: bool = false;
|
||||
const err = c.dom_html_input_element_get_checked(input, &b);
|
||||
try DOMErr(err);
|
||||
return b;
|
||||
}
|
||||
|
||||
// HtmlCollection
|
||||
pub fn htmlCollectionGetLength(collection: *HTMLCollection) !u32 {
|
||||
var len: u32 = 0;
|
||||
@@ -2669,191 +2590,3 @@ pub fn imageSetIsMap(image: *Image, is_map: bool) !void {
|
||||
const err = c.dom_html_image_element_set_is_map(image, is_map);
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
// Input
|
||||
// - Input.align is deprecated
|
||||
// - Input.useMap is deprecated
|
||||
// - HTMLElement.access_key
|
||||
// - HTMLElement.tabIndex
|
||||
// TODO methods:
|
||||
// - HTMLElement.blur
|
||||
// - HTMLElement.focus
|
||||
// - select
|
||||
// - HTMLElement.click
|
||||
|
||||
pub fn inputGetDefaultValue(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_default_value(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetDefaultValue(input: *Input, default_value: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_default_value(input, try strFromData(default_value));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetDefaultChecked(input: *Input) !bool {
|
||||
var default_checked: bool = false;
|
||||
const err = c.dom_html_input_element_get_default_checked(input, &default_checked);
|
||||
try DOMErr(err);
|
||||
return default_checked;
|
||||
}
|
||||
pub fn inputSetDefaultChecked(input: *Input, default_checked: bool) !void {
|
||||
const err = c.dom_html_input_element_set_default_checked(input, default_checked);
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetForm(input: *Input) !?*Form {
|
||||
var form: ?*Form = null;
|
||||
const err = c.dom_html_input_element_get_form(input, &form);
|
||||
try DOMErr(err);
|
||||
return form;
|
||||
}
|
||||
|
||||
pub fn inputGetAccept(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_accept(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetAccept(input: *Input, accept: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_accept(input, try strFromData(accept));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetAlt(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_alt(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetAlt(input: *Input, alt: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_alt(input, try strFromData(alt));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetChecked(input: *Input) !bool {
|
||||
var checked: bool = false;
|
||||
const err = c.dom_html_input_element_get_checked(input, &checked);
|
||||
try DOMErr(err);
|
||||
return checked;
|
||||
}
|
||||
pub fn inputSetChecked(input: *Input, checked: bool) !void {
|
||||
const err = c.dom_html_input_element_set_checked(input, checked);
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetDisabled(input: *Input) !bool {
|
||||
var disabled: bool = false;
|
||||
const err = c.dom_html_input_element_get_disabled(input, &disabled);
|
||||
try DOMErr(err);
|
||||
return disabled;
|
||||
}
|
||||
pub fn inputSetDisabled(input: *Input, disabled: bool) !void {
|
||||
const err = c.dom_html_input_element_set_disabled(input, disabled);
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetMaxLength(input: *Input) !i32 {
|
||||
var max_length: i32 = 0;
|
||||
const err = c.dom_html_input_element_get_max_length(input, &max_length);
|
||||
try DOMErr(err);
|
||||
return max_length;
|
||||
}
|
||||
pub fn inputSetMaxLength(input: *Input, max_length: i32) !void {
|
||||
if (max_length < 0) return error.NegativeValueNotAllowed;
|
||||
const err = c.dom_html_input_element_set_max_length(input, @intCast(max_length));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetName(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_name(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetName(input: *Input, name: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_name(input, try strFromData(name));
|
||||
try DOMErr(err);
|
||||
}
|
||||
pub fn inputGetReadOnly(input: *Input) !bool {
|
||||
var read_only: bool = false;
|
||||
const err = c.dom_html_input_element_get_read_only(input, &read_only);
|
||||
try DOMErr(err);
|
||||
return read_only;
|
||||
}
|
||||
pub fn inputSetReadOnly(input: *Input, read_only: bool) !void {
|
||||
const err = c.dom_html_input_element_set_read_only(input, read_only);
|
||||
try DOMErr(err);
|
||||
}
|
||||
pub fn inputGetSize(input: *Input) !u32 {
|
||||
var size: u32 = 0;
|
||||
const err = c.dom_html_input_element_get_size(input, &size);
|
||||
try DOMErr(err);
|
||||
if (size == ulongNegativeOne) return 20; // default size is 20
|
||||
return size;
|
||||
}
|
||||
pub fn inputSetSize(input: *Input, size: i32) !void {
|
||||
if (size == 0) return error.ZeroNotAllowed;
|
||||
const new_size = if (size < 0) 20 else size;
|
||||
const err = c.dom_html_input_element_set_size(input, @intCast(new_size));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetSrc(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_src(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
// url should already be stitched!
|
||||
pub fn inputSetSrc(input: *Input, src: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_src(input, try strFromData(src));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn inputGetType(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_type(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "text";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetType(input: *Input, type_: []const u8) !void {
|
||||
// @speed sort values by usage frequency/length
|
||||
const possible_values = [_][]const u8{ "text", "search", "tel", "url", "email", "password", "date", "month", "week", "time", "datetime-local", "number", "range", "color", "checkbox", "radio", "file", "hidden", "image", "button", "submit", "reset" };
|
||||
var found = false;
|
||||
for (possible_values) |item| {
|
||||
if (std.mem.eql(u8, type_, item)) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
const new_type = if (found) type_ else "text";
|
||||
try elementSetAttribute(@alignCast(@ptrCast(input)), "type", new_type);
|
||||
}
|
||||
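A short sketch (not part of the diff), assuming `input` is a valid *Input; values outside the allowed list fall back to "text":

try inputSetType(input, "email");     // allowed -> type="email"
try inputSetType(input, "telephone"); // not in the list -> type="text"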
|
||||
pub fn inputGetValue(input: *Input) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_input_element_get_value(input, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "";
|
||||
return strToData(s);
|
||||
}
|
||||
pub fn inputSetValue(input: *Input, value: []const u8) !void {
|
||||
const err = c.dom_html_input_element_set_value(input, try strFromData(value));
|
||||
try DOMErr(err);
|
||||
}
|
||||
|
||||
pub fn buttonGetType(button: *Button) ![]const u8 {
|
||||
var s_: ?*String = null;
|
||||
const err = c.dom_html_button_element_get_type(button, &s_);
|
||||
try DOMErr(err);
|
||||
const s = s_ orelse return "button";
|
||||
return strToData(s);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -16,7 +16,7 @@ test "Browser.fetch" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
|
||||
try @import("polyfill.zig").load(testing.allocator, runner.page.main_context);
|
||||
try @import("polyfill.zig").load(testing.allocator, runner.scope);
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{
|
||||
|
||||
@@ -19,10 +19,11 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Env = @import("../env.zig").Env;
|
||||
|
||||
const log = std.log.scoped(.polyfill);
|
||||
|
||||
const modules = [_]struct {
|
||||
name: []const u8,
|
||||
source: []const u8,
|
||||
@@ -30,18 +31,24 @@ const modules = [_]struct {
|
||||
.{ .name = "polyfill-fetch", .source = @import("fetch.zig").source },
|
||||
};
|
||||
|
||||
pub fn load(allocator: Allocator, js_context: *Env.JsContext) !void {
|
||||
pub fn load(allocator: Allocator, scope: *Env.Scope) !void {
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(js_context);
|
||||
try_catch.init(scope);
|
||||
defer try_catch.deinit();
|
||||
|
||||
for (modules) |m| {
|
||||
_ = js_context.exec(m.source, m.name) catch |err| {
|
||||
const res = scope.exec(m.source, m.name) catch |err| {
|
||||
if (try try_catch.err(allocator)) |msg| {
|
||||
defer allocator.free(msg);
|
||||
log.fatal(.app, "polyfill error", .{ .name = m.name, .err = msg });
|
||||
log.err("load {s}: {s}", .{ m.name, msg });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
|
||||
if (builtin.mode == .Debug) {
|
||||
const msg = try res.toString(allocator);
|
||||
defer allocator.free(msg);
|
||||
log.debug("load {s}: {s}", .{ m.name, msg });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,14 +22,13 @@ const Allocator = std.mem.Allocator;
|
||||
|
||||
const Env = @import("env.zig").Env;
|
||||
const Page = @import("page.zig").Page;
|
||||
const URL = @import("../url.zig").URL;
|
||||
const Browser = @import("browser.zig").Browser;
|
||||
const NavigateOpts = @import("page.zig").NavigateOpts;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const parser = @import("netsurf.zig");
|
||||
const storage = @import("storage/storage.zig");
|
||||
|
||||
const log = std.log.scoped(.session);
|
||||
|
||||
// Session is like a browser's tab.
|
||||
// It owns the js env and the loader for all the pages of the session.
|
||||
// You can create successively multiple pages for a session, but you must
|
||||
@@ -85,20 +84,19 @@ pub const Session = struct {
|
||||
pub fn createPage(self: *Session) !*Page {
|
||||
std.debug.assert(self.page == null);
|
||||
|
||||
const page_arena = &self.browser.page_arena;
|
||||
_ = page_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||
_ = self.browser.state_pool.reset(.{ .retain_with_limit = 4 * 1024 });
|
||||
|
||||
// Start netsurf memory arena.
|
||||
// We need to init this early as JS event handlers may be registered through Runtime.evaluate before the first html doc is loaded
|
||||
try parser.init(page_arena.allocator());
|
||||
try parser.init();
|
||||
|
||||
const page_arena = &self.browser.page_arena;
|
||||
_ = page_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||
|
||||
self.page = @as(Page, undefined);
|
||||
const page = &self.page.?;
|
||||
try Page.init(page, page_arena.allocator(), self);
|
||||
|
||||
log.debug(.browser, "create page", .{});
|
||||
// start JS env
|
||||
log.debug("start new js scope", .{});
|
||||
// Inform CDP the main page has been created such that additional context for other Worlds can be created as well
|
||||
self.browser.notification.dispatch(.page_created, page);
|
||||
|
||||
@@ -110,33 +108,35 @@ pub const Session = struct {
|
||||
self.browser.notification.dispatch(.page_remove, .{});
|
||||
|
||||
std.debug.assert(self.page != null);
|
||||
|
||||
// Cleanup is a bit sensitive. We could still have inflight I/O. For
|
||||
// example, we could have an XHR request which is still in the connect
|
||||
// phase. It's important that we clean these up, as they're holding onto
|
||||
// limited resources (like our fixed-size http state pool).
|
||||
//
|
||||
// First thing we do, is removeJsContext() which will execute the destructor
|
||||
// of any type that registered a destructor (e.g. XMLHttpRequest).
|
||||
// This will shut down any pending sockets, which begins our cleanup
|
||||
// process.
|
||||
self.executor.removeJsContext();
|
||||
|
||||
// Second thing we do is reset the loop. This increments the loop ctx_id
|
||||
// so that any "stale" timeouts we process will get ignored. We need to
|
||||
// do this BEFORE running the loop because, at this point, things like
|
||||
// window.setTimeout and running microtasks should be ignored
|
||||
// Reset all existing callbacks.
|
||||
self.browser.app.loop.reset();
|
||||
|
||||
self.executor.endScope();
|
||||
self.page = null;
|
||||
|
||||
// clear netsurf memory arena.
|
||||
parser.deinit();
|
||||
|
||||
log.debug(.browser, "remove page", .{});
|
||||
}
|
||||
|
||||
pub fn currentPage(self: *Session) ?*Page {
|
||||
return &(self.page orelse return null);
|
||||
}
|
||||
|
||||
pub fn pageNavigate(self: *Session, url_string: []const u8) !void {
|
||||
// currently, this is only called from the page, so let's hope
|
||||
// it isn't null!
|
||||
std.debug.assert(self.page != null);
|
||||
|
||||
defer _ = self.browser.transfer_arena.reset(.{ .retain_with_limit = 1 * 1024 * 1024 });
|
||||
|
||||
// it's safe to use the transfer arena here, because the page will
|
||||
// eventually clone the URL using its own page_arena (after it gets
|
||||
// the final URL, possibly following redirects)
|
||||
const url = try self.page.?.url.resolve(self.transfer_arena, url_string);
|
||||
|
||||
self.removePage();
|
||||
var page = try self.createPage();
|
||||
return page.navigate(url, .{
|
||||
.reason = .anchor,
|
||||
});
|
||||
}
|
||||
};
|
||||
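A minimal page-lifecycle sketch (not part of the diff), assuming `session` is a valid *Session and `url` is an already-resolved URL:

const page = try session.createPage();          // asserts no page currently exists
try page.navigate(url, .{ .reason = .anchor });
session.removePage();                           // removes the JS context, resets the loop, frees netsurf memory
const next_page = try session.createPage();     // a fresh page can now be created
_ = next_page;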
|
||||
@@ -3,11 +3,12 @@ const Uri = std.Uri;
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;

const log = @import("../../log.zig");
const http = @import("../../http/client.zig");
const DateTime = @import("../../datetime.zig").DateTime;
const public_suffix_list = @import("../../data/public_suffix_list.zig").lookup;

const log = std.log.scoped(.cookie);

pub const LookupOpts = struct {
request_time: ?i64 = null,
origin_uri: ?*const Uri = null,
@@ -155,7 +156,7 @@ pub const Jar = struct {
var it = header.iterate("set-cookie");
while (it.next()) |set_cookie| {
const c = Cookie.parse(self.allocator, uri, set_cookie) catch |err| {
log.warn(.web_api, "cookie parse failed", .{ .raw = set_cookie, .err = err });
log.warn("Couldn't parse cookie '{s}': {}\n", .{ set_cookie, err });
continue;
};
try self.add(c, now);
@@ -357,7 +358,7 @@ pub const Cookie = struct {
value = value[1..];
}

if (std.mem.indexOfScalarPos(u8, value, 0, '.') == null and std.ascii.eqlIgnoreCase("localhost", value) == false) {
if (std.mem.indexOfScalarPos(u8, value, 0, '.') == null) {
// can't set a cookie for a TLD
return error.InvalidDomain;
}
@@ -838,17 +839,6 @@ test "Cookie: parse all" {
.domain = ".lightpanda.io",
.expires = std.time.timestamp() + 30,
}, "https://lightpanda.io/cms/users", "user-id=9000; HttpOnly; Max-Age=30; Secure; path=/; Domain=lightpanda.io");

try expectCookie(.{
.name = "app_session",
.value = "123",
.path = "/",
.http_only = true,
.secure = false,
.domain = ".localhost",
.same_site = .lax,
.expires = std.time.timestamp() + 7200,
}, "http://localhost:8000/login", "app_session=123; Max-Age=7200; path=/; domain=localhost; httponly; samesite=lax");
}

test "Cookie: parse domain" {
@@ -859,8 +849,6 @@ test "Cookie: parse domain" {
try expectAttribute(.{ .domain = ".dev.lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=dev.lightpanda.io");
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=lightpanda.io");
try expectAttribute(.{ .domain = ".lightpanda.io" }, "http://dev.lightpanda.io/", "b;domain=.lightpanda.io");
try expectAttribute(.{ .domain = ".localhost" }, "http://localhost/", "b;domain=localhost");
try expectAttribute(.{ .domain = ".localhost" }, "http://localhost/", "b;domain=.localhost");

try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=io");
try expectError(error.InvalidDomain, "http://lightpanda.io/", "b;domain=.io");
@@ -20,6 +20,8 @@ const std = @import("std");

const DOMError = @import("../netsurf.zig").DOMError;

const log = std.log.scoped(.storage);

pub const cookie = @import("cookie.zig");
pub const Cookie = cookie.Cookie;
pub const CookieJar = cookie.Jar;
@@ -147,7 +149,10 @@ pub const Bottle = struct {
}

pub fn _setItem(self: *Bottle, k: []const u8, v: []const u8) !void {
const gop = try self.map.getOrPut(self.alloc, k);
const gop = self.map.getOrPut(self.alloc, k) catch |e| {
log.debug("set item: {any}", .{e});
return DOMError.QuotaExceeded;
};

if (gop.found_existing == false) {
gop.key_ptr.* = try self.alloc.dupe(u8, k);
433
src/browser/url/query.zig
Normal file
@@ -0,0 +1,433 @@
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
//
// Francis Bouvier <francis@lightpanda.io>
// Pierre Tachoire <pierre@lightpanda.io>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

const std = @import("std");

const Reader = @import("../../str/parser.zig").Reader;
const asUint = @import("../../str/parser.zig").asUint;

// Values is a map with string key of string values.
pub const Values = struct {
arena: std.heap.ArenaAllocator,
map: std.StringArrayHashMapUnmanaged(List),

const List = std.ArrayListUnmanaged([]const u8);

pub fn init(allocator: std.mem.Allocator) Values {
return .{
.map = .{},
.arena = std.heap.ArenaAllocator.init(allocator),
};
}

pub fn deinit(self: *Values) void {
self.arena.deinit();
}
|
||||
// add the key value couple to the values.
|
||||
// the key and the value are duplicated.
|
||||
pub fn append(self: *Values, k: []const u8, v: []const u8) !void {
|
||||
const allocator = self.arena.allocator();
|
||||
const owned_value = try allocator.dupe(u8, v);
|
||||
|
||||
var gop = try self.map.getOrPut(allocator, k);
|
||||
if (gop.found_existing) {
|
||||
return gop.value_ptr.append(allocator, owned_value);
|
||||
}
|
||||
|
||||
gop.key_ptr.* = try allocator.dupe(u8, k);
|
||||
|
||||
var list = List{};
|
||||
try list.append(allocator, owned_value);
|
||||
gop.value_ptr.* = list;
|
||||
}
|
||||
|
||||
// append by taking the ownership of the key and the value
|
||||
fn appendOwned(self: *Values, k: []const u8, v: []const u8) !void {
|
||||
const allocator = self.arena.allocator();
|
||||
var gop = try self.map.getOrPut(allocator, k);
|
||||
if (gop.found_existing) {
|
||||
return gop.value_ptr.append(allocator, v);
|
||||
}
|
||||
|
||||
var list = List{};
|
||||
try list.append(allocator, v);
|
||||
gop.value_ptr.* = list;
|
||||
}
|
||||
|
||||
pub fn get(self: *const Values, k: []const u8) []const []const u8 {
|
||||
if (self.map.get(k)) |list| {
|
||||
return list.items;
|
||||
}
|
||||
|
||||
return &[_][]const u8{};
|
||||
}
|
||||
|
||||
pub fn first(self: *const Values, k: []const u8) []const u8 {
|
||||
if (self.map.getPtr(k)) |list| {
|
||||
if (list.items.len == 0) return "";
|
||||
return list.items[0];
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
pub fn delete(self: *Values, k: []const u8) void {
|
||||
_ = self.map.fetchSwapRemove(k);
|
||||
}
|
||||
|
||||
pub fn deleteValue(self: *Values, k: []const u8, v: []const u8) void {
|
||||
const list = self.map.getPtr(k) orelse return;
|
||||
|
||||
for (list.items, 0..) |vv, i| {
|
||||
if (std.mem.eql(u8, v, vv)) {
|
||||
_ = list.swapRemove(i);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn count(self: *const Values) usize {
|
||||
return self.map.count();
|
||||
}
|
||||
|
||||
pub fn encode(self: *const Values, writer: anytype) !void {
|
||||
var it = self.map.iterator();
|
||||
|
||||
const first_entry = it.next() orelse return;
|
||||
try encodeKeyValues(first_entry, writer);
|
||||
|
||||
while (it.next()) |entry| {
|
||||
try writer.writeByte('&');
|
||||
try encodeKeyValues(entry, writer);
|
||||
}
|
||||
}
|
||||
};
|
||||
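A quick usage sketch for the Values API above (illustrative only, not part of the diff; like the tests further down in this file it assumes Values is in scope next to the implementation):

test "url.Query.Values: usage sketch" {
    var values = Values.init(std.testing.allocator);
    defer values.deinit();

    // Duplicate keys accumulate under one map entry; count() counts distinct keys.
    try values.append("hello", "world");
    try values.append("hello", "all");
    try std.testing.expectEqual(1, values.count());
    try std.testing.expectEqualStrings("world", values.first("hello"));
    try std.testing.expectEqual(2, values.get("hello").len);

    // encode() writes the url-encoded form of every key/value pair.
    var buf: std.ArrayListUnmanaged(u8) = .{};
    defer buf.deinit(std.testing.allocator);
    try values.encode(buf.writer(std.testing.allocator));
    try std.testing.expectEqualStrings("hello=world&hello=all", buf.items);
}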
|
||||
fn encodeKeyValues(entry: anytype, writer: anytype) !void {
|
||||
const key = entry.key_ptr.*;
|
||||
|
||||
try escape(key, writer);
|
||||
const values = entry.value_ptr.items;
|
||||
if (values.len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (values[0].len > 0) {
|
||||
try writer.writeByte('=');
|
||||
try escape(values[0], writer);
|
||||
}
|
||||
|
||||
for (values[1..]) |value| {
|
||||
try writer.writeByte('&');
|
||||
try escape(key, writer);
|
||||
if (value.len > 0) {
|
||||
try writer.writeByte('=');
|
||||
try escape(value, writer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn escape(raw: []const u8, writer: anytype) !void {
|
||||
var start: usize = 0;
|
||||
for (raw, 0..) |char, index| {
|
||||
if ('a' <= char and char <= 'z' or 'A' <= char and char <= 'Z' or '0' <= char and char <= '9') {
|
||||
continue;
|
||||
}
|
||||
|
||||
try writer.print("{s}%{X:0>2}", .{ raw[start..index], char });
|
||||
start = index + 1;
|
||||
}
|
||||
try writer.writeAll(raw[start..]);
|
||||
}
|
||||
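For reference, escape percent-encodes every byte outside [A-Za-z0-9], writing the hex code in uppercase. A small sketch of the expected output (illustrative, not part of the diff; it assumes escape is in scope in the same file, and the expected string matches the parseQuery test case further down):

test "url.Query: escape sketch" {
    var buf: std.ArrayListUnmanaged(u8) = .{};
    defer buf.deinit(std.testing.allocator);

    // Spaces and '>' are not alphanumeric, so they come out as %20 and %3E.
    try escape("power is >", buf.writer(std.testing.allocator));
    try std.testing.expectEqualStrings("power%20is%20%3E", buf.items);
}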
|
||||
// Parse the given query.
|
||||
pub fn parseQuery(alloc: std.mem.Allocator, s: []const u8) !Values {
|
||||
var values = Values.init(alloc);
|
||||
errdefer values.deinit();
|
||||
|
||||
const arena = values.arena.allocator();
|
||||
|
||||
const ln = s.len;
|
||||
if (ln == 0) return values;
|
||||
|
||||
var r = Reader{ .data = s };
|
||||
while (true) {
|
||||
const param = r.until('&');
|
||||
if (param.len == 0) break;
|
||||
|
||||
var rr = Reader{ .data = param };
|
||||
const k = rr.until('=');
|
||||
if (k.len == 0) continue;
|
||||
|
||||
_ = rr.skip();
|
||||
const v = rr.tail();
|
||||
|
||||
// decode k and v
|
||||
const kk = try unescape(arena, k);
|
||||
const vv = try unescape(arena, v);
|
||||
|
||||
try values.appendOwned(kk, vv);
|
||||
|
||||
if (!r.skip()) break;
|
||||
}
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
// The returned string may or may not be allocated. Callers should use arenas
|
||||
fn unescape(allocator: std.mem.Allocator, input: []const u8) ![]const u8 {
|
||||
const HEX_CHAR = comptime blk: {
|
||||
var all = std.mem.zeroes([256]bool);
|
||||
for ('a'..('f' + 1)) |b| all[b] = true;
|
||||
for ('A'..('F' + 1)) |b| all[b] = true;
|
||||
for ('0'..('9' + 1)) |b| all[b] = true;
|
||||
break :blk all;
|
||||
};
|
||||
|
||||
const HEX_DECODE = comptime blk: {
|
||||
var all = std.mem.zeroes([256]u8);
|
||||
for ('a'..('z' + 1)) |b| all[b] = b - 'a' + 10;
|
||||
for ('A'..('Z' + 1)) |b| all[b] = b - 'A' + 10;
|
||||
for ('0'..('9' + 1)) |b| all[b] = b - '0';
|
||||
break :blk all;
|
||||
};
|
||||
|
||||
var has_plus = false;
|
||||
var unescaped_len = input.len;
|
||||
|
||||
{
|
||||
// Figure out if we have any spaces and what the final unescaped length
|
||||
// will be (which will let us know if we have anything to unescape in
|
||||
// the first place)
|
||||
var i: usize = 0;
|
||||
while (i < input.len) {
|
||||
const c = input[i];
|
||||
if (c == '%') {
|
||||
if (i + 2 >= input.len or !HEX_CHAR[input[i + 1]] or !HEX_CHAR[input[i + 2]]) {
|
||||
return error.EscapeError;
|
||||
}
|
||||
i += 3;
|
||||
unescaped_len -= 2;
|
||||
} else if (c == '+') {
|
||||
has_plus = true;
|
||||
i += 1;
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// no encoding, and no plus. nothing to unescape
|
||||
if (unescaped_len == input.len and has_plus == false) {
|
||||
return input;
|
||||
}
|
||||
|
||||
var unescaped = try allocator.alloc(u8, unescaped_len);
|
||||
errdefer allocator.free(unescaped);
|
||||
|
||||
var input_pos: usize = 0;
|
||||
for (0..unescaped_len) |unescaped_pos| {
|
||||
switch (input[input_pos]) {
|
||||
'+' => {
|
||||
unescaped[unescaped_pos] = ' ';
|
||||
input_pos += 1;
|
||||
},
|
||||
'%' => {
|
||||
const encoded = input[input_pos + 1 .. input_pos + 3];
|
||||
const encoded_as_uint = @as(u16, @bitCast(encoded[0..2].*));
|
||||
unescaped[unescaped_pos] = switch (encoded_as_uint) {
|
||||
asUint("20") => ' ',
|
||||
asUint("21") => '!',
|
||||
asUint("22") => '"',
|
||||
asUint("23") => '#',
|
||||
asUint("24") => '$',
|
||||
asUint("25") => '%',
|
||||
asUint("26") => '&',
|
||||
asUint("27") => '\'',
|
||||
asUint("28") => '(',
|
||||
asUint("29") => ')',
|
||||
asUint("2A") => '*',
|
||||
asUint("2B") => '+',
|
||||
asUint("2C") => ',',
|
||||
asUint("2F") => '/',
|
||||
asUint("3A") => ':',
|
||||
asUint("3B") => ';',
|
||||
asUint("3D") => '=',
|
||||
asUint("3F") => '?',
|
||||
asUint("40") => '@',
|
||||
asUint("5B") => '[',
|
||||
asUint("5D") => ']',
|
||||
else => HEX_DECODE[encoded[0]] << 4 | HEX_DECODE[encoded[1]],
|
||||
};
|
||||
input_pos += 3;
|
||||
},
|
||||
else => |c| {
|
||||
unescaped[unescaped_pos] = c;
|
||||
input_pos += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
return unescaped;
|
||||
}
|
||||
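The percent-decoding above compares each two-character hex code as a single u16 via asUint, imported from str/parser.zig. A self-contained sketch of that trick (illustrative, not part of the diff; the local asUint2 is an assumption about what the imported helper does, reduced to the two-byte case):

const std = @import("std");

// Reinterpret two bytes as one u16 so both can be compared in a single switch prong.
fn asUint2(comptime s: *const [2]u8) u16 {
    return @bitCast(s.*);
}

test "two-byte hex codes compare as a single u16" {
    const encoded = "2B"; // the percent-encoded form of '+'
    const as_uint: u16 = @bitCast(encoded[0..2].*);
    try std.testing.expectEqual(asUint2("2B"), as_uint);
    try std.testing.expect(asUint2("2B") != asUint2("2C"));
}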
|
||||
const testing = std.testing;
|
||||
test "url.Query: unescape" {
|
||||
const allocator = testing.allocator;
|
||||
const cases = [_]struct { expected: []const u8, input: []const u8, free: bool }{
|
||||
.{ .expected = "", .input = "", .free = false },
|
||||
.{ .expected = "over", .input = "over", .free = false },
|
||||
.{ .expected = "Hello World", .input = "Hello World", .free = false },
|
||||
.{ .expected = "~", .input = "%7E", .free = true },
|
||||
.{ .expected = "~", .input = "%7e", .free = true },
|
||||
.{ .expected = "Hello~World", .input = "Hello%7eWorld", .free = true },
|
||||
.{ .expected = "Hello World", .input = "Hello++World", .free = true },
|
||||
};
|
||||
|
||||
for (cases) |case| {
|
||||
const value = try unescape(allocator, case.input);
|
||||
defer if (case.free) {
|
||||
allocator.free(value);
|
||||
};
|
||||
try testing.expectEqualStrings(case.expected, value);
|
||||
}
|
||||
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "%"));
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "%a"));
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "%1"));
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "123%45%6"));
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "%zzzzz"));
|
||||
try testing.expectError(error.EscapeError, unescape(undefined, "%0\xff"));
|
||||
}
|
||||
|
||||
test "url.Query: parseQuery" {
|
||||
try testParseQuery(.{}, "");
|
||||
|
||||
try testParseQuery(.{}, "&");
|
||||
|
||||
try testParseQuery(.{ .a = [_][]const u8{"b"} }, "a=b");
|
||||
|
||||
try testParseQuery(.{ .hello = [_][]const u8{"world"} }, "hello=world");
|
||||
|
||||
try testParseQuery(.{ .hello = [_][]const u8{ "world", "all" } }, "hello=world&hello=all");
|
||||
|
||||
try testParseQuery(.{
|
||||
.a = [_][]const u8{"b"},
|
||||
.b = [_][]const u8{"c"},
|
||||
}, "a=b&b=c");
|
||||
|
||||
try testParseQuery(.{ .a = [_][]const u8{""} }, "a");
|
||||
try testParseQuery(.{ .a = [_][]const u8{ "", "", "" } }, "a&a&a");
|
||||
|
||||
try testParseQuery(.{ .abc = [_][]const u8{""} }, "abc");
|
||||
try testParseQuery(.{
|
||||
.abc = [_][]const u8{""},
|
||||
.dde = [_][]const u8{ "", "" },
|
||||
}, "abc&dde&dde");
|
||||
|
||||
try testParseQuery(.{
|
||||
.@"power is >" = [_][]const u8{"9,000?"},
|
||||
}, "power%20is%20%3E=9%2C000%3F");
|
||||
}
|
||||
|
||||
test "url.Query.Values: get/first/count" {
|
||||
var values = Values.init(testing.allocator);
|
||||
defer values.deinit();
|
||||
|
||||
{
|
||||
// empty
|
||||
try testing.expectEqual(0, values.count());
|
||||
try testing.expectEqual(0, values.get("").len);
|
||||
try testing.expectEqualStrings("", values.first(""));
|
||||
try testing.expectEqual(0, values.get("key").len);
|
||||
try testing.expectEqualStrings("", values.first("key"));
|
||||
}
|
||||
|
||||
{
|
||||
// add 1 value => key
|
||||
try values.appendOwned("key", "value");
|
||||
try testing.expectEqual(1, values.count());
|
||||
try testing.expectEqual(1, values.get("key").len);
|
||||
try testing.expectEqualSlices(
|
||||
[]const u8,
|
||||
&.{"value"},
|
||||
values.get("key"),
|
||||
);
|
||||
try testing.expectEqualStrings("value", values.first("key"));
|
||||
}
|
||||
|
||||
{
|
||||
// add another value for the same key
|
||||
try values.appendOwned("key", "another");
|
||||
try testing.expectEqual(1, values.count());
|
||||
try testing.expectEqual(2, values.get("key").len);
|
||||
try testing.expectEqualSlices(
|
||||
[]const u8,
|
||||
&.{ "value", "another" },
|
||||
values.get("key"),
|
||||
);
|
||||
try testing.expectEqualStrings("value", values.first("key"));
|
||||
}
|
||||
|
||||
{
|
||||
// add a new key (and value)
|
||||
try values.appendOwned("over", "9000!");
|
||||
try testing.expectEqual(2, values.count());
|
||||
try testing.expectEqual(2, values.get("key").len);
|
||||
try testing.expectEqual(1, values.get("over").len);
|
||||
try testing.expectEqualSlices(
|
||||
[]const u8,
|
||||
&.{"9000!"},
|
||||
values.get("over"),
|
||||
);
|
||||
try testing.expectEqualStrings("9000!", values.first("over"));
|
||||
}
|
||||
}
|
||||
|
||||
test "url.Query.Values: encode" {
|
||||
var values = try parseQuery(
|
||||
testing.allocator,
|
||||
"hello=world&i%20will%20not%20fear=%3E%3E&a=b&a=c",
|
||||
);
|
||||
defer values.deinit();
|
||||
|
||||
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||
defer buf.deinit(testing.allocator);
|
||||
try values.encode(buf.writer(testing.allocator));
|
||||
try testing.expectEqualStrings(
|
||||
"hello=world&i%20will%20not%20fear=%3E%3E&a=b&a=c",
|
||||
buf.items,
|
||||
);
|
||||
}
|
||||
|
||||
fn testParseQuery(expected: anytype, query: []const u8) !void {
|
||||
var values = try parseQuery(testing.allocator, query);
|
||||
defer values.deinit();
|
||||
|
||||
var count: usize = 0;
|
||||
inline for (@typeInfo(@TypeOf(expected)).@"struct".fields) |f| {
|
||||
const actual = values.get(f.name);
|
||||
const expect = @field(expected, f.name);
|
||||
try testing.expectEqual(expect.len, actual.len);
|
||||
for (expect, actual) |e, a| {
|
||||
try testing.expectEqualStrings(e, a);
|
||||
}
|
||||
count += 1;
|
||||
}
|
||||
try testing.expectEqual(count, values.count());
|
||||
}
|
||||
@@ -17,23 +17,13 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Env = @import("../env.zig").Env;
|
||||
const Page = @import("../page.zig").Page;
|
||||
const FormData = @import("../xhr/form_data.zig").FormData;
|
||||
const HTMLElement = @import("../html/elements.zig").HTMLElement;
|
||||
|
||||
const kv = @import("../key_value.zig");
|
||||
const iterator = @import("../iterator/iterator.zig");
|
||||
const query = @import("query.zig");
|
||||
|
||||
pub const Interfaces = .{
|
||||
URL,
|
||||
URLSearchParams,
|
||||
KeyIterable,
|
||||
ValueIterable,
|
||||
EntryIterable,
|
||||
};
|
||||
|
||||
// https://url.spec.whatwg.org/#url
|
||||
@@ -48,47 +38,25 @@ pub const Interfaces = .{
|
||||
// allocatorate data, I should be able to retrieve the scheme + the following `:`
|
||||
// from rawuri.
|
||||
//
|
||||
// 2. The other way would be to copy the `std.Uri` code to have a dedicated
|
||||
// 2. The other way would bu to copy the `std.Uri` code to ahve a dedicated
|
||||
// parser including the characters we want for the web API.
|
||||
pub const URL = struct {
|
||||
uri: std.Uri,
|
||||
search_params: URLSearchParams,
|
||||
|
||||
const URLArg = union(enum) {
|
||||
url: *URL,
|
||||
element: *parser.ElementHTML,
|
||||
string: []const u8,
|
||||
pub fn constructor(
|
||||
url: []const u8,
|
||||
base: ?[]const u8,
|
||||
state: *SessionState,
|
||||
) !URL {
|
||||
const arena = state.arena;
|
||||
const raw = try std.mem.concat(arena, u8, &[_][]const u8{ url, base orelse "" });
|
||||
|
||||
fn toString(self: URLArg, arena: Allocator) !?[]const u8 {
|
||||
switch (self) {
|
||||
.string => |s| return s,
|
||||
.url => |url| return try url.toString(arena),
|
||||
.element => |e| return try parser.elementGetAttribute(@ptrCast(e), "href"),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub fn constructor(url: URLArg, base: ?URLArg, page: *Page) !URL {
|
||||
const arena = page.arena;
|
||||
const url_str = try url.toString(arena) orelse return error.InvalidArgument;
|
||||
|
||||
var raw: ?[]const u8 = null;
|
||||
if (base) |b| {
|
||||
if (try b.toString(arena)) |bb| {
|
||||
raw = try @import("../../url.zig").URL.stitch(arena, url_str, bb, .{});
|
||||
}
|
||||
}
|
||||
|
||||
if (raw == null) {
|
||||
// if it was a URL, then it's already owned by the arena
|
||||
raw = if (url == .url) url_str else try arena.dupe(u8, url_str);
|
||||
}
|
||||
|
||||
const uri = std.Uri.parse(raw.?) catch return error.TypeError;
|
||||
const uri = std.Uri.parse(raw) catch return error.TypeError;
|
||||
return init(arena, uri);
|
||||
}
|
||||
|
||||
pub fn init(arena: Allocator, uri: std.Uri) !URL {
|
||||
pub fn init(arena: std.mem.Allocator, uri: std.Uri) !URL {
|
||||
return .{
|
||||
.uri = uri,
|
||||
.search_params = try URLSearchParams.init(
|
||||
@@ -98,8 +66,8 @@ pub const URL = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_origin(self: *URL, page: *Page) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(page.arena);
|
||||
pub fn get_origin(self: *URL, state: *SessionState) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(state.arena);
|
||||
try self.uri.writeToStream(.{
|
||||
.scheme = true,
|
||||
.authentication = false,
|
||||
@@ -112,42 +80,37 @@ pub const URL = struct {
|
||||
}
|
||||
|
||||
// get_href returns the URL by writing all its components.
|
||||
pub fn get_href(self: *URL, page: *Page) ![]const u8 {
|
||||
return self.toString(page.arena);
|
||||
}
|
||||
// The query is replaced by a dump of search params.
|
||||
//
|
||||
pub fn get_href(self: *URL, state: *SessionState) ![]const u8 {
|
||||
const arena = state.arena;
|
||||
// retrieve the query search from search_params.
|
||||
const cur = self.uri.query;
|
||||
defer self.uri.query = cur;
|
||||
var q = std.ArrayList(u8).init(arena);
|
||||
try self.search_params.values.encode(q.writer());
|
||||
self.uri.query = .{ .percent_encoded = q.items };
|
||||
|
||||
pub fn _toString(self: *URL, page: *Page) ![]const u8 {
|
||||
return self.toString(page.arena);
|
||||
return try self.toString(arena);
|
||||
}
|
||||
|
||||
// format the url with all its components.
|
||||
pub fn toString(self: *const URL, arena: Allocator) ![]const u8 {
|
||||
var buf: std.ArrayListUnmanaged(u8) = .empty;
|
||||
pub fn toString(self: *URL, arena: std.mem.Allocator) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(arena);
|
||||
|
||||
try self.uri.writeToStream(.{
|
||||
.scheme = true,
|
||||
.authentication = true,
|
||||
.authority = true,
|
||||
.path = uriComponentNullStr(self.uri.path).len > 0,
|
||||
}, buf.writer(arena));
|
||||
|
||||
if (self.search_params.get_size() > 0) {
|
||||
try buf.append(arena, '?');
|
||||
try self.search_params.write(buf.writer(arena));
|
||||
}
|
||||
|
||||
{
|
||||
const fragment = uriComponentNullStr(self.uri.fragment);
|
||||
if (fragment.len > 0) {
|
||||
try buf.append(arena, '#');
|
||||
try buf.appendSlice(arena, fragment);
|
||||
}
|
||||
}
|
||||
|
||||
.query = uriComponentNullStr(self.uri.query).len > 0,
|
||||
.fragment = uriComponentNullStr(self.uri.fragment).len > 0,
|
||||
}, buf.writer());
|
||||
return buf.items;
|
||||
}
|
||||
|
||||
pub fn get_protocol(self: *URL, page: *Page) ![]const u8 {
|
||||
return try std.mem.concat(page.arena, u8, &[_][]const u8{ self.uri.scheme, ":" });
|
||||
pub fn get_protocol(self: *URL, state: *SessionState) ![]const u8 {
|
||||
return try std.mem.concat(state.arena, u8, &[_][]const u8{ self.uri.scheme, ":" });
|
||||
}
|
||||
|
||||
pub fn get_username(self: *URL) []const u8 {
|
||||
@@ -158,8 +121,8 @@ pub const URL = struct {
|
||||
return uriComponentNullStr(self.uri.password);
|
||||
}
|
||||
|
||||
pub fn get_host(self: *URL, page: *Page) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(page.arena);
|
||||
pub fn get_host(self: *URL, state: *SessionState) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(state.arena);
|
||||
|
||||
try self.uri.writeToStream(.{
|
||||
.scheme = false,
|
||||
@@ -176,8 +139,8 @@ pub const URL = struct {
|
||||
return uriComponentNullStr(self.uri.host);
|
||||
}
|
||||
|
||||
pub fn get_port(self: *URL, page: *Page) ![]const u8 {
|
||||
const arena = page.arena;
|
||||
pub fn get_port(self: *URL, state: *SessionState) ![]const u8 {
|
||||
const arena = state.arena;
|
||||
if (self.uri.port == null) return try arena.dupe(u8, "");
|
||||
|
||||
var buf = std.ArrayList(u8).init(arena);
|
||||
@@ -190,28 +153,19 @@ pub const URL = struct {
|
||||
return uriComponentStr(self.uri.path);
|
||||
}
|
||||
|
||||
pub fn get_search(self: *URL, page: *Page) ![]const u8 {
|
||||
const arena = page.arena;
|
||||
|
||||
if (self.search_params.get_size() == 0) {
|
||||
return "";
|
||||
}
|
||||
pub fn get_search(self: *URL, state: *SessionState) ![]const u8 {
|
||||
const arena = state.arena;
|
||||
if (self.search_params.get_size() == 0) return try arena.dupe(u8, "");
|
||||
|
||||
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||
|
||||
try buf.append(arena, '?');
|
||||
try self.search_params.encode(buf.writer(arena));
|
||||
try self.search_params.values.encode(buf.writer(arena));
|
||||
return buf.items;
|
||||
}
|
||||
|
||||
pub fn set_search(self: *URL, qs_: ?[]const u8, page: *Page) !void {
|
||||
self.search_params = .{};
|
||||
if (qs_) |qs| {
|
||||
self.search_params = try URLSearchParams.init(page.arena, qs);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_hash(self: *URL, page: *Page) ![]const u8 {
|
||||
const arena = page.arena;
|
||||
pub fn get_hash(self: *URL, state: *SessionState) ![]const u8 {
|
||||
const arena = state.arena;
|
||||
if (self.uri.fragment == null) return try arena.dupe(u8, "");
|
||||
|
||||
return try std.mem.concat(arena, u8, &[_][]const u8{ "#", uriComponentNullStr(self.uri.fragment) });
|
||||
@@ -221,8 +175,8 @@ pub const URL = struct {
|
||||
return &self.search_params;
|
||||
}
|
||||
|
||||
pub fn _toJSON(self: *URL, page: *Page) ![]const u8 {
|
||||
return self.get_href(page);
|
||||
pub fn _toJSON(self: *URL, state: *SessionState) ![]const u8 {
|
||||
return try self.get_href(state);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -242,250 +196,47 @@ fn uriComponentStr(c: std.Uri.Component) []const u8 {
|
||||
}
|
||||
|
||||
// https://url.spec.whatwg.org/#interface-urlsearchparams
|
||||
// TODO array like
|
||||
pub const URLSearchParams = struct {
|
||||
entries: kv.List = .{},
|
||||
values: query.Values,
|
||||
|
||||
const URLSearchParamsOpts = union(enum) {
|
||||
qs: []const u8,
|
||||
form_data: *const FormData,
|
||||
js_obj: Env.JsObject,
|
||||
};
|
||||
pub fn constructor(opts_: ?URLSearchParamsOpts, page: *Page) !URLSearchParams {
|
||||
const opts = opts_ orelse return .{ .entries = .{} };
|
||||
return switch (opts) {
|
||||
.qs => |qs| init(page.arena, qs),
|
||||
.form_data => |fd| .{ .entries = try fd.entries.clone(page.arena) },
|
||||
.js_obj => |js_obj| {
|
||||
const arena = page.arena;
|
||||
var it = js_obj.nameIterator();
|
||||
|
||||
var entries: kv.List = .{};
|
||||
try entries.ensureTotalCapacity(arena, it.count);
|
||||
|
||||
while (try it.next()) |js_name| {
|
||||
const name = try js_name.toString(arena);
|
||||
const js_val = try js_obj.get(name);
|
||||
entries.appendOwnedAssumeCapacity(
|
||||
name,
|
||||
try js_val.toString(arena),
|
||||
);
|
||||
}
|
||||
|
||||
return .{ .entries = entries };
|
||||
},
|
||||
};
|
||||
pub fn constructor(qs: ?[]const u8, state: *SessionState) !URLSearchParams {
|
||||
return init(state.arena, qs);
|
||||
}
|
||||
|
||||
pub fn init(arena: Allocator, qs_: ?[]const u8) !URLSearchParams {
|
||||
pub fn init(arena: std.mem.Allocator, qs: ?[]const u8) !URLSearchParams {
|
||||
return .{
|
||||
.entries = if (qs_) |qs| try parseQuery(arena, qs) else .{},
|
||||
.values = try query.parseQuery(arena, qs orelse ""),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_size(self: *const URLSearchParams) u32 {
|
||||
return @intCast(self.entries.count());
|
||||
pub fn get_size(self: *URLSearchParams) u32 {
|
||||
return @intCast(self.values.count());
|
||||
}
|
||||
|
||||
pub fn _append(self: *URLSearchParams, name: []const u8, value: []const u8, page: *Page) !void {
|
||||
return self.entries.append(page.arena, name, value);
|
||||
pub fn _append(self: *URLSearchParams, name: []const u8, value: []const u8) !void {
|
||||
try self.values.append(name, value);
|
||||
}
|
||||
|
||||
pub fn _set(self: *URLSearchParams, name: []const u8, value: []const u8, page: *Page) !void {
|
||||
return self.entries.set(page.arena, name, value);
|
||||
pub fn _delete(self: *URLSearchParams, name: []const u8, value: ?[]const u8) !void {
|
||||
if (value) |v| return self.values.deleteValue(name, v);
|
||||
|
||||
self.values.delete(name);
|
||||
}
|
||||
|
||||
pub fn _delete(self: *URLSearchParams, name: []const u8, value_: ?[]const u8) void {
|
||||
if (value_) |value| {
|
||||
return self.entries.deleteKeyValue(name, value);
|
||||
}
|
||||
return self.entries.delete(name);
|
||||
pub fn _get(self: *URLSearchParams, name: []const u8) ?[]const u8 {
|
||||
return self.values.first(name);
|
||||
}
|
||||
|
||||
pub fn _get(self: *const URLSearchParams, name: []const u8) ?[]const u8 {
|
||||
return self.entries.get(name);
|
||||
}
|
||||
|
||||
pub fn _getAll(self: *const URLSearchParams, name: []const u8, page: *Page) ![]const []const u8 {
|
||||
return self.entries.getAll(page.call_arena, name);
|
||||
}
|
||||
|
||||
pub fn _has(self: *const URLSearchParams, name: []const u8) bool {
|
||||
return self.entries.has(name);
|
||||
}
|
||||
|
||||
pub fn _keys(self: *const URLSearchParams) KeyIterable {
|
||||
return .{ .inner = self.entries.keyIterator() };
|
||||
}
|
||||
|
||||
pub fn _values(self: *const URLSearchParams) ValueIterable {
|
||||
return .{ .inner = self.entries.valueIterator() };
|
||||
}
|
||||
|
||||
pub fn _entries(self: *const URLSearchParams) EntryIterable {
|
||||
return .{ .inner = self.entries.entryIterator() };
|
||||
}
|
||||
|
||||
pub fn _symbol_iterator(self: *const URLSearchParams) EntryIterable {
|
||||
return self._entries();
|
||||
}
|
||||
|
||||
pub fn _toString(self: *const URLSearchParams, page: *Page) ![]const u8 {
|
||||
var arr: std.ArrayListUnmanaged(u8) = .empty;
|
||||
try self.write(arr.writer(page.call_arena));
|
||||
return arr.items;
|
||||
}
|
||||
|
||||
fn write(self: *const URLSearchParams, writer: anytype) !void {
|
||||
return kv.urlEncode(self.entries, .query, writer);
|
||||
}
|
||||
// TODO return generates an error: caught unexpected error 'TypeLookup'
|
||||
// pub fn _getAll(self: *URLSearchParams, name: []const u8) [][]const u8 {
|
||||
// try self.values.get(name);
|
||||
// }
|
||||
|
||||
// TODO
|
||||
pub fn _sort(_: *URLSearchParams) void {}
|
||||
|
||||
fn encode(self: *const URLSearchParams, writer: anytype) !void {
|
||||
return kv.urlEncode(self.entries, .query, writer);
|
||||
}
|
||||
};
|
||||
|
||||
// Parse the given query.
|
||||
fn parseQuery(arena: Allocator, s: []const u8) !kv.List {
|
||||
var list = kv.List{};
|
||||
|
||||
const ln = s.len;
|
||||
if (ln == 0) {
|
||||
return list;
|
||||
}
|
||||
|
||||
var query = if (s[0] == '?') s[1..] else s;
|
||||
while (query.len > 0) {
|
||||
const i = std.mem.indexOfScalarPos(u8, query, 0, '=') orelse query.len;
|
||||
const name = query[0..i];
|
||||
|
||||
var value: ?[]const u8 = null;
|
||||
if (i < query.len) {
|
||||
query = query[i + 1 ..];
|
||||
const j = std.mem.indexOfScalarPos(u8, query, 0, '&') orelse query.len;
|
||||
value = query[0..j];
|
||||
|
||||
query = if (j < query.len) query[j + 1 ..] else "";
|
||||
} else {
|
||||
query = "";
|
||||
}
|
||||
|
||||
try list.appendOwned(
|
||||
arena,
|
||||
try unescape(arena, name),
|
||||
if (value) |v| try unescape(arena, v) else "",
|
||||
);
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
fn unescape(arena: Allocator, input: []const u8) ![]const u8 {
|
||||
const HEX_CHAR = comptime blk: {
|
||||
var all = std.mem.zeroes([256]bool);
|
||||
for ('a'..('f' + 1)) |b| all[b] = true;
|
||||
for ('A'..('F' + 1)) |b| all[b] = true;
|
||||
for ('0'..('9' + 1)) |b| all[b] = true;
|
||||
break :blk all;
|
||||
};
|
||||
|
||||
const HEX_DECODE = comptime blk: {
|
||||
var all = std.mem.zeroes([256]u8);
|
||||
for ('a'..('z' + 1)) |b| all[b] = b - 'a' + 10;
|
||||
for ('A'..('Z' + 1)) |b| all[b] = b - 'A' + 10;
|
||||
for ('0'..('9' + 1)) |b| all[b] = b - '0';
|
||||
break :blk all;
|
||||
};
|
||||
|
||||
var has_plus = false;
|
||||
var unescaped_len = input.len;
|
||||
|
||||
{
|
||||
// Figure out if we have any spaces and what the final unescaped length
|
||||
// will be (which will let us know if we have anything to unescape in
|
||||
// the first place)
|
||||
var i: usize = 0;
|
||||
while (i < input.len) {
|
||||
const c = input[i];
|
||||
if (c == '%') {
|
||||
if (i + 2 >= input.len or !HEX_CHAR[input[i + 1]] or !HEX_CHAR[input[i + 2]]) {
|
||||
return error.EscapeError;
|
||||
}
|
||||
i += 3;
|
||||
unescaped_len -= 2;
|
||||
} else if (c == '+') {
|
||||
has_plus = true;
|
||||
i += 1;
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// no encoding, and no plus. nothing to unescape
|
||||
if (unescaped_len == input.len and has_plus == false) {
|
||||
// we always dupe, because we know our caller wants it always duped.
|
||||
return arena.dupe(u8, input);
|
||||
}
|
||||
|
||||
var unescaped = try arena.alloc(u8, unescaped_len);
|
||||
errdefer arena.free(unescaped);
|
||||
|
||||
var input_pos: usize = 0;
|
||||
for (0..unescaped_len) |unescaped_pos| {
|
||||
switch (input[input_pos]) {
|
||||
'+' => {
|
||||
unescaped[unescaped_pos] = ' ';
|
||||
input_pos += 1;
|
||||
},
|
||||
'%' => {
|
||||
const encoded = input[input_pos + 1 .. input_pos + 3];
|
||||
const encoded_as_uint = @as(u16, @bitCast(encoded[0..2].*));
|
||||
unescaped[unescaped_pos] = switch (encoded_as_uint) {
|
||||
asUint(u16, "20") => ' ',
|
||||
asUint(u16, "21") => '!',
|
||||
asUint(u16, "22") => '"',
|
||||
asUint(u16, "23") => '#',
|
||||
asUint(u16, "24") => '$',
|
||||
asUint(u16, "25") => '%',
|
||||
asUint(u16, "26") => '&',
|
||||
asUint(u16, "27") => '\'',
|
||||
asUint(u16, "28") => '(',
|
||||
asUint(u16, "29") => ')',
|
||||
asUint(u16, "2A") => '*',
|
||||
asUint(u16, "2B") => '+',
|
||||
asUint(u16, "2C") => ',',
|
||||
asUint(u16, "2F") => '/',
|
||||
asUint(u16, "3A") => ':',
|
||||
asUint(u16, "3B") => ';',
|
||||
asUint(u16, "3D") => '=',
|
||||
asUint(u16, "3F") => '?',
|
||||
asUint(u16, "40") => '@',
|
||||
asUint(u16, "5B") => '[',
|
||||
asUint(u16, "5D") => ']',
|
||||
else => HEX_DECODE[encoded[0]] << 4 | HEX_DECODE[encoded[1]],
|
||||
};
|
||||
input_pos += 3;
|
||||
},
|
||||
else => |c| {
|
||||
unescaped[unescaped_pos] = c;
|
||||
input_pos += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
return unescaped;
|
||||
}
|
||||
|
||||
fn asUint(comptime T: type, comptime string: []const u8) T {
|
||||
return @bitCast(string[0..string.len].*);
|
||||
}
|
||||
|
||||
const KeyIterable = iterator.Iterable(kv.KeyIterator, "URLSearchParamsKeyIterator");
|
||||
const ValueIterable = iterator.Iterable(kv.ValueIterator, "URLSearchParamsValueIterator");
|
||||
const EntryIterable = iterator.Iterable(kv.EntryIterator, "URLSearchParamsEntryIterator");
|
||||
|
||||
const testing = @import("../../testing.zig");
|
||||
test "Browser.URL" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
@@ -505,27 +256,6 @@ test "Browser.URL" {
|
||||
.{ "url.search", "?query" },
|
||||
.{ "url.hash", "#fragment" },
|
||||
.{ "url.searchParams.get('query')", "" },
|
||||
|
||||
.{ "url.search = 'hello=world'", null },
|
||||
.{ "url.searchParams.size", "1" },
|
||||
.{ "url.searchParams.get('hello')", "world" },
|
||||
|
||||
.{ "url.search = '?over=9000'", null },
|
||||
.{ "url.searchParams.size", "1" },
|
||||
.{ "url.searchParams.get('over')", "9000" },
|
||||
|
||||
.{ "url.search = ''", null },
|
||||
.{ "url.searchParams.size", "0" },
|
||||
|
||||
.{ " const url2 = new URL(url);", null },
|
||||
.{ "url2.href", "https://foo.bar/path#fragment" },
|
||||
|
||||
.{ " try { new URL(document.createElement('a')); } catch (e) { e }", "TypeError: invalid argument" },
|
||||
|
||||
.{ " let a = document.createElement('a');", null },
|
||||
.{ " a.href = 'https://www.lightpanda.io/over?9000=!!';", null },
|
||||
.{ " const url3 = new URL(a);", null },
|
||||
.{ "url3.href", "https://www.lightpanda.io/over?9000=%21%21" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
@@ -534,108 +264,16 @@ test "Browser.URL" {
|
||||
.{ "url.searchParams.get('b')", "~" },
|
||||
.{ "url.searchParams.append('c', 'foo')", "undefined" },
|
||||
.{ "url.searchParams.get('c')", "foo" },
|
||||
.{ "url.searchParams.getAll('c').length", "1" },
|
||||
.{ "url.searchParams.getAll('c')[0]", "foo" },
|
||||
.{ "url.searchParams.size", "3" },
|
||||
|
||||
// search is dynamic
|
||||
.{ "url.search", "?a=~&b=~&c=foo" },
|
||||
.{ "url.search", "?a=%7E&b=%7E&c=foo" },
|
||||
// href is dynamic
|
||||
.{ "url.href", "https://foo.bar/path?a=~&b=~&c=foo#fragment" },
|
||||
.{ "url.href", "https://foo.bar/path?a=%7E&b=%7E&c=foo#fragment" },
|
||||
|
||||
.{ "url.searchParams.delete('c', 'foo')", "undefined" },
|
||||
.{ "url.searchParams.get('c')", "null" },
|
||||
.{ "url.searchParams.get('c')", "" },
|
||||
.{ "url.searchParams.delete('a')", "undefined" },
|
||||
.{ "url.searchParams.get('a')", "null" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "var url = new URL('over?9000', 'https://lightpanda.io')", null },
|
||||
.{ "url.href", "https://lightpanda.io/over?9000" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
test "Browser.URLSearchParams" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{});
|
||||
defer runner.deinit();
|
||||
try runner.testCases(&.{
|
||||
.{ "let usp = new URLSearchParams()", null },
|
||||
.{ "usp.get('a')", "null" },
|
||||
.{ "usp.has('a')", "false" },
|
||||
.{ "usp.getAll('a')", "" },
|
||||
.{ "usp.delete('a')", "undefined" },
|
||||
|
||||
.{ "usp.set('a', 1)", "undefined" },
|
||||
.{ "usp.has('a')", "true" },
|
||||
.{ "usp.get('a')", "1" },
|
||||
.{ "usp.getAll('a')", "1" },
|
||||
|
||||
.{ "usp.append('a', 2)", "undefined" },
|
||||
.{ "usp.has('a')", "true" },
|
||||
.{ "usp.get('a')", "1" },
|
||||
.{ "usp.getAll('a')", "1,2" },
|
||||
|
||||
.{ "usp.append('b', '3')", "undefined" },
|
||||
.{ "usp.has('a')", "true" },
|
||||
.{ "usp.get('a')", "1" },
|
||||
.{ "usp.getAll('a')", "1,2" },
|
||||
.{ "usp.has('b')", "true" },
|
||||
.{ "usp.get('b')", "3" },
|
||||
.{ "usp.getAll('b')", "3" },
|
||||
|
||||
.{ "let acc = [];", null },
|
||||
.{ "for (const key of usp.keys()) { acc.push(key) }; acc;", "a,a,b" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const value of usp.values()) { acc.push(value) }; acc;", "1,2,3" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const entry of usp.entries()) { acc.push(entry) }; acc;", "a,1,a,2,b,3" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const entry of usp) { acc.push(entry) }; acc;", "a,1,a,2,b,3" },
|
||||
|
||||
.{ "usp.delete('a')", "undefined" },
|
||||
.{ "usp.has('a')", "false" },
|
||||
.{ "usp.has('b')", "true" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const key of usp.keys()) { acc.push(key) }; acc;", "b" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const value of usp.values()) { acc.push(value) }; acc;", "3" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const entry of usp.entries()) { acc.push(entry) }; acc;", "b,3" },
|
||||
|
||||
.{ "acc = [];", null },
|
||||
.{ "for (const entry of usp) { acc.push(entry) }; acc;", "b,3" },
|
||||
}, .{});
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "usp = new URLSearchParams('?hello')", null },
|
||||
.{ "usp.get('hello')", "" },
|
||||
|
||||
.{ "usp = new URLSearchParams('?abc=')", null },
|
||||
.{ "usp.get('abc')", "" },
|
||||
|
||||
.{ "usp = new URLSearchParams('?abc=123&')", null },
|
||||
.{ "usp.get('abc')", "123" },
|
||||
.{ "usp.size", "1" },
|
||||
|
||||
.{ "var fd = new FormData()", null },
|
||||
.{ "fd.append('a', '1')", null },
|
||||
.{ "fd.append('a', '2')", null },
|
||||
.{ "fd.append('b', '3')", null },
|
||||
.{ "ups = new URLSearchParams(fd)", null },
|
||||
.{ "ups.size", "3" },
|
||||
.{ "ups.getAll('a')", "1,2" },
|
||||
.{ "ups.getAll('b')", "3" },
|
||||
.{ "fd.delete('a')", null }, // the two aren't linked, it created a copy
|
||||
.{ "ups.size", "3" },
|
||||
.{ "ups = new URLSearchParams({over: 9000, spice: 'flow'})", null },
|
||||
.{ "ups.size", "2" },
|
||||
.{ "ups.getAll('over')", "9000" },
|
||||
.{ "ups.getAll('spice')", "flow" },
|
||||
.{ "url.searchParams.get('a')", "" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -25,7 +25,9 @@ const EventTarget = @import("../dom/event_target.zig").EventTarget;
|
||||
const EventHandler = @import("../events/event.zig").EventHandler;
|
||||
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const log = std.log.scoped(.xhr);
|
||||
|
||||
pub const XMLHttpRequestEventTarget = struct {
|
||||
pub const prototype = *EventTarget;
|
||||
@@ -44,18 +46,17 @@ pub const XMLHttpRequestEventTarget = struct {
|
||||
self: *XMLHttpRequestEventTarget,
|
||||
alloc: std.mem.Allocator,
|
||||
typ: []const u8,
|
||||
listener: EventHandler.Listener,
|
||||
) !?Function {
|
||||
cbk: Function,
|
||||
) !void {
|
||||
const target = @as(*parser.EventTarget, @ptrCast(self));
|
||||
|
||||
// The only time this can return null if the listener is already
|
||||
// registered. But before calling `register`, all of our functions
|
||||
// remove any existing listener, so it should be impossible to get null
|
||||
// from this function call.
|
||||
const eh = (try EventHandler.register(alloc, target, typ, listener, null)) orelse unreachable;
|
||||
return eh.callback;
|
||||
const eh = try EventHandler.init(alloc, try cbk.withThis(target));
|
||||
try parser.eventTargetAddEventListener(
|
||||
target,
|
||||
typ,
|
||||
&eh.node,
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
fn unregister(self: *XMLHttpRequestEventTarget, typ: []const u8, cbk_id: usize) !void {
|
||||
const et = @as(*parser.EventTarget, @ptrCast(self));
|
||||
// check if event target has already this listener
|
||||
@@ -87,28 +88,41 @@ pub const XMLHttpRequestEventTarget = struct {
|
||||
return self.onloadend_cbk;
|
||||
}
|
||||
|
||||
pub fn set_onloadstart(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_onloadstart(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.onloadstart_cbk) |cbk| try self.unregister("loadstart", cbk.id);
|
||||
self.onloadstart_cbk = try self.register(page.arena, "loadstart", listener);
|
||||
try self.register(state.arena, "loadstart", handler);
|
||||
self.onloadstart_cbk = handler;
|
||||
}
|
||||
pub fn set_onprogress(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_onprogress(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.onprogress_cbk) |cbk| try self.unregister("progress", cbk.id);
|
||||
self.onprogress_cbk = try self.register(page.arena, "progress", listener);
|
||||
try self.register(state.arena, "progress", handler);
|
||||
self.onprogress_cbk = handler;
|
||||
}
|
||||
pub fn set_onabort(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_onabort(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.onabort_cbk) |cbk| try self.unregister("abort", cbk.id);
|
||||
self.onabort_cbk = try self.register(page.arena, "abort", listener);
|
||||
try self.register(state.arena, "abort", handler);
|
||||
self.onabort_cbk = handler;
|
||||
}
|
||||
pub fn set_onload(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_onload(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.onload_cbk) |cbk| try self.unregister("load", cbk.id);
|
||||
self.onload_cbk = try self.register(page.arena, "load", listener);
|
||||
try self.register(state.arena, "load", handler);
|
||||
self.onload_cbk = handler;
|
||||
}
|
||||
pub fn set_ontimeout(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_ontimeout(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.ontimeout_cbk) |cbk| try self.unregister("timeout", cbk.id);
|
||||
self.ontimeout_cbk = try self.register(page.arena, "timeout", listener);
|
||||
try self.register(state.arena, "timeout", handler);
|
||||
self.ontimeout_cbk = handler;
|
||||
}
|
||||
pub fn set_onloadend(self: *XMLHttpRequestEventTarget, listener: EventHandler.Listener, page: *Page) !void {
|
||||
pub fn set_onloadend(self: *XMLHttpRequestEventTarget, handler: Function, state: *SessionState) !void {
|
||||
if (self.onloadend_cbk) |cbk| try self.unregister("loadend", cbk.id);
|
||||
self.onloadend_cbk = try self.register(page.arena, "loadend", listener);
|
||||
try self.register(state.arena, "loadend", handler);
|
||||
self.onloadend_cbk = handler;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *XMLHttpRequestEventTarget, state: *SessionState) void {
|
||||
const arena = state.arena;
|
||||
parser.eventTargetRemoveAllEventListeners(@as(*parser.EventTarget, @ptrCast(self)), arena) catch |e| {
|
||||
log.err("remove all listeners: {any}", .{e});
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
@@ -20,11 +20,11 @@ const std = @import("std");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const kv = @import("../key_value.zig");
|
||||
const iterator = @import("../iterator/iterator.zig");
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const log = std.log.scoped(.form_data);
|
||||
|
||||
pub const Interfaces = .{
|
||||
FormData,
|
||||
@@ -33,95 +33,179 @@ pub const Interfaces = .{
|
||||
EntryIterable,
|
||||
};
|
||||
|
||||
// We store the values in an ArrayList rather than a
// StringArrayHashMap([]const u8) because of the way the iterators (i.e., keys(),
// values() and entries()) work. The FormData can contain duplicate keys, and
// each iteration yields 1 key=>value pair. So, given:
//
// let f = new FormData();
// f.append('a', '1');
// f.append('a', '2');
//
// Then we'd expect f.keys(), f.values() and f.entries() to yield 2 results:
// ['a', '1']
// ['a', '2']
//
// This is much easier to do with an ArrayList than a HashMap, especially given
// that the FormData could be mutated while iterating.
// The downside is that most of the normal operations are O(N).
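A minimal stand-alone illustration of that layout (not part of the diff; only the standard library is assumed, and the Entry struct here is a stripped-down stand-in for the real one, which also dupes keys and values):

const std = @import("std");

const Entry = struct { key: []const u8, value: []const u8 };

test "duplicate keys iterate in insertion order" {
    var entries: std.ArrayListUnmanaged(Entry) = .{};
    defer entries.deinit(std.testing.allocator);

    // f.append('a', '1'); f.append('a', '2');
    try entries.append(std.testing.allocator, .{ .key = "a", .value = "1" });
    try entries.append(std.testing.allocator, .{ .key = "a", .value = "2" });

    // Iterating the list yields one pair per append, duplicates included,
    // which is exactly what keys()/values()/entries() need to expose.
    try std.testing.expectEqual(2, entries.items.len);
    try std.testing.expectEqualStrings("1", entries.items[0].value);
    try std.testing.expectEqualStrings("2", entries.items[1].value);
}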
|
||||
// https://xhr.spec.whatwg.org/#interface-formdata
|
||||
pub const FormData = struct {
|
||||
entries: kv.List,
|
||||
entries: std.ArrayListUnmanaged(Entry),
|
||||
|
||||
pub fn constructor(form_: ?*parser.Form, submitter_: ?*parser.ElementHTML, page: *Page) !FormData {
|
||||
const form = form_ orelse return .{ .entries = .{} };
|
||||
return fromForm(form, submitter_, page);
|
||||
pub fn constructor(form_: ?*parser.Form, submitter_: ?*parser.ElementHTML, state: *SessionState) !FormData {
|
||||
const form = form_ orelse return .{ .entries = .empty };
|
||||
return fromForm(form, submitter_, state, .{});
|
||||
}
|
||||
|
||||
pub fn fromForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, page: *Page) !FormData {
|
||||
const entries = try collectForm(form, submitter_, page);
|
||||
const FromFormOpts = struct {
|
||||
// Uses the state.arena if null. This is needed for when we're handling
|
||||
// form submission from the Page, and we want to capture the form within
|
||||
// the session's transfer_arena.
|
||||
allocator: ?Allocator = null,
|
||||
};
|
||||
pub fn fromForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, state: *SessionState, opts: FromFormOpts) !FormData {
|
||||
const entries = try collectForm(opts.allocator orelse state.arena, form, submitter_, state);
|
||||
return .{ .entries = entries };
|
||||
}
|
||||
|
||||
pub fn _get(self: *const FormData, key: []const u8) ?[]const u8 {
|
||||
return self.entries.get(key);
|
||||
const result = self.find(key) orelse return null;
|
||||
return result.entry.value;
|
||||
}
|
||||
|
||||
pub fn _getAll(self: *const FormData, key: []const u8, page: *Page) ![]const []const u8 {
|
||||
return self.entries.getAll(page.call_arena, key);
|
||||
pub fn _getAll(self: *const FormData, key: []const u8, state: *SessionState) ![][]const u8 {
|
||||
const arena = state.call_arena;
|
||||
var arr: std.ArrayListUnmanaged([]const u8) = .empty;
|
||||
for (self.entries.items) |entry| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
try arr.append(arena, entry.value);
|
||||
}
|
||||
}
|
||||
return arr.items;
|
||||
}
|
||||
|
||||
pub fn _has(self: *const FormData, key: []const u8) bool {
|
||||
return self.entries.has(key);
|
||||
return self.find(key) != null;
|
||||
}
|
||||
|
||||
// TODO: value should be a string or blob
|
||||
// TODO: another optional parameter for the filename
|
||||
pub fn _set(self: *FormData, key: []const u8, value: []const u8, page: *Page) !void {
|
||||
return self.entries.set(page.arena, key, value);
|
||||
pub fn _set(self: *FormData, key: []const u8, value: []const u8, state: *SessionState) !void {
|
||||
self._delete(key);
|
||||
return self._append(key, value, state);
|
||||
}
|
||||
|
||||
// TODO: value should be a string or blob
|
||||
// TODO: another optional parameter for the filename
|
||||
pub fn _append(self: *FormData, key: []const u8, value: []const u8, page: *Page) !void {
|
||||
return self.entries.append(page.arena, key, value);
|
||||
pub fn _append(self: *FormData, key: []const u8, value: []const u8, state: *SessionState) !void {
|
||||
const arena = state.arena;
|
||||
return self.entries.append(arena, .{ .key = try arena.dupe(u8, key), .value = try arena.dupe(u8, value) });
|
||||
}
|
||||
|
||||
pub fn _delete(self: *FormData, key: []const u8) void {
|
||||
return self.entries.delete(key);
|
||||
var i: usize = 0;
|
||||
while (i < self.entries.items.len) {
|
||||
const entry = self.entries.items[i];
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
_ = self.entries.swapRemove(i);
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _keys(self: *const FormData) KeyIterable {
|
||||
return .{ .inner = self.entries.keyIterator() };
|
||||
return .{ .inner = .{ .entries = &self.entries } };
|
||||
}
|
||||
|
||||
pub fn _values(self: *const FormData) ValueIterable {
|
||||
return .{ .inner = self.entries.valueIterator() };
|
||||
return .{ .inner = .{ .entries = &self.entries } };
|
||||
}
|
||||
|
||||
pub fn _entries(self: *const FormData) EntryIterable {
|
||||
return .{ .inner = self.entries.entryIterator() };
|
||||
return .{ .inner = .{ .entries = &self.entries } };
|
||||
}
|
||||
|
||||
pub fn _symbol_iterator(self: *const FormData) EntryIterable {
|
||||
return self._entries();
|
||||
}
|
||||
|
||||
pub fn write(self: *const FormData, encoding_: ?[]const u8, writer: anytype) !void {
|
||||
const encoding = encoding_ orelse {
|
||||
return kv.urlEncode(self.entries, .form, writer);
|
||||
};
|
||||
const FindResult = struct {
|
||||
index: usize,
|
||||
entry: Entry,
|
||||
};
|
||||
|
||||
if (std.ascii.eqlIgnoreCase(encoding, "application/x-www-form-urlencoded")) {
|
||||
return kv.urlEncode(self.entries, .form, writer);
|
||||
fn find(self: *const FormData, key: []const u8) ?FindResult {
|
||||
for (self.entries.items, 0..) |entry, i| {
|
||||
if (std.mem.eql(u8, key, entry.key)) {
|
||||
return .{ .index = i, .entry = entry };
|
||||
}
|
||||
}
|
||||
|
||||
log.warn(.web_api, "not implemented", .{
|
||||
.feature = "form data encoding",
|
||||
.encoding = encoding,
|
||||
});
|
||||
return error.EncodingNotSupported;
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const KeyIterable = iterator.Iterable(kv.KeyIterator, "FormDataKeyIterator");
|
||||
const ValueIterable = iterator.Iterable(kv.ValueIterator, "FormDataValueIterator");
|
||||
const EntryIterable = iterator.Iterable(kv.EntryIterator, "FormDataEntryIterator");
|
||||
const Entry = struct {
|
||||
key: []const u8,
|
||||
value: []const u8,
|
||||
};
|
||||
|
||||
// TODO: handle disabled fieldsets
|
||||
fn collectForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, page: *Page) !kv.List {
|
||||
const arena = page.arena;
|
||||
const KeyIterable = iterator.Iterable(KeyIterator, "FormDataKeyIterator");
|
||||
const ValueIterable = iterator.Iterable(ValueIterator, "FormDataValueIterator");
|
||||
const EntryIterable = iterator.Iterable(EntryIterator, "FormDataEntryIterator");
|
||||
|
||||
const KeyIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(Entry),
|
||||
|
||||
pub fn _next(self: *KeyIterator) ?[]const u8 {
|
||||
const index = self.index;
|
||||
if (index == self.entries.items.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return self.entries.items[index].key;
|
||||
}
|
||||
};
|
||||
|
||||
const ValueIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(Entry),
|
||||
|
||||
pub fn _next(self: *ValueIterator) ?[]const u8 {
|
||||
const index = self.index;
|
||||
if (index == self.entries.items.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
return self.entries.items[index].value;
|
||||
}
|
||||
};
|
||||
|
||||
const EntryIterator = struct {
|
||||
index: usize = 0,
|
||||
entries: *const std.ArrayListUnmanaged(Entry),
|
||||
|
||||
pub fn _next(self: *EntryIterator) ?struct { []const u8, []const u8 } {
|
||||
const index = self.index;
|
||||
if (index == self.entries.items.len) {
|
||||
return null;
|
||||
}
|
||||
self.index += 1;
|
||||
const entry = self.entries.items[index];
|
||||
return .{ entry.key, entry.value };
|
||||
}
|
||||
};
|
||||
|
||||
fn collectForm(arena: Allocator, form: *parser.Form, submitter_: ?*parser.ElementHTML, state: *SessionState) !std.ArrayListUnmanaged(Entry) {
|
||||
const collection = try parser.formGetCollection(form);
|
||||
const len = try parser.htmlCollectionGetLength(collection);
|
||||
|
||||
var entries: kv.List = .{};
|
||||
var entries: std.ArrayListUnmanaged(Entry) = .empty;
|
||||
try entries.ensureTotalCapacity(arena, len);
|
||||
|
||||
var submitter_included = false;
|
||||
const submitter_name_ = try getSubmitterName(submitter_);
|
||||
|
||||
for (0..len) |i| {
|
||||
@@ -137,15 +221,18 @@ fn collectForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, page: *Page
|
||||
const tag = try parser.elementHTMLGetTagType(@as(*parser.ElementHTML, @ptrCast(element)));
|
||||
switch (tag) {
|
||||
.input => {
|
||||
const tpe = try parser.inputGetType(@ptrCast(element));
|
||||
const tpe = try parser.elementGetAttribute(element, "type") orelse "";
|
||||
if (std.ascii.eqlIgnoreCase(tpe, "image")) {
|
||||
if (submitter_name_) |submitter_name| {
|
||||
if (std.mem.eql(u8, submitter_name, name)) {
|
||||
const key_x = try std.fmt.allocPrint(arena, "{s}.x", .{name});
|
||||
const key_y = try std.fmt.allocPrint(arena, "{s}.y", .{name});
|
||||
try entries.appendOwned(arena, key_x, "0");
|
||||
try entries.appendOwned(arena, key_y, "0");
|
||||
submitter_included = true;
|
||||
try entries.append(arena, .{
|
||||
.key = try std.fmt.allocPrint(arena, "{s}.x", .{name}),
|
||||
.value = "0",
|
||||
});
|
||||
try entries.append(arena, .{
|
||||
.key = try std.fmt.allocPrint(arena, "{s}.y", .{name}),
|
||||
.value = "0",
|
||||
});
|
||||
}
|
||||
}
|
||||
continue;
|
||||
@@ -160,52 +247,41 @@ fn collectForm(form: *parser.Form, submitter_: ?*parser.ElementHTML, page: *Page
|
||||
if (submitter_name_ == null or !std.mem.eql(u8, submitter_name_.?, name)) {
|
||||
continue;
|
||||
}
|
||||
submitter_included = true;
|
||||
}
|
||||
const value = try parser.inputGetValue(@ptrCast(element));
|
||||
try entries.appendOwned(arena, name, value);
|
||||
const value = (try parser.elementGetAttribute(element, "value")) orelse "";
|
||||
try entries.append(arena, .{ .key = name, .value = value });
|
||||
},
|
||||
.select => {
|
||||
const select: *parser.Select = @ptrCast(node);
|
||||
try collectSelectValues(arena, select, name, &entries, page);
|
||||
try collectSelectValues(arena, select, name, &entries, state);
|
||||
},
|
||||
.textarea => {
|
||||
const textarea: *parser.TextArea = @ptrCast(node);
|
||||
const value = try parser.textareaGetValue(textarea);
|
||||
try entries.appendOwned(arena, name, value);
|
||||
try entries.append(arena, .{ .key = name, .value = value });
|
||||
},
|
||||
.button => if (submitter_name_) |submitter_name| {
|
||||
if (std.mem.eql(u8, submitter_name, name)) {
|
||||
const value = (try parser.elementGetAttribute(element, "value")) orelse "";
|
||||
try entries.appendOwned(arena, name, value);
|
||||
submitter_included = true;
|
||||
try entries.append(arena, .{ .key = name, .value = value });
|
||||
}
|
||||
},
|
||||
else => {
|
||||
log.warn(.web_api, "unsupported form element", .{ .tag = @tagName(tag) });
|
||||
log.warn("unsupported form element: {s}\n", .{@tagName(tag)});
|
||||
continue;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (submitter_included == false) {
|
||||
if (submitter_name_) |submitter_name| {
|
||||
// this can happen if the submitter is outside the form, but associated
|
||||
// with the form via a form=ID attribute
|
||||
const value = (try parser.elementGetAttribute(@ptrCast(submitter_.?), "value")) orelse "";
|
||||
try entries.appendOwned(arena, submitter_name, value);
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u8, entries: *kv.List, page: *Page) !void {
|
||||
fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u8, entries: *std.ArrayListUnmanaged(Entry), state: *SessionState) !void {
|
||||
const HTMLSelectElement = @import("../html/select.zig").HTMLSelectElement;
|
||||
|
||||
// Go through the HTMLSelectElement because it has specific logic for handling
|
||||
// the default selected option, which libdom doesn't properly handle
|
||||
const selected_index = try HTMLSelectElement.get_selectedIndex(select, page);
|
||||
const selected_index = try HTMLSelectElement.get_selectedIndex(select, state);
|
||||
if (selected_index == -1) {
|
||||
return;
|
||||
}
|
||||
@@ -216,11 +292,11 @@ fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u
|
||||
if (is_multiple == false) {
|
||||
const option = try parser.optionCollectionItem(options, @intCast(selected_index));
|
||||
|
||||
if (try parser.elementGetAttribute(@alignCast(@ptrCast(option)), "disabled") != null) {
|
||||
if (try parser.elementGetAttribute(@ptrCast(option), "disabled") != null) {
|
||||
return;
|
||||
}
|
||||
const value = try parser.optionGetValue(option);
|
||||
return entries.appendOwned(arena, name, value);
|
||||
return entries.append(arena, .{ .key = name, .value = value });
|
||||
}
|
||||
|
||||
const len = try parser.optionCollectionGetLength(options);
|
||||
@@ -228,13 +304,13 @@ fn collectSelectValues(arena: Allocator, select: *parser.Select, name: []const u
|
||||
// we can go directly to the first one
|
||||
for (@intCast(selected_index)..len) |i| {
|
||||
const option = try parser.optionCollectionItem(options, @intCast(i));
|
||||
if (try parser.elementGetAttribute(@alignCast(@ptrCast(option)), "disabled") != null) {
|
||||
if (try parser.elementGetAttribute(@ptrCast(option), "disabled") != null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (try parser.optionGetSelected(option)) {
|
||||
const value = try parser.optionGetValue(option);
|
||||
try entries.appendOwned(arena, name, value);
|
||||
try entries.append(arena, .{ .key = name, .value = value });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -249,7 +325,7 @@ fn getSubmitterName(submitter_: ?*parser.ElementHTML) !?[]const u8 {
|
||||
switch (tag) {
|
||||
.button => return name,
|
||||
.input => {
|
||||
const tpe = try parser.inputGetType(@ptrCast(element));
|
||||
const tpe = (try parser.elementGetAttribute(element, "type")) orelse "";
|
||||
// only an image type can be a sumbitter
|
||||
if (std.ascii.eqlIgnoreCase(tpe, "image") or std.ascii.eqlIgnoreCase(tpe, "submit")) {
|
||||
return name;
|
||||
@@ -268,7 +344,7 @@ test "Browser.FormData" {
|
||||
\\ <input id="is_disabled" disabled value="nope2">
|
||||
\\
|
||||
\\ <input name="txt-1" value="txt-1-v">
|
||||
\\ <input name="txt-2" value="txt-~-v" type=password>
|
||||
\\ <input name="txt-2" value="txt-2-v" type=password>
|
||||
\\
|
||||
\\ <input name="chk-3" value="chk-3-va" type=checkbox>
|
||||
\\ <input name="chk-3" value="chk-3-vb" type=checkbox checked>
|
||||
@@ -366,7 +442,7 @@ test "Browser.FormData" {
|
||||
\\ acc.slice(0, -1)
|
||||
,
|
||||
\\txt-1=txt-1-v
|
||||
\\txt-2=txt-~-v
|
||||
\\txt-2=txt-2-v
|
||||
\\chk-3=chk-3-vb
|
||||
\\chk-3=chk-3-vc
|
||||
\\rdi-1=rdi-1-vc
|
||||
@@ -381,34 +457,3 @@ test "Browser.FormData" {
|
||||
},
|
||||
}, .{});
|
||||
}
|
||||
|
||||
test "Browser.FormData: urlEncode" {
|
||||
var arr: std.ArrayListUnmanaged(u8) = .empty;
|
||||
defer arr.deinit(testing.allocator);
|
||||
|
||||
{
|
||||
var fd = FormData{ .entries = .{} };
|
||||
try testing.expectError(error.EncodingNotSupported, fd.write("unknown", arr.writer(testing.allocator)));
|
||||
|
||||
try fd.write(null, arr.writer(testing.allocator));
|
||||
try testing.expectEqual("", arr.items);
|
||||
|
||||
try fd.write("application/x-www-form-urlencoded", arr.writer(testing.allocator));
|
||||
try testing.expectEqual("", arr.items);
|
||||
}
|
||||
|
||||
{
|
||||
var fd = FormData{ .entries = kv.List.fromOwnedSlice(@constCast(&[_]kv.KeyValue{
|
||||
.{ .key = "a", .value = "1" },
|
||||
.{ .key = "it's over", .value = "9000 !!!" },
|
||||
.{ .key = "em~ot", .value = "ok: ☺" },
|
||||
})) };
|
||||
const expected = "a=1&it%27s+over=9000+%21%21%21&em%7Eot=ok%3A+%E2%98%BA";
|
||||
try fd.write(null, arr.writer(testing.allocator));
|
||||
try testing.expectEqual(expected, arr.items);
|
||||
|
||||
arr.clearRetainingCapacity();
|
||||
try fd.write("application/x-www-form-urlencoded", arr.writer(testing.allocator));
|
||||
try testing.expectEqual(expected, arr.items);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,7 +24,6 @@ const DOMException = @import("../dom/exceptions.zig").DOMException;
|
||||
pub const ProgressEvent = struct {
|
||||
pub const prototype = *Event;
|
||||
pub const Exception = DOMException;
|
||||
pub const union_make_copy = true;
|
||||
|
||||
pub const EventInit = struct {
|
||||
lengthComputable: bool = false,
|
||||
|
||||
@@ -24,15 +24,15 @@ const DOMError = @import("../netsurf.zig").DOMError;
|
||||
const ProgressEvent = @import("progress_event.zig").ProgressEvent;
|
||||
const XMLHttpRequestEventTarget = @import("event_target.zig").XMLHttpRequestEventTarget;
|
||||
|
||||
const log = @import("../../log.zig");
|
||||
const URL = @import("../../url.zig").URL;
|
||||
const Mime = @import("../mime.zig").Mime;
|
||||
const parser = @import("../netsurf.zig");
|
||||
const http = @import("../../http/client.zig");
|
||||
const Page = @import("../page.zig").Page;
|
||||
const Loop = @import("../../runtime/loop.zig").Loop;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
const CookieJar = @import("../storage/storage.zig").CookieJar;
|
||||
|
||||
const log = std.log.scoped(.xhr);
|
||||
|
||||
// XHR interfaces
|
||||
// https://xhr.spec.whatwg.org/#interface-xmlhttprequest
|
||||
pub const Interfaces = .{
|
||||
@@ -79,9 +79,10 @@ const XMLHttpRequestBodyInit = union(enum) {
|
||||
|
||||
pub const XMLHttpRequest = struct {
|
||||
proto: XMLHttpRequestEventTarget = XMLHttpRequestEventTarget{},
|
||||
loop: *Loop,
|
||||
arena: Allocator,
|
||||
request: ?*http.Request = null,
|
||||
request: ?http.Request = null,
|
||||
|
||||
priv_state: PrivState = .new,
|
||||
|
||||
method: http.Request.Method,
|
||||
state: State,
|
||||
@@ -93,7 +94,6 @@ pub const XMLHttpRequest = struct {
|
||||
sync: bool = true,
|
||||
err: ?anyerror = null,
|
||||
last_dispatch: i64 = 0,
|
||||
request_body: ?[]const u8 = null,
|
||||
|
||||
cookie_jar: *CookieJar,
|
||||
// the URI of the page where this request is originating from
|
||||
@@ -241,28 +241,20 @@ pub const XMLHttpRequest = struct {
|
||||
|
||||
const min_delay: u64 = 50000000; // 50ms
|
||||
|
||||
pub fn constructor(page: *Page) !XMLHttpRequest {
|
||||
const arena = page.arena;
|
||||
pub fn constructor(session_state: *SessionState) !XMLHttpRequest {
|
||||
const arena = session_state.arena;
|
||||
return .{
|
||||
.url = null,
|
||||
.arena = arena,
|
||||
.loop = page.loop,
|
||||
.headers = Headers.init(arena),
|
||||
.response_headers = Headers.init(arena),
|
||||
.method = undefined,
|
||||
.state = .unsent,
|
||||
.origin_url = &page.url,
|
||||
.cookie_jar = page.cookie_jar,
|
||||
.url = null,
|
||||
.origin_url = session_state.url,
|
||||
.cookie_jar = session_state.cookie_jar,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn destructor(self: *XMLHttpRequest) void {
|
||||
if (self.request) |req| {
|
||||
req.abort();
|
||||
self.request = null;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reset(self: *XMLHttpRequest) void {
|
||||
self.url = null;
|
||||
|
||||
@@ -280,6 +272,8 @@ pub const XMLHttpRequest = struct {
|
||||
self.response_status = 0;
|
||||
|
||||
self.send_flag = false;
|
||||
|
||||
self.priv_state = .new;
|
||||
}
|
||||
|
||||
pub fn get_readyState(self: *XMLHttpRequest) u16 {
|
||||
@@ -329,6 +323,8 @@ pub const XMLHttpRequest = struct {
|
||||
const arena = self.arena;
|
||||
|
||||
self.url = try self.origin_url.resolve(arena, url);
|
||||
|
||||
log.debug("open url ({s})", .{self.url.?});
|
||||
self.sync = if (asyn) |b| !b else false;
|
||||
|
||||
self.state = .opened;
|
||||
@@ -338,30 +334,19 @@ pub const XMLHttpRequest = struct {
|
||||
// dispatch request event.
|
||||
// errors are logged only.
|
||||
fn dispatchEvt(self: *XMLHttpRequest, typ: []const u8) void {
|
||||
log.debug(.script_event, "dispatch event", .{
|
||||
.type = typ,
|
||||
.source = "xhr",
|
||||
.method = self.method,
|
||||
.url = self.url,
|
||||
});
|
||||
self._dispatchEvt(typ) catch |err| {
|
||||
log.err(.app, "dispatch event error", .{
|
||||
.err = err,
|
||||
.type = typ,
|
||||
.source = "xhr",
|
||||
.method = self.method,
|
||||
.url = self.url,
|
||||
});
|
||||
const evt = parser.eventCreate() catch |e| {
|
||||
return log.err("dispatch event create: {any}", .{e});
|
||||
};
|
||||
}
|
||||
|
||||
fn _dispatchEvt(self: *XMLHttpRequest, typ: []const u8) !void {
|
||||
const evt = try parser.eventCreate();
|
||||
// We can we defer event destroy once the event is dispatched.
|
||||
defer parser.eventDestroy(evt);
|
||||
|
||||
try parser.eventInit(evt, typ, .{ .bubbles = true, .cancelable = true });
|
||||
_ = try parser.eventTargetDispatchEvent(@as(*parser.EventTarget, @ptrCast(self)), evt);
|
||||
parser.eventInit(evt, typ, .{ .bubbles = true, .cancelable = true }) catch |e| {
|
||||
return log.err("dispatch event init: {any}", .{e});
|
||||
};
|
||||
_ = parser.eventTargetDispatchEvent(@as(*parser.EventTarget, @ptrCast(self)), evt) catch |e| {
|
||||
return log.err("dispatch event: {any}", .{e});
|
||||
};
|
||||
}
|
||||
|
||||
fn dispatchProgressEvent(
|
||||
@@ -369,39 +354,22 @@ pub const XMLHttpRequest = struct {
|
||||
typ: []const u8,
|
||||
opts: ProgressEvent.EventInit,
|
||||
) void {
|
||||
log.debug(.script_event, "dispatch progress event", .{
|
||||
.type = typ,
|
||||
.source = "xhr",
|
||||
.method = self.method,
|
||||
.url = self.url,
|
||||
});
|
||||
self._dispatchProgressEvent(typ, opts) catch |err| {
|
||||
log.err(.app, "dispatch progress event error", .{
|
||||
.err = err,
|
||||
.type = typ,
|
||||
.source = "xhr",
|
||||
.method = self.method,
|
||||
.url = self.url,
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
fn _dispatchProgressEvent(
|
||||
self: *XMLHttpRequest,
|
||||
typ: []const u8,
|
||||
opts: ProgressEvent.EventInit,
|
||||
) !void {
|
||||
var evt = try ProgressEvent.constructor(typ, .{
|
||||
log.debug("dispatch progress event: {s}", .{typ});
|
||||
var evt = ProgressEvent.constructor(typ, .{
|
||||
// https://xhr.spec.whatwg.org/#firing-events-using-the-progressevent-interface
|
||||
.lengthComputable = opts.total > 0,
|
||||
.total = opts.total,
|
||||
.loaded = opts.loaded,
|
||||
});
|
||||
}) catch |e| {
|
||||
return log.err("construct progress event: {any}", .{e});
|
||||
};
|
||||
|
||||
_ = try parser.eventTargetDispatchEvent(
|
||||
_ = parser.eventTargetDispatchEvent(
|
||||
@as(*parser.EventTarget, @ptrCast(self)),
|
||||
@as(*parser.Event, @ptrCast(&evt)),
|
||||
);
|
||||
) catch |e| {
|
||||
return log.err("dispatch progress event: {any}", .{e});
|
||||
};
|
||||
}
|
||||
|
||||
const methods = [_]struct {
|
||||
@@ -441,30 +409,18 @@ pub const XMLHttpRequest = struct {
|
||||
}
|
||||
|
||||
// TODO body can be either a XMLHttpRequestBodyInit or a document
|
||||
pub fn _send(self: *XMLHttpRequest, body: ?[]const u8, page: *Page) !void {
|
||||
pub fn _send(self: *XMLHttpRequest, body: ?[]const u8, session_state: *SessionState) !void {
|
||||
if (self.state != .opened) return DOMError.InvalidState;
|
||||
if (self.send_flag) return DOMError.InvalidState;
|
||||
|
||||
log.debug(.http, "request", .{ .method = self.method, .url = self.url, .source = "xhr" });
|
||||
log.debug("{any} {any}", .{ self.method, self.url });
|
||||
|
||||
self.send_flag = true;
|
||||
if (body) |b| {
|
||||
self.request_body = try self.arena.dupe(u8, b);
|
||||
}
|
||||
self.priv_state = .open;
|
||||
|
||||
try page.request_factory.initAsync(
|
||||
page.arena,
|
||||
self.method,
|
||||
&self.url.?.uri,
|
||||
self,
|
||||
onHttpRequestReady,
|
||||
self.loop,
|
||||
);
|
||||
}
|
||||
|
||||
fn onHttpRequestReady(ctx: *anyopaque, request: *http.Request) !void {
|
||||
// on error, our caller will cleanup request
|
||||
const self: *XMLHttpRequest = @alignCast(@ptrCast(ctx));
|
||||
self.request = try session_state.request_factory.create(self.method, &self.url.?.uri);
|
||||
var request = &self.request.?;
|
||||
errdefer request.deinit();
|
||||
|
||||
for (self.headers.list.items) |hdr| {
|
||||
try request.addHeader(hdr.name, hdr.value, .{});
|
||||
@@ -472,7 +428,7 @@ pub const XMLHttpRequest = struct {
|
||||
|
||||
{
|
||||
var arr: std.ArrayListUnmanaged(u8) = .{};
|
||||
try self.cookie_jar.forRequest(&self.url.?.uri, arr.writer(self.arena), .{
|
||||
try self.cookie_jar.forRequest(&self.url.?.uri, arr.writer(session_state.arena), .{
|
||||
.navigation = false,
|
||||
.origin_uri = &self.origin_url.uri,
|
||||
});
|
||||
@@ -486,33 +442,28 @@ pub const XMLHttpRequest = struct {
|
||||
// if the request method is GET or HEAD.
|
||||
// https://xhr.spec.whatwg.org/#the-send()-method
|
||||
// var used_body: ?XMLHttpRequestBodyInit = null;
|
||||
if (self.request_body) |b| {
|
||||
if (body) |b| {
|
||||
if (self.method != .GET and self.method != .HEAD) {
|
||||
request.body = b;
|
||||
request.body = try session_state.arena.dupe(u8, b);
|
||||
try request.addHeader("Content-Type", "text/plain; charset=UTF-8", .{});
|
||||
}
|
||||
}
|
||||
|
||||
try request.sendAsync(self.loop, self, .{});
|
||||
self.request = request;
|
||||
try request.sendAsync(session_state.loop, self, .{});
|
||||
}
|
||||
|
||||
pub fn onHttpResponse(self: *XMLHttpRequest, progress_: anyerror!http.Progress) !void {
|
||||
const progress = progress_ catch |err| {
|
||||
// The request has been closed internally by the client, it isn't safe
|
||||
// for us to keep it around.
|
||||
self.request = null;
|
||||
self.onErr(err);
|
||||
return err;
|
||||
};
|
||||
|
||||
if (progress.first) {
|
||||
const header = progress.header;
|
||||
log.debug(.http, "request header", .{
|
||||
.source = "xhr",
|
||||
.url = self.url,
|
||||
.status = header.status,
|
||||
});
|
||||
log.info("{any} {any} {d}", .{ self.method, self.url, header.status });
|
||||
|
||||
self.priv_state = .done;
|
||||
|
||||
for (header.headers.items) |hdr| {
|
||||
try self.response_headers.append(hdr.name, hdr.value);
|
||||
}
|
||||
@@ -558,16 +509,6 @@ pub const XMLHttpRequest = struct {
|
||||
return;
|
||||
}
|
||||
|
||||
log.info(.http, "request complete", .{
|
||||
.source = "xhr",
|
||||
.url = self.url,
|
||||
.status = self.response_status,
|
||||
});
|
||||
|
||||
// Not that the request is done, the http/client will free the request
|
||||
// object. It isn't safe to keep it around.
|
||||
self.request = null;
|
||||
|
||||
self.state = .done;
|
||||
self.send_flag = false;
|
||||
self.dispatchEvt("readystatechange");
|
||||
@@ -579,23 +520,20 @@ pub const XMLHttpRequest = struct {
|
||||
}
|
||||
|
||||
fn onErr(self: *XMLHttpRequest, err: anyerror) void {
|
||||
self.priv_state = .done;
|
||||
|
||||
self.err = err;
|
||||
self.state = .done;
|
||||
self.send_flag = false;
|
||||
self.dispatchEvt("readystatechange");
|
||||
self.dispatchProgressEvent("error", .{});
|
||||
self.dispatchProgressEvent("loadend", .{});
|
||||
|
||||
const level: log.Level = if (err == DOMError.Abort) .debug else .err;
|
||||
log.log(.http, level, "error", .{
|
||||
.url = self.url,
|
||||
.err = err,
|
||||
.source = "xhr",
|
||||
});
|
||||
log.debug("{any} {any} {any}", .{ self.method, self.url, self.err });
|
||||
}
|
||||
|
||||
pub fn _abort(self: *XMLHttpRequest) void {
|
||||
self.onErr(DOMError.Abort);
|
||||
self.destructor();
|
||||
}
|
||||
|
||||
pub fn get_responseType(self: *XMLHttpRequest) []const u8 {
|
||||
@@ -695,7 +633,7 @@ pub const XMLHttpRequest = struct {
|
||||
// response object to a new ArrayBuffer object representing this’s
|
||||
// received bytes. If this throws an exception, then set this’s
|
||||
// response object to failure and return null.
|
||||
log.err(.web_api, "not implemented", .{ .feature = "XHR ArrayBuffer resposne type" });
|
||||
log.err("response type ArrayBuffer not implemented", .{});
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -704,7 +642,7 @@ pub const XMLHttpRequest = struct {
|
||||
// response object to a new Blob object representing this’s
|
||||
// received bytes with type set to the result of get a final MIME
|
||||
// type for this.
|
||||
log.err(.web_api, "not implemented", .{ .feature = "XHR Blob resposne type" });
|
||||
log.err("response type Blob not implemented", .{});
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -756,8 +694,7 @@ pub const XMLHttpRequest = struct {
|
||||
}
|
||||
|
||||
var fbs = std.io.fixedBufferStream(self.response_bytes.items);
|
||||
const Elements = @import("../html/elements.zig");
|
||||
const doc = parser.documentHTMLParse(fbs.reader(), ccharset, &Elements.createElement) catch {
|
||||
const doc = parser.documentHTMLParse(fbs.reader(), ccharset) catch {
|
||||
self.response_obj = .{ .Failure = {} };
|
||||
return;
|
||||
};
|
||||
@@ -780,7 +717,7 @@ pub const XMLHttpRequest = struct {
|
||||
self.response_bytes.items,
|
||||
.{},
|
||||
) catch |e| {
|
||||
log.warn(.http, "invalid json", .{ .err = e, .url = self.url, .source = "xhr" });
|
||||
log.err("parse JSON: {}", .{e});
|
||||
self.response_obj = .{ .Failure = {} };
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
//
|
||||
const std = @import("std");
|
||||
|
||||
const Page = @import("../page.zig").Page;
|
||||
const SessionState = @import("../env.zig").SessionState;
|
||||
|
||||
const dump = @import("../dump.zig");
|
||||
const parser = @import("../netsurf.zig");
|
||||
@@ -33,12 +33,12 @@ pub const XMLSerializer = struct {
|
||||
return .{};
|
||||
}
|
||||
|
||||
pub fn _serializeToString(_: *const XMLSerializer, root: *parser.Node, page: *Page) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(page.arena);
|
||||
switch (try parser.nodeType(root)) {
|
||||
.document => try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), buf.writer()),
|
||||
.document_type => try dump.writeDocType(@as(*parser.DocumentType, @ptrCast(root)), buf.writer()),
|
||||
else => try dump.writeNode(root, buf.writer()),
|
||||
pub fn _serializeToString(_: *const XMLSerializer, root: *parser.Node, state: *SessionState) ![]const u8 {
|
||||
var buf = std.ArrayList(u8).init(state.arena);
|
||||
if (try parser.nodeType(root) == .document) {
|
||||
try dump.writeHTML(@as(*parser.Document, @ptrCast(root)), buf.writer());
|
||||
} else {
|
||||
try dump.writeNode(root, buf.writer());
|
||||
}
|
||||
return buf.items;
|
||||
}
|
||||
@@ -54,11 +54,3 @@ test "Browser.XMLSerializer" {
|
||||
.{ "s.serializeToString(document.getElementById('para'))", "<p id=\"para\"> And</p>" },
|
||||
}, .{});
|
||||
}
|
||||
test "Browser.XMLSerializer with DOCTYPE" {
|
||||
var runner = try testing.jsRunner(testing.tracking_allocator, .{ .html = "<!DOCTYPE html><html><head></head><body></body></html>" });
|
||||
defer runner.deinit();
|
||||
|
||||
try runner.testCases(&.{
|
||||
.{ "new XMLSerializer().serializeToString(document.doctype)", "<!DOCTYPE html>" },
|
||||
}, .{});
|
||||
}
|
||||
|
||||
@@ -19,11 +19,12 @@
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const parser = @import("../browser/netsurf.zig");
|
||||
|
||||
pub const Id = u32;
|
||||
|
||||
const log = std.log.scoped(.cdp_node);
|
||||
|
||||
const Node = @This();
|
||||
|
||||
id: Id,
|
||||
@@ -212,7 +213,7 @@ pub const Writer = struct {
|
||||
// The only error our jsonStringify method can return is
|
||||
// @TypeOf(w).Error. In other words, our code can't return its own
|
||||
// error, we can only return a writer error. Kinda sucks.
|
||||
log.err(.cdp, "json stringify", .{ .err = err });
|
||||
log.err("json stringify: {}", .{err});
|
||||
return error.OutOfMemory;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const json = std.json;
|
||||
|
||||
const log = @import("../log.zig");
|
||||
const App = @import("../app.zig").App;
|
||||
const Env = @import("../browser/env.zig").Env;
|
||||
const asUint = @import("../str/parser.zig").asUint;
|
||||
@@ -31,6 +30,8 @@ const Inspector = @import("../browser/env.zig").Env.Inspector;
|
||||
const Incrementing = @import("../id.zig").Incrementing;
|
||||
const Notification = @import("../notification.zig").Notification;
|
||||
|
||||
const log = std.log.scoped(.cdp);
|
||||
|
||||
pub const URL_BASE = "chrome://newtab/";
|
||||
pub const LOADER_ID = "LOADERID24DD2FD56CF1EF33C965C79C";
|
||||
|
||||
@@ -71,9 +72,6 @@ pub fn CDPT(comptime TypeProvider: type) type {
|
||||
// Used for processing notifications within a browser context.
|
||||
notification_arena: std.heap.ArenaAllocator,
|
||||
|
||||
// Extra headers to add to all requests. TBD under which conditions this should be reset.
|
||||
extra_headers: std.ArrayListUnmanaged(std.http.Header) = .empty,
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub fn init(app: *App, client: TypeProvider.Client) !Self {
|
||||
@@ -412,13 +410,11 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
}
|
||||
|
||||
pub fn networkEnable(self: *Self) !void {
|
||||
try self.cdp.browser.notification.register(.http_request_fail, self, onHttpRequestFail);
|
||||
try self.cdp.browser.notification.register(.http_request_start, self, onHttpRequestStart);
|
||||
try self.cdp.browser.notification.register(.http_request_complete, self, onHttpRequestComplete);
|
||||
}
|
||||
|
||||
pub fn networkDisable(self: *Self) void {
|
||||
self.cdp.browser.notification.unregister(.http_request_fail, self);
|
||||
self.cdp.browser.notification.unregister(.http_request_start, self);
|
||||
self.cdp.browser.notification.unregister(.http_request_complete, self);
|
||||
}
|
||||
@@ -450,12 +446,6 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
return @import("domains/network.zig").httpRequestStart(self.notification_arena, self, data);
|
||||
}
|
||||
|
||||
pub fn onHttpRequestFail(ctx: *anyopaque, data: *const Notification.RequestFail) !void {
|
||||
const self: *Self = @alignCast(@ptrCast(ctx));
|
||||
defer self.resetNotificationArena();
|
||||
return @import("domains/network.zig").httpRequestFail(self.notification_arena, self, data);
|
||||
}
|
||||
|
||||
pub fn onHttpRequestComplete(ctx: *anyopaque, data: *const Notification.RequestComplete) !void {
|
||||
const self: *Self = @alignCast(@ptrCast(ctx));
|
||||
defer self.resetNotificationArena();
|
||||
@@ -473,25 +463,36 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
}
|
||||
|
||||
pub fn onInspectorResponse(ctx: *anyopaque, _: u32, msg: []const u8) void {
|
||||
if (std.log.defaultLogEnabled(.debug)) {
|
||||
// msg should be {"id":<id>,...
|
||||
std.debug.assert(std.mem.startsWith(u8, msg, "{\"id\":"));
|
||||
|
||||
const id_end = std.mem.indexOfScalar(u8, msg, ',') orelse {
|
||||
log.warn("invalid inspector response message: {s}", .{msg});
|
||||
return;
|
||||
};
|
||||
const id = msg[6..id_end];
|
||||
log.debug("Res (inspector) > id {s}", .{id});
|
||||
}
|
||||
sendInspectorMessage(@alignCast(@ptrCast(ctx)), msg) catch |err| {
|
||||
log.err(.cdp, "send inspector response", .{ .err = err });
|
||||
log.err("Failed to send inspector response: {any}", .{err});
|
||||
};
|
||||
}
|
||||
|
||||
pub fn onInspectorEvent(ctx: *anyopaque, msg: []const u8) void {
|
||||
if (log.enabled(.cdp, .debug)) {
|
||||
if (std.log.defaultLogEnabled(.debug)) {
|
||||
// msg should be {"method":<method>,...
|
||||
std.debug.assert(std.mem.startsWith(u8, msg, "{\"method\":"));
|
||||
const method_end = std.mem.indexOfScalar(u8, msg, ',') orelse {
|
||||
log.err(.cdp, "invalid inspector event", .{ .msg = msg });
|
||||
log.warn("invalid inspector event message: {s}", .{msg});
|
||||
return;
|
||||
};
|
||||
const method = msg[10..method_end];
|
||||
log.debug(.cdp, "inspector event", .{ .method = method });
|
||||
log.debug("Event (inspector) > method {s}", .{method});
|
||||
}
|
||||
|
||||
sendInspectorMessage(@alignCast(@ptrCast(ctx)), msg) catch |err| {
|
||||
log.err(.cdp, "send inspector event", .{ .err = err });
|
||||
log.err("Failed to send inspector event: {any}", .{err});
|
||||
};
|
||||
}
|
||||
|
||||
@@ -517,7 +518,7 @@ pub fn BrowserContext(comptime CDP_T: type) type {
|
||||
|
||||
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||
buf.ensureTotalCapacity(arena.allocator(), message_len) catch |err| {
|
||||
log.err(.cdp, "inspector buffer", .{ .err = err });
|
||||
log.err("Failed to expand inspector buffer: {any}", .{err});
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -555,8 +556,8 @@ const IsolatedWorld = struct {
|
||||
self.executor.deinit();
|
||||
}
|
||||
pub fn removeContext(self: *IsolatedWorld) !void {
|
||||
if (self.executor.js_context == null) return error.NoIsolatedContextToRemove;
|
||||
self.executor.removeJsContext();
|
||||
if (self.executor.scope == null) return error.NoIsolatedContextToRemove;
|
||||
self.executor.endScope();
|
||||
}
|
||||
|
||||
// The isolate world must share at least some of the state with the related page, specifically the DocumentHTML
|
||||
@@ -565,8 +566,8 @@ const IsolatedWorld = struct {
|
||||
// This also means this pointer becomes invalid after removePage untill a new page is created.
|
||||
// Currently we have only 1 page/frame and thus also only 1 state in the isolate world.
|
||||
pub fn createContext(self: *IsolatedWorld, page: *Page) !void {
|
||||
if (self.executor.js_context != null) return error.Only1IsolatedContextSupported;
|
||||
_ = try self.executor.createJsContext(&page.window, page, {}, false);
|
||||
if (self.executor.scope != null) return error.Only1IsolatedContextSupported;
|
||||
_ = try self.executor.startScope(&page.window, &page.state, {}, false);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -259,13 +259,13 @@ fn resolveNode(cmd: anytype) !void {
|
||||
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
|
||||
var js_context = page.main_context;
|
||||
var scope = page.scope;
|
||||
if (params.executionContextId) |context_id| {
|
||||
if (js_context.v8_context.debugContextId() != context_id) {
|
||||
if (scope.context.debugContextId() != context_id) {
|
||||
var isolated_world = bc.isolated_world orelse return error.ContextNotFound;
|
||||
js_context = &(isolated_world.executor.js_context orelse return error.ContextNotFound);
|
||||
scope = &(isolated_world.executor.scope orelse return error.ContextNotFound);
|
||||
|
||||
if (js_context.v8_context.debugContextId() != context_id) return error.ContextNotFound;
|
||||
if (scope.context.debugContextId() != context_id) return error.ContextNotFound;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -275,7 +275,7 @@ fn resolveNode(cmd: anytype) !void {
|
||||
// node._node is a *parser.Node we need this to be able to find its most derived type e.g. Node -> Element -> HTMLElement
|
||||
// So we use the Node.Union when retrieve the value from the environment
|
||||
const remote_object = try bc.inspector.getRemoteObject(
|
||||
js_context,
|
||||
scope,
|
||||
params.objectGroup orelse "",
|
||||
try dom_node.Node.toInterface(node._node),
|
||||
);
|
||||
@@ -368,7 +368,7 @@ fn getNode(arena: Allocator, browser_context: anytype, node_id: ?Node.Id, backen
|
||||
if (object_id) |object_id_| {
|
||||
// Retrieve the object from which ever context it is in.
|
||||
const parser_node = try browser_context.inspector.getNodePtr(arena, object_id_);
|
||||
return try browser_context.node_registry.register(@alignCast(@ptrCast(parser_node)));
|
||||
return try browser_context.node_registry.register(@ptrCast(parser_node));
|
||||
}
|
||||
return error.MissingParams;
|
||||
}
|
||||
@@ -383,7 +383,6 @@ fn getContentQuads(cmd: anytype) !void {
|
||||
})) orelse return error.InvalidParams;
|
||||
|
||||
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
|
||||
const node = try getNode(cmd.arena, bc, params.nodeId, params.backendNodeId, params.objectId);
|
||||
|
||||
@@ -398,7 +397,7 @@ fn getContentQuads(cmd: anytype) !void {
|
||||
// Elements like SVGElement may have multiple quads.
|
||||
|
||||
const element = parser.nodeToElement(node._node);
|
||||
const rect = try Element._getBoundingClientRect(element, page);
|
||||
const rect = try Element._getBoundingClientRect(element, &bc.session.page.?.state);
|
||||
const quad = rectToQuad(rect);
|
||||
|
||||
return cmd.sendResult(.{ .quads = &.{quad} }, .{});
|
||||
@@ -412,7 +411,6 @@ fn getBoxModel(cmd: anytype) !void {
|
||||
})) orelse return error.InvalidParams;
|
||||
|
||||
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
|
||||
const node = try getNode(cmd.arena, bc, params.nodeId, params.backendNodeId, params.objectId);
|
||||
|
||||
@@ -420,7 +418,7 @@ fn getBoxModel(cmd: anytype) !void {
|
||||
if (try parser.nodeType(node._node) != .element) return error.NodeIsNotAnElement;
|
||||
const element = parser.nodeToElement(node._node);
|
||||
|
||||
const rect = try Element._getBoundingClientRect(element, page);
|
||||
const rect = try Element._getBoundingClientRect(element, &bc.session.page.?.state);
|
||||
const quad = rectToQuad(rect);
|
||||
|
||||
return cmd.sendResult(.{ .model = BoxModel{
|
||||
|
||||
@@ -21,60 +21,14 @@ const Page = @import("../../browser/page.zig").Page;
|
||||
|
||||
pub fn processMessage(cmd: anytype) !void {
|
||||
const action = std.meta.stringToEnum(enum {
|
||||
dispatchKeyEvent,
|
||||
dispatchMouseEvent,
|
||||
}, cmd.input.action) orelse return error.UnknownMethod;
|
||||
|
||||
switch (action) {
|
||||
.dispatchKeyEvent => return dispatchKeyEvent(cmd),
|
||||
.dispatchMouseEvent => return dispatchMouseEvent(cmd),
|
||||
}
|
||||
}
|
||||
|
||||
// https://chromedevtools.github.io/devtools-protocol/tot/Input/#method-dispatchKeyEvent
|
||||
fn dispatchKeyEvent(cmd: anytype) !void {
|
||||
const params = (try cmd.params(struct {
|
||||
type: Type,
|
||||
key: []const u8 = "",
|
||||
code: []const u8 = "",
|
||||
modifiers: u4 = 0,
|
||||
// Many optional parameters are not implemented yet, see documentation url.
|
||||
|
||||
const Type = enum {
|
||||
keyDown,
|
||||
keyUp,
|
||||
rawKeyDown,
|
||||
char,
|
||||
};
|
||||
})) orelse return error.InvalidParams;
|
||||
|
||||
try cmd.sendResult(null, .{});
|
||||
|
||||
// quickly ignore types we know we don't handle
|
||||
switch (params.type) {
|
||||
.keyUp, .rawKeyDown, .char => return,
|
||||
.keyDown => {},
|
||||
}
|
||||
|
||||
const bc = cmd.browser_context orelse return;
|
||||
const page = bc.session.currentPage() orelse return;
|
||||
|
||||
const keyboard_event = Page.KeyboardEvent{
|
||||
.key = params.key,
|
||||
.code = params.code,
|
||||
.type = switch (params.type) {
|
||||
.keyDown => .keydown,
|
||||
else => unreachable,
|
||||
},
|
||||
.alt = params.modifiers & 1 == 1,
|
||||
.ctrl = params.modifiers & 2 == 2,
|
||||
.meta = params.modifiers & 4 == 4,
|
||||
.shift = params.modifiers & 8 == 8,
|
||||
};
|
||||
try page.keyboardEvent(keyboard_event);
|
||||
// result already sent
|
||||
}
|
||||
|
||||
// https://chromedevtools.github.io/devtools-protocol/tot/Input/#method-dispatchMouseEvent
|
||||
fn dispatchMouseEvent(cmd: anytype) !void {
|
||||
const params = (try cmd.params(struct {
|
||||
@@ -136,7 +90,7 @@ fn clickNavigate(cmd: anytype, uri: std.Uri) !void {
|
||||
.disposition = "currentTab",
|
||||
}, .{ .session_id = bc.session_id.? });
|
||||
|
||||
try bc.session.removePage();
|
||||
bc.session.removePage();
|
||||
_ = try bc.session.createPage(null);
|
||||
|
||||
try @import("page.zig").navigateToUrl(cmd, url, false);
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
|
||||
const std = @import("std");
|
||||
const Notification = @import("../../notification.zig").Notification;
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -27,14 +26,12 @@ pub fn processMessage(cmd: anytype) !void {
|
||||
enable,
|
||||
disable,
|
||||
setCacheDisabled,
|
||||
setExtraHTTPHeaders,
|
||||
}, cmd.input.action) orelse return error.UnknownMethod;
|
||||
|
||||
switch (action) {
|
||||
.enable => return enable(cmd),
|
||||
.disable => return disable(cmd),
|
||||
.setCacheDisabled => return cmd.sendResult(null, .{}),
|
||||
.setExtraHTTPHeaders => return setExtraHTTPHeaders(cmd),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,60 +47,6 @@ fn disable(cmd: anytype) !void {
|
||||
return cmd.sendResult(null, .{});
|
||||
}
|
||||
|
||||
fn setExtraHTTPHeaders(cmd: anytype) !void {
|
||||
const params = (try cmd.params(struct {
|
||||
headers: std.json.ArrayHashMap([]const u8),
|
||||
})) orelse return error.InvalidParams;
|
||||
|
||||
const bc = cmd.browser_context orelse return error.BrowserContextNotLoaded;
|
||||
|
||||
// Copy the headers onto the browser context arena
|
||||
const arena = bc.arena;
|
||||
const extra_headers = &bc.cdp.extra_headers;
|
||||
|
||||
extra_headers.clearRetainingCapacity();
|
||||
try extra_headers.ensureTotalCapacity(arena, params.headers.map.count());
|
||||
var it = params.headers.map.iterator();
|
||||
while (it.next()) |header| {
|
||||
extra_headers.appendAssumeCapacity(.{ .name = try arena.dupe(u8, header.key_ptr.*), .value = try arena.dupe(u8, header.value_ptr.*) });
|
||||
}
|
||||
|
||||
return cmd.sendResult(null, .{});
|
||||
}
|
||||
|
||||
// Upsert a header into the headers array.
|
||||
// returns true if the header was added, false if it was updated
|
||||
fn putAssumeCapacity(headers: *std.ArrayListUnmanaged(std.http.Header), extra: std.http.Header) bool {
|
||||
for (headers.items) |*header| {
|
||||
if (std.mem.eql(u8, header.name, extra.name)) {
|
||||
header.value = extra.value;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
headers.appendAssumeCapacity(extra);
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn httpRequestFail(arena: Allocator, bc: anytype, request: *const Notification.RequestFail) !void {
|
||||
// It's possible that the request failed because we aborted when the client
|
||||
// sent Target.closeTarget. In that case, bc.session_id will be cleared
|
||||
// already, and we can skip sending these messages to the client.
|
||||
const session_id = bc.session_id orelse return;
|
||||
|
||||
// Isn't possible to do a network request within a Browser (which our
|
||||
// notification is tied to), without a page.
|
||||
std.debug.assert(bc.session.page != null);
|
||||
|
||||
// We're missing a bunch of fields, but, for now, this seems like enough
|
||||
try bc.cdp.sendEvent("Network.loadingFailed", .{
|
||||
.requestId = try std.fmt.allocPrint(arena, "REQ-{d}", .{request.id}),
|
||||
// Seems to be what chrome answers with. I assume it depends on the type of error?
|
||||
.type = "Ping",
|
||||
.errorText = request.err,
|
||||
.canceled = false,
|
||||
}, .{ .session_id = session_id });
|
||||
}
|
||||
|
||||
pub fn httpRequestStart(arena: Allocator, bc: anytype, request: *const Notification.RequestStart) !void {
|
||||
// Isn't possible to do a network request within a Browser (which our
|
||||
// notification is tied to), without a page.
|
||||
@@ -116,13 +59,6 @@ pub fn httpRequestStart(arena: Allocator, bc: anytype, request: *const Notificat
|
||||
const target_id = bc.target_id orelse unreachable;
|
||||
const page = bc.session.currentPage() orelse unreachable;
|
||||
|
||||
// Modify request with extra CDP headers
|
||||
try request.headers.ensureTotalCapacity(request.arena, request.headers.items.len + cdp.extra_headers.items.len);
|
||||
for (cdp.extra_headers.items) |extra| {
|
||||
const new = putAssumeCapacity(request.headers, extra);
|
||||
if (!new) log.debug(.cdp, "request header overwritten", .{ .name = extra.name });
|
||||
}
|
||||
|
||||
const document_url = try urlToString(arena, &page.url.uri, .{
|
||||
.scheme = true,
|
||||
.authentication = true,
|
||||
@@ -144,8 +80,8 @@ pub fn httpRequestStart(arena: Allocator, bc: anytype, request: *const Notificat
|
||||
});
|
||||
|
||||
var headers: std.StringArrayHashMapUnmanaged([]const u8) = .empty;
|
||||
try headers.ensureTotalCapacity(arena, request.headers.items.len);
|
||||
for (request.headers.items) |header| {
|
||||
try headers.ensureTotalCapacity(arena, request.headers.len);
|
||||
for (request.headers) |header| {
|
||||
headers.putAssumeCapacity(header.name, header.value);
|
||||
}
|
||||
|
||||
@@ -193,13 +129,13 @@ pub fn httpRequestComplete(arena: Allocator, bc: anytype, request: *const Notifi
|
||||
// We're missing a bunch of fields, but, for now, this seems like enough
|
||||
try cdp.sendEvent("Network.responseReceived", .{
|
||||
.requestId = try std.fmt.allocPrint(arena, "REQ-{d}", .{request.id}),
|
||||
.frameId = target_id,
|
||||
.loaderId = bc.loader_id,
|
||||
.response = .{
|
||||
.url = url,
|
||||
.status = request.status,
|
||||
.headers = std.json.ArrayHashMap([]const u8){ .map = headers },
|
||||
},
|
||||
.frameId = target_id,
|
||||
}, .{ .session_id = session_id });
|
||||
}
|
||||
|
||||
@@ -208,30 +144,3 @@ fn urlToString(arena: Allocator, url: *const std.Uri, opts: std.Uri.WriteToStrea
|
||||
try url.writeToStream(opts, buf.writer(arena));
|
||||
return buf.items;
|
||||
}
|
||||
|
||||
const testing = @import("../testing.zig");
|
||||
test "cdp.network setExtraHTTPHeaders" {
|
||||
var ctx = testing.context();
|
||||
defer ctx.deinit();
|
||||
|
||||
// _ = try ctx.loadBrowserContext(.{ .id = "NID-A", .session_id = "NESI-A" });
|
||||
try ctx.processMessage(.{ .id = 10, .method = "Target.createTarget", .params = .{ .url = "about/blank" } });
|
||||
|
||||
try ctx.processMessage(.{
|
||||
.id = 3,
|
||||
.method = "Network.setExtraHTTPHeaders",
|
||||
.params = .{ .headers = .{ .foo = "bar" } },
|
||||
});
|
||||
|
||||
try ctx.processMessage(.{
|
||||
.id = 4,
|
||||
.method = "Network.setExtraHTTPHeaders",
|
||||
.params = .{ .headers = .{ .food = "bars" } },
|
||||
});
|
||||
|
||||
const bc = ctx.cdp().browser_context.?;
|
||||
try testing.expectEqual(bc.cdp.extra_headers.items.len, 1);
|
||||
|
||||
try ctx.processMessage(.{ .id = 5, .method = "Target.attachToTarget", .params = .{ .targetId = bc.target_id.? } });
|
||||
try testing.expectEqual(bc.cdp.extra_headers.items.len, 0);
|
||||
}
|
||||
|
||||
@@ -117,14 +117,14 @@ fn createIsolatedWorld(cmd: anytype) !void {
|
||||
const world = try bc.createIsolatedWorld(params.worldName, params.grantUniveralAccess);
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
try pageCreated(bc, page);
|
||||
const js_context = &world.executor.js_context.?;
|
||||
const scope = &world.executor.scope.?;
|
||||
|
||||
// Create the auxdata json for the contextCreated event
|
||||
// Calling contextCreated will assign a Id to the context and send the contextCreated event
|
||||
const aux_data = try std.fmt.allocPrint(cmd.arena, "{{\"isDefault\":false,\"type\":\"isolated\",\"frameId\":\"{s}\"}}", .{params.frameId});
|
||||
bc.inspector.contextCreated(js_context, world.name, "", aux_data, false);
|
||||
bc.inspector.contextCreated(scope, world.name, "", aux_data, false);
|
||||
|
||||
return cmd.sendResult(.{ .executionContextId = js_context.v8_context.debugContextId() }, .{});
|
||||
return cmd.sendResult(.{ .executionContextId = scope.context.debugContextId() }, .{});
|
||||
}
|
||||
|
||||
fn navigate(cmd: anytype) !void {
|
||||
@@ -163,38 +163,24 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
|
||||
std.debug.assert(bc.session.page != null);
|
||||
|
||||
var cdp = bc.cdp;
|
||||
|
||||
if (event.opts.reason != .address_bar) {
|
||||
bc.loader_id = bc.cdp.loader_id_gen.next();
|
||||
}
|
||||
|
||||
const loader_id = bc.loader_id;
|
||||
const target_id = bc.target_id orelse unreachable;
|
||||
const session_id = bc.session_id orelse unreachable;
|
||||
|
||||
bc.reset();
|
||||
|
||||
const reason_: ?[]const u8 = switch (event.opts.reason) {
|
||||
.anchor => "anchorClick",
|
||||
.script => "scriptInitiated",
|
||||
.form => switch (event.opts.method) {
|
||||
.GET => "formSubmissionGet",
|
||||
.POST => "formSubmissionPost",
|
||||
else => unreachable,
|
||||
},
|
||||
.address_bar => null,
|
||||
};
|
||||
if (reason_) |reason| {
|
||||
const is_anchor = event.opts.reason == .anchor;
|
||||
if (is_anchor) {
|
||||
try cdp.sendEvent("Page.frameScheduledNavigation", .{
|
||||
.frameId = target_id,
|
||||
.delay = 0,
|
||||
.reason = reason,
|
||||
.reason = "anchorClick",
|
||||
.url = event.url.raw,
|
||||
}, .{ .session_id = session_id });
|
||||
|
||||
try cdp.sendEvent("Page.frameRequestedNavigation", .{
|
||||
.frameId = target_id,
|
||||
.reason = reason,
|
||||
.reason = "anchorClick",
|
||||
.url = event.url.raw,
|
||||
.disposition = "currentTab",
|
||||
}, .{ .session_id = session_id });
|
||||
@@ -238,7 +224,7 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
|
||||
}, .{ .session_id = session_id });
|
||||
}
|
||||
|
||||
if (reason_ != null) {
|
||||
if (is_anchor) {
|
||||
try cdp.sendEvent("Page.frameClearedScheduledNavigation", .{
|
||||
.frameId = target_id,
|
||||
}, .{ .session_id = session_id });
|
||||
@@ -253,7 +239,7 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
|
||||
const page = bc.session.currentPage() orelse return error.PageNotLoaded;
|
||||
const aux_data = try std.fmt.allocPrint(arena, "{{\"isDefault\":true,\"type\":\"default\",\"frameId\":\"{s}\"}}", .{target_id});
|
||||
bc.inspector.contextCreated(
|
||||
page.main_context,
|
||||
page.scope,
|
||||
"",
|
||||
try page.origin(arena),
|
||||
aux_data,
|
||||
@@ -264,7 +250,7 @@ pub fn pageNavigate(arena: Allocator, bc: anytype, event: *const Notification.Pa
|
||||
const aux_json = try std.fmt.allocPrint(arena, "{{\"isDefault\":false,\"type\":\"isolated\",\"frameId\":\"{s}\"}}", .{target_id});
|
||||
// Calling contextCreated will assign a new Id to the context and send the contextCreated event
|
||||
bc.inspector.contextCreated(
|
||||
&isolated_world.executor.js_context.?,
|
||||
&isolated_world.executor.scope.?,
|
||||
isolated_world.name,
|
||||
"://",
|
||||
aux_json,
|
||||
@@ -286,7 +272,7 @@ pub fn pageCreated(bc: anytype, page: *Page) !void {
|
||||
try isolated_world.createContext(page);
|
||||
|
||||
const polyfill = @import("../../browser/polyfill/polyfill.zig");
|
||||
try polyfill.load(bc.arena, &isolated_world.executor.js_context.?);
|
||||
try polyfill.load(bc.arena, &isolated_world.executor.scope.?);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,8 @@
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const log = @import("../../log.zig");
|
||||
|
||||
const log = std.log.scoped(.cdp);
|
||||
|
||||
// TODO: hard coded IDs
|
||||
const LOADER_ID = "LOADERID42AA389647D702B4D805F49A";
|
||||
@@ -127,7 +128,7 @@ fn createTarget(cmd: anytype) !void {
|
||||
{
|
||||
const aux_data = try std.fmt.allocPrint(cmd.arena, "{{\"isDefault\":true,\"type\":\"default\",\"frameId\":\"{s}\"}}", .{target_id});
|
||||
bc.inspector.contextCreated(
|
||||
page.main_context,
|
||||
page.scope,
|
||||
"",
|
||||
try page.origin(cmd.arena),
|
||||
aux_data,
|
||||
@@ -300,7 +301,7 @@ fn sendMessageToTarget(cmd: anytype) !void {
|
||||
};
|
||||
|
||||
cmd.cdp.dispatch(cmd.arena, &capture, params.message) catch |err| {
|
||||
log.err(.cdp, "internal dispatch error", .{ .err = err, .id = cmd.input.id, .message = params.message });
|
||||
log.err("send message {d} ({s}): {any}", .{ cmd.input.id orelse -1, params.message, err });
|
||||
return err;
|
||||
};
|
||||
|
||||
@@ -389,9 +390,6 @@ fn doAttachtoTarget(cmd: anytype, target_id: []const u8) !void {
|
||||
std.debug.assert(bc.session_id == null);
|
||||
const session_id = cmd.cdp.session_id_gen.next();
|
||||
|
||||
// extra_headers should not be kept on a new page or tab, currently we have only 1 page, we clear it just in case
|
||||
bc.cdp.extra_headers.clearRetainingCapacity();
|
||||
|
||||
try cmd.sendEvent("Target.attachedToTarget", AttachToTarget{
|
||||
.sessionId = session_id,
|
||||
.targetInfo = TargetInfo{
|
||||
|
||||
@@ -27,11 +27,12 @@ const MemoryPool = std.heap.MemoryPool;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
|
||||
const tls = @import("tls");
|
||||
const log = @import("../log.zig");
|
||||
const IO = @import("../runtime/loop.zig").IO;
|
||||
const Loop = @import("../runtime/loop.zig").Loop;
|
||||
const Notification = @import("../notification.zig").Notification;
|
||||
|
||||
const log = std.log.scoped(.http_client);
|
||||
|
||||
// We might need to peek at the body to try and sniff the content-type.
|
||||
// While we only need a few bytes, in most cases we need to ignore leading
|
||||
// whitespace, so we want to get a reasonable-sized chunk.
|
||||
@@ -51,20 +52,18 @@ pub const Client = struct {
|
||||
root_ca: tls.config.CertBundle,
|
||||
tls_verify_host: bool = true,
|
||||
connection_manager: ConnectionManager,
|
||||
request_pool: std.heap.MemoryPool(Request),
|
||||
|
||||
const Opts = struct {
|
||||
max_concurrent: usize = 3,
|
||||
http_proxy: ?std.Uri = null,
|
||||
tls_verify_host: bool = true,
|
||||
http_proxy: ?std.Uri = null,
|
||||
max_idle_connection: usize = 10,
|
||||
};
|
||||
|
||||
pub fn init(allocator: Allocator, opts: Opts) !Client {
|
||||
pub fn init(allocator: Allocator, max_concurrent: usize, opts: Opts) !Client {
|
||||
var root_ca: tls.config.CertBundle = if (builtin.is_test) .{} else try tls.config.CertBundle.fromSystem(allocator);
|
||||
errdefer root_ca.deinit(allocator);
|
||||
|
||||
const state_pool = try StatePool.init(allocator, opts.max_concurrent);
|
||||
const state_pool = try StatePool.init(allocator, max_concurrent);
|
||||
errdefer state_pool.deinit(allocator);
|
||||
|
||||
const connection_manager = ConnectionManager.init(allocator, opts.max_idle_connection);
|
||||
@@ -78,7 +77,6 @@ pub const Client = struct {
|
||||
.http_proxy = opts.http_proxy,
|
||||
.tls_verify_host = opts.tls_verify_host,
|
||||
.connection_manager = connection_manager,
|
||||
.request_pool = std.heap.MemoryPool(Request).init(allocator),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -89,161 +87,36 @@ pub const Client = struct {
|
||||
}
|
||||
self.state_pool.deinit(allocator);
|
||||
self.connection_manager.deinit();
|
||||
self.request_pool.deinit();
|
||||
}
|
||||
|
||||
pub fn request(self: *Client, method: Request.Method, uri: *const Uri) !*Request {
|
||||
const state = self.state_pool.acquireWait();
|
||||
errdefer self.state_pool.release(state);
|
||||
pub fn request(self: *Client, method: Request.Method, uri: *const Uri) !Request {
|
||||
const state = self.state_pool.acquire();
|
||||
|
||||
const req = try self.request_pool.create();
|
||||
errdefer self.request_pool.destroy(req);
|
||||
|
||||
req.* = try Request.init(self, state, method, uri);
|
||||
return req;
|
||||
}
|
||||
|
||||
pub fn initAsync(
|
||||
self: *Client,
|
||||
arena: Allocator,
|
||||
method: Request.Method,
|
||||
uri: *const Uri,
|
||||
ctx: *anyopaque,
|
||||
callback: AsyncQueue.Callback,
|
||||
loop: *Loop,
|
||||
opts: RequestOpts,
|
||||
) !void {
|
||||
|
||||
// See the page's DelayedNavitation for why we're doing this. TL;DR -
|
||||
// we need to keep 1 slot available for the blocking page navigation flow
|
||||
// (Almost worth keeping a dedicate State just for that flow, but keep
|
||||
// thinking we need a more permanent solution (i.e. making everything
|
||||
// non-blocking).
|
||||
if (self.freeSlotCount() > 1) {
|
||||
if (self.state_pool.acquireOrNull()) |state| {
|
||||
// if we have state ready, we can skip the loop and immediately
|
||||
// kick this request off.
|
||||
return self.asyncRequestReady(method, uri, ctx, callback, state, opts);
|
||||
}
|
||||
errdefer {
|
||||
state.reset();
|
||||
self.state_pool.release(state);
|
||||
}
|
||||
|
||||
// This cannot be a client-owned MemoryPool. The page can end before
|
||||
// this is ever completed (and the check callback will never be called).
|
||||
// As long as the loop doesn't guarantee that callbacks will be called,
|
||||
// this _has_ to be the page arena.
|
||||
const queue = try arena.create(AsyncQueue);
|
||||
queue.* = .{
|
||||
.ctx = ctx,
|
||||
.uri = uri,
|
||||
.opts = opts,
|
||||
.client = self,
|
||||
.method = method,
|
||||
.callback = callback,
|
||||
.node = .{ .func = AsyncQueue.check },
|
||||
};
|
||||
_ = try loop.timeout(10 * std.time.ns_per_ms, &queue.node);
|
||||
return Request.init(self, state, method, uri);
|
||||
}
|
||||
|
||||
// Either called directly from initAsync (if we have a state ready)
|
||||
// Or from when the AsyncQueue(T) is ready.
|
||||
fn asyncRequestReady(
|
||||
self: *Client,
|
||||
method: Request.Method,
|
||||
uri: *const Uri,
|
||||
ctx: *anyopaque,
|
||||
callback: AsyncQueue.Callback,
|
||||
state: *State,
|
||||
opts: RequestOpts,
|
||||
) !void {
|
||||
errdefer self.state_pool.release(state);
|
||||
|
||||
// We need the request on the heap, because it can have a longer lifetime
|
||||
// than the code making the request. That sounds odd, but consider the
|
||||
// case of an XHR request: it can still be inflight (e.g. waiting for
|
||||
// the response) when the page gets unloaded. Once the page is unloaded
|
||||
// the page arena is reset and the XHR instance becomes invalid. If the
|
||||
// XHR instance owns the `Request`, we'd crash once an async callback
|
||||
// executes.
|
||||
const req = try self.request_pool.create();
|
||||
errdefer self.request_pool.destroy(req);
|
||||
|
||||
req.* = try Request.init(self, state, method, uri);
|
||||
if (opts.notification) |notification| {
|
||||
req.notification = notification;
|
||||
}
|
||||
|
||||
errdefer req.deinit();
|
||||
try callback(ctx, req);
|
||||
}
|
||||
|
||||
pub fn requestFactory(self: *Client, opts: RequestOpts) RequestFactory {
|
||||
pub fn requestFactory(self: *Client, notification: ?*Notification) RequestFactory {
|
||||
return .{
|
||||
.opts = opts,
|
||||
.client = self,
|
||||
.notification = notification,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn freeSlotCount(self: *Client) usize {
|
||||
return self.state_pool.freeSlotCount();
|
||||
}
|
||||
};
|
||||
|
||||
const RequestOpts = struct {
|
||||
notification: ?*Notification = null,
|
||||
};
|
||||
|
||||
// A factory for creating requests with a given set of options.
|
||||
pub const RequestFactory = struct {
|
||||
client: *Client,
|
||||
opts: RequestOpts,
|
||||
notification: ?*Notification,
|
||||
|
||||
pub fn initAsync(
|
||||
self: RequestFactory,
|
||||
arena: Allocator,
|
||||
method: Request.Method,
|
||||
uri: *const Uri,
|
||||
ctx: *anyopaque,
|
||||
callback: AsyncQueue.Callback,
|
||||
loop: *Loop,
|
||||
) !void {
|
||||
return self.client.initAsync(arena, method, uri, ctx, callback, loop, self.opts);
|
||||
}
|
||||
};
|
||||
|
||||
const AsyncQueue = struct {
|
||||
ctx: *anyopaque,
|
||||
method: Request.Method,
|
||||
uri: *const Uri,
|
||||
client: *Client,
|
||||
opts: RequestOpts,
|
||||
node: Loop.CallbackNode,
|
||||
callback: Callback,
|
||||
|
||||
const Callback = *const fn (*anyopaque, *Request) anyerror!void;
|
||||
|
||||
fn check(node: *Loop.CallbackNode, repeat_delay: *?u63) void {
|
||||
const self: *AsyncQueue = @fieldParentPtr("node", node);
|
||||
self._check(repeat_delay) catch |err| {
|
||||
log.err(.http_client, "async queue check", .{ .err = err });
|
||||
};
|
||||
}
|
||||
|
||||
fn _check(self: *AsyncQueue, repeat_delay: *?u63) !void {
|
||||
const client = self.client;
|
||||
const state = client.state_pool.acquireOrNull() orelse {
|
||||
// re-run this function in 10 milliseconds
|
||||
repeat_delay.* = 10 * std.time.ns_per_ms;
|
||||
return;
|
||||
};
|
||||
|
||||
try client.asyncRequestReady(
|
||||
self.method,
|
||||
self.uri,
|
||||
self.ctx,
|
||||
self.callback,
|
||||
state,
|
||||
self.opts,
|
||||
);
|
||||
pub fn create(self: RequestFactory, method: Request.Method, uri: *const Uri) !Request {
|
||||
var req = try self.client.request(method, uri);
|
||||
req.notification = self.notification;
|
||||
return req;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -366,24 +239,12 @@ pub const Request = struct {
|
||||
// Because of things like redirects and error handling, it is possible for
|
||||
// the notification functions to be called multiple times, so we guard them
|
||||
// with these booleans
|
||||
_notified_fail: bool,
|
||||
_notified_start: bool,
|
||||
_notified_complete: bool,
|
||||
|
||||
// The notifier that we emit request notifications to, if any.
|
||||
notification: ?*Notification,
|
||||
|
||||
// Aborting an async request is complicated, as we need to wait until all
|
||||
// in-flight IO events are completed. Our AsyncHandler is a generic type
|
||||
// that we don't have the necessary type information for in the Request,
|
||||
// so we need to rely on anyopaque.
|
||||
_aborter: ?Aborter,
|
||||
|
||||
const Aborter = struct {
|
||||
ctx: *anyopaque,
|
||||
func: *const fn (*anyopaque) void,
|
||||
};
|
||||
|
||||
pub const Method = enum {
|
||||
GET,
|
||||
PUT,
|
||||
@@ -422,12 +283,10 @@ pub const Request = struct {
|
||||
._request_host = decomposed.request_host,
|
||||
._state = state,
|
||||
._client = client,
|
||||
._aborter = null,
|
||||
._connection = null,
|
||||
._keepalive = false,
|
||||
._redirect_count = 0,
|
||||
._has_host_header = false,
|
||||
._notified_fail = false,
|
||||
._notified_start = false,
|
||||
._notified_complete = false,
|
||||
._connection_from_keepalive = false,
|
||||
@@ -437,17 +296,8 @@ pub const Request = struct {
|
||||
|
||||
pub fn deinit(self: *Request) void {
|
||||
self.releaseConnection();
|
||||
_ = self._state.reset();
|
||||
self._client.state_pool.release(self._state);
|
||||
self._client.request_pool.destroy(self);
|
||||
}
|
||||
|
||||
pub fn abort(self: *Request) void {
|
||||
self.requestFailed("aborted");
|
||||
const aborter = self._aborter orelse {
|
||||
self.deinit();
|
||||
return;
|
||||
};
|
||||
aborter.func(aborter.ctx);
|
||||
}
|
||||
|
||||
const DecomposedURL = struct {
|
||||
@@ -502,7 +352,7 @@ pub const Request = struct {
|
||||
|
||||
self._client.connection_manager.keepIdle(connection) catch |err| {
|
||||
self.destroyConnection(connection);
|
||||
log.err(.http_client, "release to pool error", .{ .err = err });
|
||||
log.err("failed to release connection to pool: {}", .{err});
|
||||
};
|
||||
}
|
||||
|
||||
@@ -570,10 +420,6 @@ pub const Request = struct {
|
||||
}
|
||||
|
||||
fn doSendSync(self: *Request, use_pool: bool) anyerror!Response {
|
||||
// https://github.com/ziglang/zig/issues/20369
|
||||
// errdefer |err| self.requestFailed(@errorName(err));
|
||||
errdefer self.requestFailed("network error");
|
||||
|
||||
if (use_pool) {
|
||||
if (self.findExistingConnection(true)) |connection| {
|
||||
self._connection = connection;
|
||||
@@ -611,12 +457,7 @@ pub const Request = struct {
|
||||
|
||||
var handler = SyncHandler{ .request = self };
|
||||
return handler.send() catch |err| {
|
||||
log.warn(.http_client, "sync error", .{
|
||||
.err = err,
|
||||
.method = self.method,
|
||||
.url = self.request_uri,
|
||||
.redirects = self._redirect_count,
|
||||
});
|
||||
log.warn("HTTP error: {any} ({any} {any} {d})", .{ err, self.method, self.request_uri, self._redirect_count });
|
||||
return err;
|
||||
};
|
||||
}
|
||||
@@ -685,7 +526,7 @@ pub const Request = struct {
|
||||
.host = self._connect_host,
|
||||
.root_ca = self._client.root_ca,
|
||||
.insecure_skip_verify = self._tls_verify_host == false,
|
||||
// .key_log_callback = tls.config.key_log.callback,
|
||||
.key_log_callback = tls.config.key_log.callback,
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -696,15 +537,9 @@ pub const Request = struct {
|
||||
|
||||
if (self._connection_from_keepalive) {
|
||||
// we're already connected
|
||||
async_handler.pending_connect = false;
|
||||
return async_handler.conn.connected();
|
||||
}
|
||||
|
||||
self._aborter = .{
|
||||
.ctx = async_handler,
|
||||
.func = AsyncHandlerT.abort,
|
||||
};
|
||||
|
||||
return loop.connect(
|
||||
AsyncHandlerT,
|
||||
async_handler,
|
||||
@@ -768,7 +603,7 @@ pub const Request = struct {
|
||||
// to a GET.
|
||||
self.method = .GET;
|
||||
}
|
||||
log.debug(.http, "redirecting", .{ .method = self.method, .url = self.request_uri });
|
||||
log.info("redirecting to: {any} {any}", .{ self.method, self.request_uri });
|
||||
|
||||
if (self.body != null and self.method == .GET) {
|
||||
// If we have a body and the method is a GET, then we must be following
|
||||
@@ -857,28 +692,14 @@ pub const Request = struct {
|
||||
}
|
||||
self._notified_start = true;
|
||||
notification.dispatch(.http_request_start, &.{
|
||||
.arena = self.arena,
|
||||
.id = self.id,
|
||||
.url = self.request_uri,
|
||||
.method = self.method,
|
||||
.headers = &self.headers,
|
||||
.headers = self.headers.items,
|
||||
.has_body = self.body != null,
|
||||
});
|
||||
}
|
||||
|
||||
fn requestFailed(self: *Request, err: []const u8) void {
|
||||
const notification = self.notification orelse return;
|
||||
if (self._notified_fail) {
|
||||
return;
|
||||
}
|
||||
self._notified_fail = true;
|
||||
notification.dispatch(.http_request_fail, &.{
|
||||
.id = self.id,
|
||||
.err = err,
|
||||
.url = self.request_uri,
|
||||
});
|
||||
}
|
||||
|
||||
fn requestCompleted(self: *Request, response: ResponseHeader) void {
|
||||
const notification = self.notification orelse return;
|
||||
if (self._notified_complete) {
|
||||
@@ -907,6 +728,13 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
// that we have valid, but unprocessed, data up to.
|
||||
read_pos: usize = 0,
|
||||
|
||||
// Depending on which version of TLS, there are different places during
|
||||
// the handshake that we want to start receiving from. We can't have
|
||||
// overlapping receives (works fine on MacOS (kqueue) but not Linux (
|
||||
// io_uring)). Using this boolean as a guard, to make sure we only have
|
||||
// 1 in-flight receive is easier than trying to understand TLS.
|
||||
is_receiving: bool = false,
|
||||
|
||||
// need a separate read and write buf because, with TLS, messages are
|
||||
// not strictly req->resp.
|
||||
write_buf: []u8,
|
||||
@@ -943,14 +771,6 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
// gzipped responses *cough*)
|
||||
full_body: ?std.ArrayListUnmanaged(u8) = null,
|
||||
|
||||
// Shutting down an async request requires that we wait for all inflight
|
||||
// IO to be completed. So we need to track what inflight requests we
|
||||
// have and whether or not we're shutting down
|
||||
shutdown: bool = false,
|
||||
pending_write: bool = false,
|
||||
pending_receive: bool = false,
|
||||
pending_connect: bool = true,
|
||||
|
||||
const Self = @This();
|
||||
const SendQueue = std.DoublyLinkedList([]const u8);
|
||||
|
||||
@@ -964,25 +784,13 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
wait,
|
||||
done,
|
||||
need_more,
|
||||
handler_error,
|
||||
};
|
||||
|
||||
fn deinit(self: *Self) void {
|
||||
self.request.deinit();
|
||||
}
|
||||
|
||||
fn abort(ctx: *anyopaque) void {
|
||||
var self: *Self = @alignCast(@ptrCast(ctx));
|
||||
self.shutdown = true;
|
||||
posix.shutdown(self.request._connection.?.socket, .both) catch {};
|
||||
self.maybeShutdown();
|
||||
}
|
||||
|
||||
fn connected(self: *Self, _: *IO.Completion, result: IO.ConnectError!void) void {
|
||||
self.pending_connect = false;
|
||||
if (self.shutdown) {
|
||||
return self.maybeShutdown();
|
||||
}
|
||||
result catch |err| return self.handleError("Connection failed", err);
|
||||
self.conn.connected() catch |err| {
|
||||
self.handleError("connected handler error", err);
|
||||
@@ -1003,7 +811,6 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
return;
|
||||
}
|
||||
|
||||
self.pending_write = true;
|
||||
self.loop.send(
|
||||
Self,
|
||||
self,
|
||||
@@ -1017,10 +824,6 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
}
|
||||
|
||||
fn sent(self: *Self, _: *IO.Completion, n_: IO.SendError!usize) void {
|
||||
self.pending_write = false;
|
||||
if (self.shutdown) {
|
||||
return self.maybeShutdown();
|
||||
}
|
||||
const n = n_ catch |err| {
|
||||
return self.handleError("Write error", err);
|
||||
};
|
||||
@@ -1038,7 +841,6 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
}
|
||||
|
||||
if (next) |next_| {
|
||||
self.pending_write = true;
|
||||
// we still have data to send
|
||||
self.loop.send(
|
||||
Self,
|
||||
@@ -1063,11 +865,11 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
// while handshaking and potentially while sending data. So we're always
|
||||
// receiving.
|
||||
fn receive(self: *Self) void {
|
||||
if (self.pending_receive) {
|
||||
if (self.is_receiving) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.pending_receive = true;
|
||||
self.is_receiving = true;
|
||||
self.loop.recv(
|
||||
Self,
|
||||
self,
|
||||
@@ -1081,11 +883,7 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
}
|
||||
|
||||
fn received(self: *Self, _: *IO.Completion, n_: IO.RecvError!usize) void {
|
||||
self.pending_receive = false;
|
||||
if (self.shutdown) {
|
||||
return self.maybeShutdown();
|
||||
}
|
||||
|
||||
self.is_receiving = false;
|
||||
const n = n_ catch |err| {
|
||||
return self.handleError("Read error", err);
|
||||
};
|
||||
@@ -1108,35 +906,12 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
switch (status) {
|
||||
.wait => {},
|
||||
.need_more => self.receive(),
|
||||
.handler_error => {
|
||||
// handler should never have been called if we're redirecting
|
||||
std.debug.assert(self.redirect == null);
|
||||
self.request.requestCompleted(self.reader.response);
|
||||
self.deinit();
|
||||
return;
|
||||
},
|
||||
.done => {
|
||||
const redirect = self.redirect orelse {
|
||||
var handler = self.handler;
|
||||
self.request.requestCompleted(self.reader.response);
|
||||
self.deinit();
|
||||
|
||||
// Emit the done chunk. We expect the caller to do
|
||||
// processing once the full request is completed. By
|
||||
// emitting this AFTER we've released the connection,
|
||||
// we free the connection and its state for re-use.
|
||||
// If we don't do it this way, we can end up with
|
||||
// _a lot_ of pending request/states.
|
||||
// DO NOT USE `self` here, it's no longer valid.
|
||||
handler.onHttpResponse(.{
|
||||
.data = null,
|
||||
.done = true,
|
||||
.first = false,
|
||||
.header = .{},
|
||||
}) catch {};
|
||||
return;
|
||||
};
|
||||
|
||||
self.request.redirectAsync(redirect, self.loop, self.handler) catch |err| {
|
||||
self.handleError("Setup async redirect", err);
|
||||
return;
|
||||
@@ -1147,17 +922,6 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
}
|
||||
}
|
||||
|
||||
fn maybeShutdown(self: *Self) void {
|
||||
std.debug.assert(self.shutdown);
|
||||
if (self.pending_write or self.pending_receive or self.pending_connect) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Who knows what state we're in, safer to not try to re-use the connection
|
||||
self.request._keepalive = false;
|
||||
self.request.deinit();
|
||||
}
|
||||
|
||||
// If our socket came from the connection pool, it's possible that we're
|
||||
// failing because it's since timed out. If so, retry on a fresh connection.
|
||||
fn maybeRetryRequest(self: *Self) bool {
|
||||
@@ -1278,26 +1042,25 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
self.handleError("decompression error", err);
|
||||
return .done;
|
||||
};
|
||||
|
||||
self.handler.onHttpResponse(.{
|
||||
.data = chunk,
|
||||
.first = first,
|
||||
.done = false,
|
||||
.done = next == null,
|
||||
.header = reader.response,
|
||||
}) catch return .handler_error;
|
||||
}) catch return .done;
|
||||
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
} else if (result.data != null or would_be_first) {
|
||||
} else if (result.data != null or done or would_be_first) {
|
||||
// If we have data. Or if the request is done. Or if this is the
|
||||
// first time we have a complete header. Emit the chunk.
|
||||
self.handler.onHttpResponse(.{
|
||||
.done = false,
|
||||
.done = done,
|
||||
.data = result.data,
|
||||
.first = would_be_first,
|
||||
.header = reader.response,
|
||||
}) catch return .handler_error;
|
||||
}) catch return .done;
|
||||
}
|
||||
|
||||
if (done == true) {
|
||||
@@ -1313,17 +1076,10 @@ fn AsyncHandler(comptime H: type, comptime L: type) type {
|
||||
}
|
||||
|
||||
fn handleError(self: *Self, comptime msg: []const u8, err: anyerror) void {
|
||||
log.err(.http_client, msg, .{
|
||||
.err = err,
|
||||
.method = self.request.method,
|
||||
.url = self.request.request_uri,
|
||||
});
|
||||
|
||||
log.err(msg ++ ": {any} ({any} {any})", .{ err, self.request.method, self.request.request_uri });
|
||||
self.handler.onHttpResponse(err) catch {};
|
||||
// just to be safe
|
||||
self.request._keepalive = false;
|
||||
|
||||
self.request.requestFailed(@errorName(err));
|
||||
self.request.deinit();
|
||||
}
|
||||
|
||||
@@ -1541,10 +1297,7 @@ const SyncHandler = struct {
|
||||
// See CompressedReader for an explanation. This isn't great code. Sorry.
|
||||
if (reader.response.get("content-encoding")) |ce| {
|
||||
if (std.ascii.eqlIgnoreCase(ce, "gzip") == false) {
|
||||
log.warn(.http_client, "unsupported content encoding", .{
|
||||
.content_encoding = ce,
|
||||
.uri = request.request_uri,
|
||||
});
|
||||
log.warn("unsupported content encoding '{s}' for: {}", .{ ce, request.request_uri });
|
||||
return error.UnsupportedContentEncoding;
|
||||
}
|
||||
|
||||
@@ -1799,9 +1552,6 @@ const Reader = struct {
|
||||
|
||||
header_done: bool,
|
||||
|
||||
// Whether or not the current header has to be skipped [because it's too long].
|
||||
skip_current_header: bool,
|
||||
|
||||
fn init(state: *State, keepalive: *bool) Reader {
|
||||
return .{
|
||||
.pos = 0,
|
||||
@@ -1809,7 +1559,6 @@ const Reader = struct {
|
||||
.body_reader = null,
|
||||
.header_done = false,
|
||||
.keepalive = keepalive,
|
||||
.skip_current_header = false,
|
||||
.header_buf = state.header_buf,
|
||||
.arena = state.arena.allocator(),
|
||||
};
|
||||
@@ -1849,17 +1598,6 @@ const Reader = struct {
|
||||
var done = false;
|
||||
var unprocessed = data;
|
||||
|
||||
if (self.skip_current_header) {
|
||||
const index = std.mem.indexOfScalarPos(u8, data, 0, '\n') orelse {
|
||||
// discard all of this data, since it belongs to a header we
|
||||
// want to skip
|
||||
return .{ .done = false, .data = null, .unprocessed = null };
|
||||
};
|
||||
self.pos = 0;
|
||||
self.skip_current_header = false;
|
||||
unprocessed = data[index + 1 ..];
|
||||
}
|
||||
|
||||
// Data from a previous call to process that we weren't able to parse
|
||||
const pos = self.pos;
|
||||
const header_buf = self.header_buf;
|
||||
@@ -1874,26 +1612,21 @@ const Reader = struct {
|
||||
// data doesn't represent a complete header line. We need more data
|
||||
const end = pos + data.len;
|
||||
if (end > header_buf.len) {
|
||||
self.prepareToSkipLongHeader();
|
||||
} else {
|
||||
self.pos = end;
|
||||
@memcpy(self.header_buf[pos..end], data);
|
||||
return error.HeaderTooLarge;
|
||||
}
|
||||
self.pos = end;
|
||||
@memcpy(self.header_buf[pos..end], data);
|
||||
return .{ .done = false, .data = null, .unprocessed = null };
|
||||
}) + 1;
|
||||
|
||||
const end = pos + line_end;
|
||||
if (end > header_buf.len) {
|
||||
unprocessed = &.{};
|
||||
self.prepareToSkipLongHeader();
|
||||
// we can disable this immediately, since we've essentially
|
||||
// finished skipping it at this point.
|
||||
self.skip_current_header = false;
|
||||
} else {
|
||||
@memcpy(header_buf[pos..end], data[0..line_end]);
|
||||
done, unprocessed = try self.parseHeader(header_buf[0..end]);
|
||||
return error.HeaderTooLarge;
|
||||
}
|
||||
|
||||
@memcpy(header_buf[pos..end], data[0..line_end]);
|
||||
done, unprocessed = try self.parseHeader(header_buf[0..end]);
|
||||
|
||||
// we gave parseHeader exactly 1 header line, there should be no leftovers
|
||||
std.debug.assert(unprocessed.len == 0);
|
||||
|
||||
@@ -1916,11 +1649,10 @@ const Reader = struct {
|
||||
const p = self.pos; // don't use pos, self.pos might have been altered
|
||||
const end = p + unprocessed.len;
|
||||
if (end > header_buf.len) {
|
||||
self.prepareToSkipLongHeader();
|
||||
} else {
|
||||
@memcpy(header_buf[p..end], unprocessed);
|
||||
self.pos = end;
|
||||
return error.HeaderTooLarge;
|
||||
}
|
||||
@memcpy(header_buf[p..end], unprocessed);
|
||||
self.pos = end;
|
||||
return .{ .done = false, .data = null, .unprocessed = null };
|
||||
}
|
||||
}
|
||||
@@ -1980,13 +1712,6 @@ const Reader = struct {
|
||||
return .{ .done = false, .data = null, .unprocessed = null };
|
||||
}
|
||||
|
||||
fn prepareToSkipLongHeader(self: *Reader) void {
|
||||
self.skip_current_header = true;
|
||||
const buf = self.header_buf;
|
||||
const pos = std.mem.indexOfScalar(u8, buf, ':') orelse @min(buf.len, 20);
|
||||
log.warn(.http_client, "skipping long header", .{ .name = buf[0..pos] });
|
||||
}
|
||||
|
||||
// returns true when done
|
||||
// returns any remaining unprocessed data
|
||||
// When done == true, the remaining data must belong to the body
|
||||
@@ -2033,15 +1758,6 @@ const Reader = struct {
|
||||
};
|
||||
const name_end = pos + sep;
|
||||
|
||||
if (value_end - pos > MAX_HEADER_LINE_LEN) {
|
||||
// at this point, we could return this header, but then it would
|
||||
// be inconsistent with long headers that are split up and need
|
||||
// to be buffered.
|
||||
log.warn(.http_client, "skipping long header", .{ .name = data[pos..name_end] });
|
||||
pos = value_end + 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const value_start = name_end + 1;
|
||||
|
||||
if (value_end == value_start or data[value_end - 1] != '\r') {
|
||||
@@ -2490,7 +2206,7 @@ const State = struct {
|
||||
}
|
||||
|
||||
fn reset(self: *State) void {
|
||||
_ = self.arena.reset(.{ .retain_with_limit = 64 * 1024 });
|
||||
_ = self.arena.reset(.{ .retain_with_limit = 1024 * 1024 });
|
||||
}
|
||||
|
||||
fn deinit(self: *State) void {
|
||||
@@ -2543,17 +2259,10 @@ const StatePool = struct {
|
||||
allocator.free(self.states);
|
||||
}
|
||||
|
||||
pub fn freeSlotCount(self: *StatePool) usize {
|
||||
self.mutex.lock();
|
||||
defer self.mutex.unlock();
|
||||
return self.available;
|
||||
}
|
||||
|
||||
pub fn acquireWait(self: *StatePool) *State {
|
||||
const states = self.states;
|
||||
|
||||
pub fn acquire(self: *StatePool) *State {
|
||||
self.mutex.lock();
|
||||
while (true) {
|
||||
const states = self.states;
|
||||
const available = self.available;
|
||||
if (available == 0) {
|
||||
self.cond.wait(&self.mutex);
|
||||
@@ -2567,33 +2276,13 @@ const StatePool = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn acquireOrNull(self: *StatePool) ?*State {
|
||||
const states = self.states;
|
||||
|
||||
self.mutex.lock();
|
||||
defer self.mutex.unlock();
|
||||
|
||||
const available = self.available;
|
||||
if (available == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const index = available - 1;
|
||||
const state = states[index];
|
||||
self.available = index;
|
||||
return state;
|
||||
}
|
||||
|
||||
pub fn release(self: *StatePool, state: *State) void {
|
||||
state.reset();
|
||||
var states = self.states;
|
||||
|
||||
self.mutex.lock();
|
||||
var states = self.states;
|
||||
const available = self.available;
|
||||
states[available] = state;
|
||||
self.available = available + 1;
|
||||
self.mutex.unlock();
|
||||
|
||||
self.cond.signal();
|
||||
}
|
||||
};
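
The pool above is a plain mutex-and-condition-variable stack of pre-allocated states: acquire blocks (or returns null in the acquireOrNull variant) until a slot is free, and release pushes the slot back and signals one waiter. A freestanding miniature of the same pattern, assuming nothing from this file:

const std = @import("std");

const MiniPool = struct {
    mutex: std.Thread.Mutex = .{},
    cond: std.Thread.Condition = .{},
    available: usize,

    fn acquire(self: *MiniPool) void {
        self.mutex.lock();
        defer self.mutex.unlock();
        // loop: wait() can wake spuriously, so re-check the predicate
        while (self.available == 0) self.cond.wait(&self.mutex);
        self.available -= 1;
    }

    fn release(self: *MiniPool) void {
        {
            self.mutex.lock();
            defer self.mutex.unlock();
            self.available += 1;
        }
        // signal outside the lock so the waiter can take it immediately
        self.cond.signal();
    }
};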
|
||||
@@ -2780,11 +2469,9 @@ test "HttpClient Reader: fuzz" {
|
||||
}
|
||||
|
||||
{
|
||||
// skips large headers
|
||||
const data = "HTTP/1.1 200 OK\r\na: b\r\n" ++ ("a" ** 5000) ++ ": wow\r\nx:zz\r\n\r\n";
|
||||
try testReader(&state, &res, data);
|
||||
try testing.expectEqual(200, res.status);
|
||||
try res.assertHeaders(&.{ "a", "b", "x", "zz" });
|
||||
// header too big
|
||||
const data = "HTTP/1.1 200 OK\r\n" ++ ("a" ** 1500);
|
||||
try testing.expectError(error.HeaderTooLarge, testReader(&state, &res, data));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2994,19 +2681,11 @@ test "HttpClient: sync GET redirect" {
|
||||
}
|
||||
|
||||
test "HttpClient: async connect error" {
|
||||
defer testing.reset();
|
||||
var loop = try Loop.init(testing.allocator);
|
||||
defer loop.deinit();
|
||||
|
||||
const Handler = struct {
|
||||
loop: *Loop,
|
||||
reset: *Thread.ResetEvent,
|
||||
|
||||
fn requestReady(ctx: *anyopaque, req: *Request) !void {
|
||||
const self: *@This() = @alignCast(@ptrCast(ctx));
|
||||
try req.sendAsync(self.loop, self, .{});
|
||||
}
|
||||
|
||||
fn onHttpResponse(self: *@This(), res: anyerror!Progress) !void {
|
||||
_ = res catch |err| {
|
||||
if (err == error.ConnectionRefused) {
|
||||
@@ -3024,35 +2703,14 @@ test "HttpClient: async connect error" {
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
|
||||
var handler = Handler{
|
||||
.loop = &loop,
|
||||
.reset = &reset,
|
||||
};
|
||||
|
||||
const uri = try Uri.parse("HTTP://127.0.0.1:9920");
|
||||
try client.initAsync(
|
||||
testing.arena_allocator,
|
||||
.GET,
|
||||
&uri,
|
||||
&handler,
|
||||
Handler.requestReady,
|
||||
&loop,
|
||||
.{},
|
||||
);
|
||||
|
||||
for (0..10) |_| {
|
||||
try loop.io.run_for_ns(std.time.ns_per_ms * 10);
|
||||
if (reset.isSet()) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
return error.Timeout;
|
||||
}
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&loop, Handler{ .reset = &reset }, .{});
|
||||
try loop.io.run_for_ns(std.time.ns_per_ms);
|
||||
try reset.timedWait(std.time.ns_per_s);
|
||||
}
|
||||
|
||||
test "HttpClient: async no body" {
|
||||
defer testing.reset();
|
||||
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
|
||||
@@ -3060,7 +2718,8 @@ test "HttpClient: async no body" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTP://127.0.0.1:9582/http_client/simple");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{});
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3070,8 +2729,6 @@ test "HttpClient: async no body" {
|
||||
}
|
||||
|
||||
test "HttpClient: async with body" {
|
||||
defer testing.reset();
|
||||
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
|
||||
@@ -3079,7 +2736,8 @@ test "HttpClient: async with body" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTP://127.0.0.1:9582/http_client/echo");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{});
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3094,8 +2752,6 @@ test "HttpClient: async with body" {
|
||||
}
|
||||
|
||||
test "HttpClient: async with gzip body" {
|
||||
defer testing.reset();
|
||||
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
|
||||
@@ -3103,7 +2759,8 @@ test "HttpClient: async with gzip body" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTP://127.0.0.1:9582/http_client/gzip");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{});
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3117,8 +2774,6 @@ test "HttpClient: async with gzip body" {
|
||||
}
|
||||
|
||||
test "HttpClient: async redirect" {
|
||||
defer testing.reset();
|
||||
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
|
||||
@@ -3126,7 +2781,8 @@ test "HttpClient: async redirect" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTP://127.0.0.1:9582/http_client/redirect");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{});
|
||||
|
||||
// Called twice on purpose. The initial GET results in the # of pending
|
||||
// events to reach 0. This causes our `run_for_ns` to return. But we then
|
||||
@@ -3147,7 +2803,6 @@ test "HttpClient: async redirect" {
|
||||
}
|
||||
|
||||
test "HttpClient: async tls no body" {
|
||||
defer testing.reset();
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
for (0..5) |_| {
|
||||
@@ -3155,7 +2810,8 @@ test "HttpClient: async tls no body" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTPs://127.0.0.1:9581/http_client/simple");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{ .tls_verify_host = false });
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3171,7 +2827,6 @@ test "HttpClient: async tls no body" {
|
||||
}
|
||||
|
||||
test "HttpClient: async tls with body x" {
|
||||
defer testing.reset();
|
||||
for (0..5) |_| {
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
@@ -3180,7 +2835,8 @@ test "HttpClient: async tls with body x" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("HTTPs://127.0.0.1:9581/http_client/body");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{ .tls_verify_host = false });
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3195,7 +2851,6 @@ test "HttpClient: async tls with body x" {
|
||||
}
|
||||
|
||||
test "HttpClient: async redirect from TLS to Plaintext" {
|
||||
defer testing.reset();
|
||||
for (0..1) |_| {
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
@@ -3204,7 +2859,8 @@ test "HttpClient: async redirect from TLS to Plaintext" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("https://127.0.0.1:9581/http_client/redirect/insecure");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{ .tls_verify_host = false });
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3220,7 +2876,6 @@ test "HttpClient: async redirect from TLS to Plaintext" {
|
||||
}
|
||||
|
||||
test "HttpClient: async redirect plaintext to TLS" {
|
||||
defer testing.reset();
|
||||
for (0..5) |_| {
|
||||
var client = try testClient();
|
||||
defer client.deinit();
|
||||
@@ -3229,7 +2884,8 @@ test "HttpClient: async redirect plaintext to TLS" {
|
||||
defer handler.deinit();
|
||||
|
||||
const uri = try Uri.parse("http://127.0.0.1:9582/http_client/redirect/secure");
|
||||
try client.initAsync(testing.arena_allocator, .GET, &uri, &handler, CaptureHandler.requestReady, &handler.loop, .{});
|
||||
var req = try client.request(.GET, &uri);
|
||||
try req.sendAsync(&handler.loop, &handler, .{ .tls_verify_host = false });
|
||||
try handler.waitUntilDone();
|
||||
|
||||
const res = handler.response;
|
||||
@@ -3351,11 +3007,6 @@ const CaptureHandler = struct {
|
||||
self.loop.deinit();
|
||||
}
|
||||
|
||||
fn requestReady(ctx: *anyopaque, req: *Request) !void {
|
||||
const self: *CaptureHandler = @alignCast(@ptrCast(ctx));
|
||||
try req.sendAsync(&self.loop, self, .{ .tls_verify_host = false });
|
||||
}
|
||||
|
||||
fn onHttpResponse(self: *CaptureHandler, progress_: anyerror!Progress) !void {
|
||||
self.process(progress_) catch |err| {
|
||||
std.debug.print("capture handler error: {}\n", .{err});
|
||||
@@ -3366,8 +3017,7 @@ const CaptureHandler = struct {
|
||||
const progress = try progress_;
|
||||
const allocator = self.response.arena.allocator();
|
||||
try self.response.body.appendSlice(allocator, progress.data orelse "");
|
||||
if (progress.first) {
|
||||
std.debug.assert(!progress.done);
|
||||
if (progress.done) {
|
||||
self.response.status = progress.header.status;
|
||||
try self.response.headers.ensureTotalCapacity(allocator, progress.header.headers.items.len);
|
||||
for (progress.header.headers.items) |header| {
|
||||
@@ -3376,9 +3026,6 @@ const CaptureHandler = struct {
|
||||
.value = try allocator.dupe(u8, header.value),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (progress.done) {
|
||||
self.reset.set();
|
||||
}
|
||||
}
|
||||
@@ -3437,5 +3084,5 @@ fn testReader(state: *State, res: *TestResponse, data: []const u8) !void {
|
||||
}
|
||||
|
||||
fn testClient() !Client {
|
||||
return try Client.init(testing.allocator, .{ .max_concurrent = 1 });
|
||||
return try Client.init(testing.allocator, 1, .{});
|
||||
}
|
||||
|
||||
src/log.zig (384 changed lines)
@@ -1,384 +0,0 @@
|
||||
// Copyright (C) 2023-2024 Lightpanda (Selecy SAS)
|
||||
//
|
||||
// Francis Bouvier <francis@lightpanda.io>
|
||||
// Pierre Tachoire <pierre@lightpanda.io>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public License as
|
||||
// published by the Free Software Foundation, either version 3 of the
|
||||
// License, or (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
const Thread = std.Thread;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const is_debug = builtin.mode == .Debug;
|
||||
|
||||
pub const Scope = enum {
|
||||
app,
|
||||
browser,
|
||||
cdp,
|
||||
console,
|
||||
http,
|
||||
http_client,
|
||||
js,
|
||||
loop,
|
||||
script_event,
|
||||
telemetry,
|
||||
user_script,
|
||||
unknown_prop,
|
||||
web_api,
|
||||
xhr,
|
||||
};
|
||||
|
||||
const Opts = struct {
|
||||
format: Format = if (is_debug) .pretty else .logfmt,
|
||||
level: Level = if (is_debug) .info else .warn,
|
||||
filter_scopes: []const Scope = &.{.unknown_prop},
|
||||
};
|
||||
|
||||
pub var opts = Opts{};
|
||||
|
||||
// synchronizes writes to the output
|
||||
var out_lock: Thread.Mutex = .{};
|
||||
|
||||
// synchronizes access to last_log
|
||||
var last_log_lock: Thread.Mutex = .{};
|
||||
|
||||
pub fn enabled(comptime scope: Scope, level: Level) bool {
|
||||
if (@intFromEnum(level) < @intFromEnum(opts.level)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (comptime builtin.mode == .Debug) {
|
||||
for (opts.filter_scopes) |fs| {
|
||||
if (fs == scope) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
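
Both checks are driven by the global `opts` value, so callers can tune them at startup, as main.zig does. For example (values are illustrative):

// show debug output, but silence two noisy scopes (scope filtering is a Debug-build feature)
log.opts.level = .debug;
log.opts.filter_scopes = &.{ .http_client, .unknown_prop };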
|
||||
|
||||
// Ugliness to support complex debug parameters. Could add better support for
|
||||
// this directly in writeValue, but we [currently] only need this in one place
|
||||
// and I kind of don't want to encourage / make this easy.
|
||||
pub fn separator() []const u8 {
|
||||
return if (opts.format == .pretty) "\n " else "; ";
|
||||
}
|
||||
|
||||
pub const Level = enum {
|
||||
debug,
|
||||
info,
|
||||
warn,
|
||||
err,
|
||||
fatal,
|
||||
};
|
||||
|
||||
pub const Format = enum {
|
||||
logfmt,
|
||||
pretty,
|
||||
};
|
||||
|
||||
pub fn debug(comptime scope: Scope, comptime msg: []const u8, data: anytype) void {
|
||||
log(scope, .debug, msg, data);
|
||||
}
|
||||
|
||||
pub fn info(comptime scope: Scope, comptime msg: []const u8, data: anytype) void {
|
||||
log(scope, .info, msg, data);
|
||||
}
|
||||
|
||||
pub fn warn(comptime scope: Scope, comptime msg: []const u8, data: anytype) void {
|
||||
log(scope, .warn, msg, data);
|
||||
}
|
||||
|
||||
pub fn err(comptime scope: Scope, comptime msg: []const u8, data: anytype) void {
|
||||
log(scope, .err, msg, data);
|
||||
}
|
||||
|
||||
pub fn fatal(comptime scope: Scope, comptime msg: []const u8, data: anytype) void {
|
||||
log(scope, .fatal, msg, data);
|
||||
}
|
||||
|
||||
pub fn log(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype) void {
|
||||
if (enabled(scope, level) == false) {
|
||||
return;
|
||||
}
|
||||
|
||||
std.debug.lockStdErr();
|
||||
defer std.debug.unlockStdErr();
|
||||
|
||||
logTo(scope, level, msg, data, std.io.getStdErr().writer()) catch |log_err| {
|
||||
std.debug.print("$time={d} $level=fatal $scope={s} $msg=\"log err\" err={s} log_msg=\"{s}\"", .{ timestamp(), @errorName(log_err), @tagName(scope), msg });
|
||||
};
|
||||
}
|
||||
|
||||
fn logTo(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, out: anytype) !void {
|
||||
comptime {
|
||||
if (msg.len > 30) {
|
||||
@compileError("log msg cannot be more than 30 characters: '" ++ msg ++ "'");
|
||||
}
|
||||
for (msg) |b| {
|
||||
switch (b) {
|
||||
'A'...'Z', 'a'...'z', ' ', '0'...'9', '_', '-', '.', '{', '}' => {},
|
||||
else => @compileError("log msg contains an invalid character '" ++ msg ++ "'"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var bw = std.io.bufferedWriter(out);
|
||||
switch (opts.format) {
|
||||
.logfmt => try logLogfmt(scope, level, msg, data, bw.writer()),
|
||||
.pretty => try logPretty(scope, level, msg, data, bw.writer()),
|
||||
}
|
||||
bw.flush() catch return;
|
||||
}
|
||||
|
||||
fn logLogfmt(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, writer: anytype) !void {
|
||||
try writer.writeAll("$time=");
|
||||
try writer.print("{d}", .{timestamp()});
|
||||
|
||||
try writer.writeAll(" $scope=");
|
||||
try writer.writeAll(@tagName(scope));
|
||||
|
||||
try writer.writeAll(" $level=");
|
||||
try writer.writeAll(if (level == .err) "error" else @tagName(level));
|
||||
|
||||
const full_msg = comptime blk: {
|
||||
// only wrap msg in quotes if it contains a space
|
||||
const prefix = " $msg=";
|
||||
if (std.mem.indexOfScalar(u8, msg, ' ') == null) {
|
||||
break :blk prefix ++ msg;
|
||||
}
|
||||
break :blk prefix ++ "\"" ++ msg ++ "\"";
|
||||
};
|
||||
try writer.writeAll(full_msg);
|
||||
inline for (@typeInfo(@TypeOf(data)).@"struct".fields) |f| {
|
||||
const key = " " ++ f.name ++ "=";
|
||||
try writer.writeAll(key);
|
||||
try writeValue(.logfmt, @field(data, f.name), writer);
|
||||
}
|
||||
try writer.writeByte('\n');
|
||||
}
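
For reference, a line produced by the function above looks like this (field values are illustrative):

$time=1739795092929 $scope=http_client $level=warn $msg="sync error" err=ConnectionResetByPeer method=GET redirects=0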
|
||||
|
||||
fn logPretty(comptime scope: Scope, level: Level, comptime msg: []const u8, data: anytype, writer: anytype) !void {
|
||||
if (scope == .console and level == .fatal and comptime std.mem.eql(u8, msg, "lightpanda")) {
|
||||
try writer.writeAll("\x1b[0;104mWARN ");
|
||||
} else {
|
||||
try writer.writeAll(switch (level) {
|
||||
.debug => "\x1b[0;36mDEBUG\x1b[0m ",
|
||||
.info => "\x1b[0;32mINFO\x1b[0m ",
|
||||
.warn => "\x1b[0;33mWARN\x1b[0m ",
|
||||
.err => "\x1b[0;31mERROR ",
|
||||
.fatal => "\x1b[0;35mFATAL ",
|
||||
});
|
||||
}
|
||||
|
||||
const prefix = @tagName(scope) ++ " : " ++ msg;
|
||||
try writer.writeAll(prefix);
|
||||
|
||||
{
|
||||
// msg.len cannot be > 30, and @tagName(scope).len cannot be > 15
|
||||
// so this is safe
|
||||
const padding = 55 - prefix.len;
|
||||
for (0..padding / 2) |_| {
|
||||
try writer.writeAll(" .");
|
||||
}
|
||||
if (@mod(padding, 2) == 1) {
|
||||
try writer.writeByte(' ');
|
||||
}
|
||||
try writer.print(" \x1b[0m[+{d}ms]", .{elapsed()});
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
|
||||
inline for (@typeInfo(@TypeOf(data)).@"struct".fields) |f| {
|
||||
const key = " " ++ f.name ++ " = ";
|
||||
try writer.writeAll(key);
|
||||
try writeValue(.pretty, @field(data, f.name), writer);
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
try writer.writeByte('\n');
|
||||
}
|
||||
|
||||
pub fn writeValue(comptime format: Format, value: anytype, writer: anytype) !void {
|
||||
const T = @TypeOf(value);
|
||||
switch (@typeInfo(T)) {
|
||||
.optional => {
|
||||
if (value) |v| {
|
||||
return writeValue(format, v, writer);
|
||||
}
|
||||
return writer.writeAll("null");
|
||||
},
|
||||
.comptime_int, .int, .comptime_float, .float => {
|
||||
return writer.print("{d}", .{value});
|
||||
},
|
||||
.bool => {
|
||||
return writer.writeAll(if (value) "true" else "false");
|
||||
},
|
||||
.error_set => return writer.writeAll(@errorName(value)),
|
||||
.@"enum" => return writer.writeAll(@tagName(value)),
|
||||
.array => return writeValue(format, &value, writer),
|
||||
.pointer => |ptr| switch (ptr.size) {
|
||||
.slice => switch (ptr.child) {
|
||||
u8 => return writeString(format, value, writer),
|
||||
else => {},
|
||||
},
|
||||
.one => switch (@typeInfo(ptr.child)) {
|
||||
.array => |arr| if (arr.child == u8) {
|
||||
return writeString(format, value, writer);
|
||||
},
|
||||
else => return writer.print("{}", .{value}),
|
||||
},
|
||||
else => {},
|
||||
},
|
||||
.@"union" => return writer.print("{}", .{value}),
|
||||
.@"struct" => return writer.print("{}", .{value}),
|
||||
else => {},
|
||||
}
|
||||
|
||||
@compileError("cannot log a: " ++ @typeName(T));
|
||||
}
|
||||
|
||||
fn writeString(comptime format: Format, value: []const u8, writer: anytype) !void {
|
||||
if (format == .pretty) {
|
||||
return writer.writeAll(value);
|
||||
}
|
||||
|
||||
var space_count: usize = 0;
|
||||
var escape_count: usize = 0;
|
||||
var binary_count: usize = 0;
|
||||
|
||||
for (value) |b| {
|
||||
switch (b) {
|
||||
'\r', '\n', '"' => escape_count += 1,
|
||||
' ' => space_count += 1,
|
||||
'\t', '!', '#'...'~' => {}, // printable characters
|
||||
else => binary_count += 1,
|
||||
}
|
||||
}
|
||||
|
||||
if (binary_count > 0) {
|
||||
// TODO: use a different encoding if the ratio of binary data / printable is low
|
||||
return std.base64.standard_no_pad.Encoder.encodeWriter(writer, value);
|
||||
}
|
||||
|
||||
if (escape_count == 0) {
|
||||
if (space_count == 0) {
|
||||
return writer.writeAll(value);
|
||||
}
|
||||
try writer.writeByte('"');
|
||||
try writer.writeAll(value);
|
||||
try writer.writeByte('"');
|
||||
return;
|
||||
}
|
||||
|
||||
try writer.writeByte('"');
|
||||
|
||||
var rest = value;
|
||||
while (rest.len > 0) {
|
||||
const pos = std.mem.indexOfAny(u8, rest, "\r\n\"") orelse {
|
||||
try writer.writeAll(rest);
|
||||
break;
|
||||
};
|
||||
try writer.writeAll(rest[0..pos]);
|
||||
try writer.writeByte('\\');
|
||||
switch (rest[pos]) {
|
||||
'"' => try writer.writeByte('"'),
|
||||
'\r' => try writer.writeByte('r'),
|
||||
'\n' => try writer.writeByte('n'),
|
||||
else => unreachable,
|
||||
}
|
||||
rest = rest[pos + 1 ..];
|
||||
}
|
||||
return writer.writeByte('"');
|
||||
}
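
In the same style as the tests at the bottom of this file, the escaping rules above behave as follows; a sketch that would have to live inside this file, since writeString is file-private:

test "writeString: examples" {
    var buf: std.ArrayListUnmanaged(u8) = .{};
    defer buf.deinit(std.testing.allocator);

    // printable, no spaces: written as-is
    try writeString(.logfmt, "over9000!", buf.writer(std.testing.allocator));
    try std.testing.expectEqualStrings("over9000!", buf.items);

    // contains a space: wrapped in quotes
    buf.clearRetainingCapacity();
    try writeString(.logfmt, "hello world", buf.writer(std.testing.allocator));
    try std.testing.expectEqualStrings("\"hello world\"", buf.items);

    // quotes and newlines are escaped inside the wrapping quotes
    buf.clearRetainingCapacity();
    try writeString(.logfmt, "a \"b\"\n", buf.writer(std.testing.allocator));
    try std.testing.expectEqualStrings("\"a \\\"b\\\"\\n\"", buf.items);
}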
|
||||
|
||||
fn timestamp() i64 {
|
||||
if (comptime @import("builtin").is_test) {
|
||||
return 1739795092929;
|
||||
}
|
||||
return std.time.milliTimestamp();
|
||||
}
|
||||
|
||||
var last_log: i64 = 0;
|
||||
fn elapsed() i64 {
|
||||
const now = timestamp();
|
||||
|
||||
last_log_lock.lock();
|
||||
const previous = last_log;
|
||||
last_log = now;
|
||||
last_log_lock.unlock();
|
||||
|
||||
if (previous == 0) {
|
||||
return 0;
|
||||
}
|
||||
if (previous > now) {
|
||||
return 0;
|
||||
}
|
||||
return now - previous;
|
||||
}
|
||||
|
||||
const testing = @import("testing.zig");
|
||||
test "log: data" {
|
||||
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||
defer buf.deinit(testing.allocator);
|
||||
|
||||
{
|
||||
try logTo(.browser, .err, "nope", .{}, buf.writer(testing.allocator));
|
||||
try testing.expectEqual("$time=1739795092929 $scope=browser $level=error $msg=nope\n", buf.items);
|
||||
}
|
||||
|
||||
{
|
||||
buf.clearRetainingCapacity();
|
||||
const string = try testing.allocator.dupe(u8, "spice_must_flow");
|
||||
defer testing.allocator.free(string);
|
||||
|
||||
try logTo(.http, .warn, "a msg", .{
|
||||
.cint = 5,
|
||||
.cfloat = 3.43,
|
||||
.int = @as(i16, -49),
|
||||
.float = @as(f32, 0.0003232),
|
||||
.bt = true,
|
||||
.bf = false,
|
||||
.nn = @as(?i32, 33),
|
||||
.n = @as(?i32, null),
|
||||
.lit = "over9000!",
|
||||
.slice = string,
|
||||
.err = error.Nope,
|
||||
.level = Level.warn,
|
||||
}, buf.writer(testing.allocator));
|
||||
|
||||
try testing.expectEqual("$time=1739795092929 $scope=http $level=warn $msg=\"a msg\" " ++
|
||||
"cint=5 cfloat=3.43 int=-49 float=0.0003232 bt=true bf=false " ++
|
||||
"nn=33 n=null lit=over9000! slice=spice_must_flow " ++
|
||||
"err=Nope level=warn\n", buf.items);
|
||||
}
|
||||
}
|
||||
|
||||
test "log: string escape" {
|
||||
var buf: std.ArrayListUnmanaged(u8) = .{};
|
||||
defer buf.deinit(testing.allocator);
|
||||
|
||||
const prefix = "$time=1739795092929 $scope=app $level=error $msg=test ";
|
||||
{
|
||||
try logTo(.app, .err, "test", .{ .string = "hello world" }, buf.writer(testing.allocator));
|
||||
try testing.expectEqual(prefix ++ "string=\"hello world\"\n", buf.items);
|
||||
}
|
||||
|
||||
{
|
||||
buf.clearRetainingCapacity();
|
||||
try logTo(.app, .err, "test", .{ .string = "\n \thi \" \" " }, buf.writer(testing.allocator));
|
||||
try testing.expectEqual(prefix ++ "string=\"\\n \thi \\\" \\\" \"\n", buf.items);
|
||||
}
|
||||
}
|
||||
src/main.zig (278 changed lines)
@@ -20,7 +20,6 @@ const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
const log = @import("log.zig");
|
||||
const server = @import("server.zig");
|
||||
const App = @import("app.zig").App;
|
||||
const Platform = @import("runtime/js.zig").Platform;
|
||||
@@ -29,28 +28,27 @@ const Browser = @import("browser/browser.zig").Browser;
|
||||
const build_config = @import("build_config");
|
||||
const parser = @import("browser/netsurf.zig");
|
||||
|
||||
const log = std.log.scoped(.cli);
|
||||
|
||||
pub const std_options = std.Options{
|
||||
// Set the log level to info
|
||||
.log_level = @enumFromInt(@intFromEnum(build_config.log_level)),
|
||||
|
||||
// Define logFn to override the std implementation
|
||||
.logFn = logFn,
|
||||
};
|
||||
|
||||
pub fn main() !void {
|
||||
// allocator
|
||||
// - in Debug mode we use the General Purpose Allocator to detect memory leaks
|
||||
// - in Release mode we use the c allocator
|
||||
var gpa: std.heap.DebugAllocator(.{}) = .init;
|
||||
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
|
||||
const alloc = if (builtin.mode == .Debug) gpa.allocator() else std.heap.c_allocator;
|
||||
|
||||
defer if (builtin.mode == .Debug) {
|
||||
if (gpa.detectLeaks()) std.posix.exit(1);
|
||||
};
|
||||
|
||||
run(alloc) catch |err| {
|
||||
// If explicit filters were set, they won't be valid anymore because
|
||||
// the args_arena is gone. We need to set it to something that's not
|
||||
// invalid. (We should just move the args_arena up to main)
|
||||
log.opts.filter_scopes = &.{};
|
||||
log.fatal(.app, "exit", .{ .err = err });
|
||||
std.posix.exit(1);
|
||||
};
|
||||
}
|
||||
|
||||
fn run(alloc: Allocator) !void {
|
||||
var args_arena = std.heap.ArenaAllocator.init(alloc);
|
||||
defer args_arena.deinit();
|
||||
const args = try parseArgs(args_arena.allocator());
|
||||
@@ -67,16 +65,6 @@ fn run(alloc: Allocator) !void {
|
||||
else => {},
|
||||
}
|
||||
|
||||
if (args.logLevel()) |ll| {
|
||||
log.opts.level = ll;
|
||||
}
|
||||
if (args.logFormat()) |lf| {
|
||||
log.opts.format = lf;
|
||||
}
|
||||
if (args.logFilterScopes()) |lfs| {
|
||||
log.opts.filter_scopes = lfs;
|
||||
}
|
||||
|
||||
const platform = try Platform.init();
|
||||
defer platform.deinit();
|
||||
|
||||
@@ -90,20 +78,19 @@ fn run(alloc: Allocator) !void {
|
||||
|
||||
switch (args.mode) {
|
||||
.serve => |opts| {
|
||||
log.debug(.app, "startup", .{ .mode = "serve" });
|
||||
const address = std.net.Address.parseIp4(opts.host, opts.port) catch |err| {
|
||||
log.fatal(.app, "invalid server address", .{ .err = err, .host = opts.host, .port = opts.port });
|
||||
log.err("address (host:port) {any}\n", .{err});
|
||||
return args.printUsageAndExit(false);
|
||||
};
|
||||
|
||||
const timeout = std.time.ns_per_s * @as(u64, opts.timeout);
|
||||
server.run(app, address, timeout) catch |err| {
|
||||
log.fatal(.app, "server run error", .{ .err = err });
|
||||
log.err("Server error", .{});
|
||||
return err;
|
||||
};
|
||||
},
|
||||
.fetch => |opts| {
|
||||
log.debug(.app, "startup", .{ .mode = "fetch", .dump = opts.dump, .url = opts.url });
|
||||
log.debug("Fetch mode: url {s}, dump {any}", .{ opts.url, opts.dump });
|
||||
const url = try @import("url.zig").URL.parse(opts.url, null);
|
||||
|
||||
// browser
|
||||
@@ -117,11 +104,11 @@ fn run(alloc: Allocator) !void {
|
||||
|
||||
_ = page.navigate(url, .{}) catch |err| switch (err) {
|
||||
error.UnsupportedUriScheme, error.UriMissingHost => {
|
||||
log.fatal(.app, "invalid fetch URL", .{ .err = err, .url = url });
|
||||
log.err("'{s}' is not a valid URL ({any})\n", .{ url, err });
|
||||
return args.printUsageAndExit(false);
|
||||
},
|
||||
else => {
|
||||
log.fatal(.app, "fetch error", .{ .err = err, .url = url });
|
||||
log.err("'{s}' fetching error ({any})\n", .{ url, err });
|
||||
return err;
|
||||
},
|
||||
};
|
||||
@@ -143,36 +130,15 @@ const Command = struct {
|
||||
|
||||
fn tlsVerifyHost(self: *const Command) bool {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch => |opts| opts.common.tls_verify_host,
|
||||
else => unreachable,
|
||||
inline .serve, .fetch => |opts| opts.tls_verify_host,
|
||||
else => true,
|
||||
};
|
||||
}
|
||||
|
||||
fn httpProxy(self: *const Command) ?std.Uri {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch => |opts| opts.common.http_proxy,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
fn logLevel(self: *const Command) ?log.Level {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch => |opts| opts.common.log_level,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
fn logFormat(self: *const Command) ?log.Format {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch => |opts| opts.common.log_format,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
fn logFilterScopes(self: *const Command) ?[]const log.Scope {
|
||||
return switch (self.mode) {
|
||||
inline .serve, .fetch => |opts| opts.common.log_filter_scopes,
|
||||
else => unreachable,
|
||||
inline .serve, .fetch => |opts| opts.http_proxy,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -187,47 +153,18 @@ const Command = struct {
|
||||
host: []const u8,
|
||||
port: u16,
|
||||
timeout: u16,
|
||||
common: Common,
|
||||
tls_verify_host: bool,
|
||||
http_proxy: ?std.Uri,
|
||||
};
|
||||
|
||||
const Fetch = struct {
|
||||
url: []const u8,
|
||||
dump: bool = false,
|
||||
common: Common,
|
||||
};
|
||||
|
||||
const Common = struct {
|
||||
http_proxy: ?std.Uri = null,
|
||||
tls_verify_host: bool = true,
|
||||
log_level: ?log.Level = null,
|
||||
log_format: ?log.Format = null,
|
||||
log_filter_scopes: ?[]log.Scope = null,
|
||||
tls_verify_host: bool,
|
||||
http_proxy: ?std.Uri,
|
||||
};
|
||||
|
||||
fn printUsageAndExit(self: *const Command, success: bool) void {
|
||||
const common_options =
|
||||
\\
|
||||
\\--insecure_disable_tls_host_verification
|
||||
\\ Disables host verification on all HTTP requests.
|
||||
\\ This is an advanced option which should only be
|
||||
\\ set if you understand and accept the risk of
|
||||
\\ disabling host verification.
|
||||
\\
|
||||
\\--http_proxy The HTTP proxy to use for all HTTP requests.
|
||||
\\ Defaults to none.
|
||||
\\
|
||||
\\--log_level The log level: debug, info, warn, error or fatal.
|
||||
\\ Defaults to
|
||||
++ (if (builtin.mode == .Debug) " info." else " warn.") ++
|
||||
\\
|
||||
\\
|
||||
\\--log_format The log format: pretty or logfmt.
|
||||
\\ Defaults to
|
||||
++ (if (builtin.mode == .Debug) " pretty." else " logfmt.") ++
|
||||
\\
|
||||
\\
|
||||
;
|
||||
|
||||
const usage =
|
||||
\\usage: {s} command [options] [URL]
|
||||
\\
|
||||
@@ -241,7 +178,14 @@ const Command = struct {
|
||||
\\--dump Dumps document to stdout.
|
||||
\\ Defaults to false.
|
||||
\\
|
||||
++ common_options ++
|
||||
\\--insecure_disable_tls_host_verification
|
||||
\\ Disables host verification on all HTTP requests.
|
||||
\\ This is an advanced option which should only be
|
||||
\\ set if you understand and accept the risk of
|
||||
\\ disabling host verification.
|
||||
\\
|
||||
\\--http_proxy The HTTP proxy to use for all HTTP requests.
|
||||
\\ Defaults to none.
|
||||
\\
|
||||
\\serve command
|
||||
\\Starts a websocket CDP server
|
||||
@@ -255,9 +199,16 @@ const Command = struct {
|
||||
\\ Defaults to 9222
|
||||
\\
|
||||
\\--timeout Inactivity timeout in seconds before disconnecting clients
|
||||
\\ Defaults to 10 (seconds)
|
||||
\\ Defaults to 3 (seconds)
|
||||
\\
|
||||
++ common_options ++
|
||||
\\--insecure_disable_tls_host_verification
|
||||
\\ Disables host verification on all HTTP requests.
|
||||
\\ This is an advanced option which should only be
|
||||
\\ set if you understand and accept the risk of
|
||||
\\ disabling host verification.
|
||||
\\
|
||||
\\--http_proxy The HTTP proxy to use for all HTTP requests.
|
||||
\\ Defaults to none.
|
||||
\\
|
||||
\\version command
|
||||
\\Displays the version of {s}
|
||||
@@ -342,13 +293,14 @@ fn parseServeArgs(
|
||||
) !Command.Serve {
|
||||
var host: []const u8 = "127.0.0.1";
|
||||
var port: u16 = 9222;
|
||||
var timeout: u16 = 10;
|
||||
var common: Command.Common = .{};
|
||||
var timeout: u16 = 3;
|
||||
var tls_verify_host = true;
|
||||
var http_proxy: ?std.Uri = null;
|
||||
|
||||
while (args.next()) |opt| {
|
||||
if (std.mem.eql(u8, "--host", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--host" });
|
||||
log.err("--host argument requires an value", .{});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
host = try allocator.dupe(u8, str);
|
||||
@@ -357,12 +309,12 @@ fn parseServeArgs(
|
||||
|
||||
if (std.mem.eql(u8, "--port", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--port" });
|
||||
log.err("--port argument requires an value", .{});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
|
||||
port = std.fmt.parseInt(u16, str, 10) catch |err| {
|
||||
log.fatal(.app, "invalid argument value", .{ .arg = "--port", .err = err });
|
||||
log.err("--port value is invalid: {}", .{err});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
continue;
|
||||
@@ -370,30 +322,41 @@ fn parseServeArgs(
|
||||
|
||||
if (std.mem.eql(u8, "--timeout", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--timeout" });
|
||||
log.err("--timeout argument requires an value", .{});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
|
||||
timeout = std.fmt.parseInt(u16, str, 10) catch |err| {
|
||||
log.fatal(.app, "invalid argument value", .{ .arg = "--timeout", .err = err });
|
||||
log.err("--timeout value is invalid: {}", .{err});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
continue;
|
||||
}
|
||||
|
||||
if (try parseCommonArg(allocator, opt, args, &common)) {
|
||||
if (std.mem.eql(u8, "--insecure_tls_verify_host", opt)) {
|
||||
tls_verify_host = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
log.fatal(.app, "unknown argument", .{ .mode = "serve", .arg = opt });
|
||||
if (std.mem.eql(u8, "--http_proxy", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.err("--http_proxy argument requires an value", .{});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
http_proxy = try std.Uri.parse(try allocator.dupe(u8, str));
|
||||
continue;
|
||||
}
|
||||
|
||||
log.err("Unknown option to serve command: '{s}'", .{opt});
|
||||
return error.UnkownOption;
|
||||
}
|
||||
|
||||
return .{
|
||||
.host = host,
|
||||
.port = port,
|
||||
.common = common,
|
||||
.timeout = timeout,
|
||||
.http_proxy = http_proxy,
|
||||
.tls_verify_host = tls_verify_host,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -403,7 +366,8 @@ fn parseFetchArgs(
|
||||
) !Command.Fetch {
|
||||
var dump: bool = false;
|
||||
var url: ?[]const u8 = null;
|
||||
var common: Command.Common = .{};
|
||||
var tls_verify_host = true;
|
||||
var http_proxy: ?std.Uri = null;
|
||||
|
||||
while (args.next()) |opt| {
|
||||
if (std.mem.eql(u8, "--dump", opt)) {
|
||||
@@ -411,109 +375,58 @@ fn parseFetchArgs(
|
||||
continue;
|
||||
}
|
||||
|
||||
if (try parseCommonArg(allocator, opt, args, &common)) {
|
||||
if (std.mem.eql(u8, "--insecure_disable_tls_host_verification", opt)) {
|
||||
tls_verify_host = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--http_proxy", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.err("--http_proxy argument requires an value", .{});
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
http_proxy = try std.Uri.parse(try allocator.dupe(u8, str));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (std.mem.startsWith(u8, opt, "--")) {
|
||||
log.fatal(.app, "unknown argument", .{ .mode = "fetch", .arg = opt });
|
||||
log.err("Unknown option to serve command: '{s}'", .{opt});
|
||||
return error.UnkownOption;
|
||||
}
|
||||
|
||||
if (url != null) {
|
||||
log.fatal(.app, "duplicate fetch url", .{ .help = "only 1 URL can be specified" });
|
||||
log.err("Can only fetch 1 URL", .{});
|
||||
return error.TooManyURLs;
|
||||
}
|
||||
url = try allocator.dupe(u8, opt);
|
||||
}
|
||||
|
||||
if (url == null) {
|
||||
log.fatal(.app, "missing fetch url", .{ .help = "URL to fetch must be provided" });
|
||||
log.err("A URL must be provided to the fetch command", .{});
|
||||
return error.MissingURL;
|
||||
}
|
||||
|
||||
return .{
|
||||
.url = url.?,
|
||||
.dump = dump,
|
||||
.common = common,
|
||||
.http_proxy = http_proxy,
|
||||
.tls_verify_host = tls_verify_host,
|
||||
};
|
||||
}
|
||||
|
||||
fn parseCommonArg(
|
||||
allocator: Allocator,
|
||||
opt: []const u8,
|
||||
args: *std.process.ArgIterator,
|
||||
common: *Command.Common,
|
||||
) !bool {
|
||||
if (std.mem.eql(u8, "--insecure_disable_tls_host_verification", opt)) {
|
||||
common.tls_verify_host = false;
|
||||
return true;
|
||||
var verbose: bool = builtin.mode == .Debug; // In debug mode, force verbose.
|
||||
fn logFn(
|
||||
comptime level: std.log.Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
if (!verbose) {
|
||||
// hide all messages with level greater than or equal to the debug level.
|
||||
if (@intFromEnum(level) >= @intFromEnum(std.log.Level.debug)) return;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--http_proxy", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--http_proxy" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
common.http_proxy = try std.Uri.parse(try allocator.dupe(u8, str));
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--log_level", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--log_level" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
|
||||
common.log_level = std.meta.stringToEnum(log.Level, str) orelse blk: {
|
||||
if (std.mem.eql(u8, str, "error")) {
|
||||
break :blk .err;
|
||||
}
|
||||
log.fatal(.app, "invalid option choice", .{ .arg = "--log_level", .value = str });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--log_format", opt)) {
|
||||
const str = args.next() orelse {
|
||||
log.fatal(.app, "missing argument value", .{ .arg = "--log_format" });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
|
||||
common.log_format = std.meta.stringToEnum(log.Format, str) orelse {
|
||||
log.fatal(.app, "invalid option choice", .{ .arg = "--log_format", .value = str });
|
||||
return error.InvalidArgument;
|
||||
};
|
||||
return true;
|
||||
}
|
||||
|
||||
if (std.mem.eql(u8, "--log_filter_scopes", opt)) {
|
||||
if (builtin.mode != .Debug) {
|
||||
log.fatal(.app, "experimental", .{ .help = "log scope filtering is only available in debug builds" });
|
||||
return false;
|
||||
}
|
||||
|
||||
const str = args.next() orelse {
|
||||
// disables the default filters
|
||||
common.log_filter_scopes = &.{};
|
||||
return true;
|
||||
};
|
||||
|
||||
var arr: std.ArrayListUnmanaged(log.Scope) = .empty;
|
||||
|
||||
var it = std.mem.splitScalar(u8, str, ',');
|
||||
while (it.next()) |part| {
|
||||
try arr.append(allocator, std.meta.stringToEnum(log.Scope, part) orelse {
|
||||
log.fatal(.app, "invalid option choice", .{ .arg = "--log_scope_filter", .value = part });
|
||||
return false;
|
||||
});
|
||||
}
|
||||
common.log_filter_scopes = arr.items;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
// default std log function.
|
||||
std.log.defaultLog(level, scope, format, args);
|
||||
}
|
||||
|
||||
test {
|
||||
@@ -522,10 +435,7 @@ test {
|
||||
|
||||
var test_wg: std.Thread.WaitGroup = .{};
|
||||
test "tests:beforeAll" {
|
||||
try parser.init(std.testing.allocator);
|
||||
log.opts.level = .err;
|
||||
log.opts.format = .logfmt;
|
||||
|
||||
try parser.init();
|
||||
test_wg.startMany(3);
|
||||
_ = try Platform.init();
|
||||
|
||||
|
||||
@@ -113,10 +113,10 @@ fn run(arena: Allocator, test_file: []const u8, loader: *FileLoader, err_out: *?
|
||||
});
|
||||
defer runner.deinit();
|
||||
|
||||
try polyfill.load(arena, runner.page.main_context);
|
||||
try polyfill.load(arena, runner.scope);
|
||||
|
||||
// loop over the scripts.
|
||||
const doc = parser.documentHTMLToDocument(runner.page.window.document);
|
||||
const doc = parser.documentHTMLToDocument(runner.state.window.document);
|
||||
const scripts = try parser.documentGetElementsByTagName(doc, "script");
|
||||
const script_count = try parser.nodeListLength(scripts);
|
||||
for (0..script_count) |i| {
|
||||
@@ -147,7 +147,7 @@ fn run(arena: Allocator, test_file: []const u8, loader: *FileLoader, err_out: *?
|
||||
|
||||
try parser.eventInit(loadevt, "load", .{});
|
||||
_ = try parser.eventTargetDispatchEvent(
|
||||
parser.toEventTarget(@TypeOf(runner.page.window), &runner.page.window),
|
||||
parser.toEventTarget(@TypeOf(runner.window), &runner.window),
|
||||
loadevt,
|
||||
);
|
||||
}
|
||||
@@ -155,9 +155,9 @@ fn run(arena: Allocator, test_file: []const u8, loader: *FileLoader, err_out: *?
|
||||
{
|
||||
// wait for all async executions
|
||||
var try_catch: Env.TryCatch = undefined;
|
||||
try_catch.init(runner.page.main_context);
|
||||
try_catch.init(runner.scope);
|
||||
defer try_catch.deinit();
|
||||
try runner.page.loop.run();
|
||||
try runner.loop.run();
|
||||
|
||||
if (try_catch.hasCaught()) {
|
||||
err_out.* = (try try_catch.err(arena)) orelse "unknown error";
|
||||
|
||||
@@ -1,12 +1,13 @@
const std = @import("std");

const log = @import("log.zig");
const URL = @import("url.zig").URL;
const page = @import("browser/page.zig");
const http_client = @import("http/client.zig");

const Allocator = std.mem.Allocator;

const log = std.log.scoped(.notification);

const List = std.DoublyLinkedList(Listener);
const Node = List.Node;

@@ -59,7 +60,6 @@ pub const Notification = struct {
page_created: List = .{},
page_navigate: List = .{},
page_navigated: List = .{},
http_request_fail: List = .{},
http_request_start: List = .{},
http_request_complete: List = .{},
notification_created: List = .{},
@@ -70,7 +70,6 @@ pub const Notification = struct {
page_created: *page.Page,
page_navigate: *const PageNavigate,
page_navigated: *const PageNavigated,
http_request_fail: *const RequestFail,
http_request_start: *const RequestStart,
http_request_complete: *const RequestComplete,
notification_created: *Notification,
@@ -91,20 +90,13 @@ pub const Notification = struct {
};

pub const RequestStart = struct {
arena: Allocator,
id: usize,
url: *const std.Uri,
method: http_client.Request.Method,
headers: *std.ArrayListUnmanaged(std.http.Header),
headers: []std.http.Header,
has_body: bool,
};

pub const RequestFail = struct {
id: usize,
url: *const std.Uri,
err: []const u8,
};

pub const RequestComplete = struct {
id: usize,
url: *const std.Uri,
@@ -215,12 +207,7 @@ pub const Notification = struct {
const listener = n.data;
const func: EventFunc(event) = @alignCast(@ptrCast(listener.func));
func(listener.receiver, data) catch |err| {
log.err(.app, "dispatch error", .{
.err = err,
.event = event,
.source = "notification",
.listener = listener.struct_name,
});
log.err("{s} '{s}' dispatch error: {}", .{ listener.struct_name, @tagName(event), err });
};
node = n.next;
}
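For context on the dispatch hunk above: listeners are stored type-erased, the callback pointer is cast back before invocation, and errors are logged rather than propagated. A minimal, self-contained sketch of that pattern follows (an illustration only; the names are hypothetical and this is not the project's code):

const std = @import("std");

// Each listener carries an opaque receiver plus a type-erased callback.
const Listener = struct {
    receiver: *anyopaque,
    func: *const fn (receiver: *anyopaque, data: usize) anyerror!void,
    struct_name: []const u8,
};

// Dispatch casts nothing itself; each callback casts its receiver back to the
// concrete type. Errors are reported and swallowed, mirroring the hunk above.
fn dispatch(listeners: []const Listener, data: usize) void {
    for (listeners) |listener| {
        listener.func(listener.receiver, data) catch |err| {
            std.debug.print("{s} dispatch error: {}\n", .{ listener.struct_name, err });
        };
    }
}

test "dispatch casts the receiver back to its concrete type" {
    const Sink = struct {
        total: usize = 0,
        fn onEvent(receiver: *anyopaque, data: usize) anyerror!void {
            const self: *@This() = @alignCast(@ptrCast(receiver));
            self.total += data;
        }
    };
    var sink = Sink{};
    dispatch(&.{.{ .receiver = &sink, .func = Sink.onEvent, .struct_name = "Sink" }}, 5);
    try std.testing.expectEqual(@as(usize, 5), sink.total);
}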
@@ -71,8 +71,6 @@ pub fn Union(comptime interfaces: anytype) type {
var FT = @field(tuple, field.name);
if (@hasDecl(FT, "Self")) {
FT = *(@field(FT, "Self"));
} else if (!@hasDecl(FT, "union_make_copy")) {
FT = *FT;
}
union_fields[index] = .{
.type = FT,
@@ -173,7 +171,7 @@ fn filterMap(comptime count: usize, interfaces: [count]type) struct { usize, [co
return .{ unfiltered_count, map };
}

test "generate: Union" {
test "generate.Union" {
const Astruct = struct {
pub const Self = Other;
const Other = struct {};
@@ -190,15 +188,15 @@ test "generate: Union" {
const value = Union(.{ Astruct, Bstruct, .{Cstruct} });
const ti = @typeInfo(value).@"union";
try std.testing.expectEqual(3, ti.fields.len);
try std.testing.expectEqualStrings("*runtime.generate.test.generate: Union.Astruct.Other", @typeName(ti.fields[0].type));
try std.testing.expectEqualStrings("*runtime.generate.test.generate.Union.Astruct.Other", @typeName(ti.fields[0].type));
try std.testing.expectEqualStrings(ti.fields[0].name, "Astruct");
try std.testing.expectEqual(*Bstruct, ti.fields[1].type);
try std.testing.expectEqual(Bstruct, ti.fields[1].type);
try std.testing.expectEqualStrings(ti.fields[1].name, "Bstruct");
try std.testing.expectEqual(*Cstruct, ti.fields[2].type);
try std.testing.expectEqual(Cstruct, ti.fields[2].type);
try std.testing.expectEqualStrings(ti.fields[2].name, "Cstruct");
}

test "generate: Tuple" {
test "generate.Tuple" {
const Astruct = struct {};

const Bstruct = struct {
File diff suppressed because it is too large
@@ -20,9 +20,10 @@ const std = @import("std");
const builtin = @import("builtin");
const MemoryPool = std.heap.MemoryPool;

const log = @import("../log.zig");
pub const IO = @import("tigerbeetle-io").IO;

const log = std.log.scoped(.loop);

// SingleThreaded I/O Loop based on Tigerbeetle io_uring loop.
// On Linux it's using io_uring.
// On MacOS and Windows it's using kqueue/IOCP with a ring design.
@@ -34,11 +35,9 @@ pub const Loop = struct {
alloc: std.mem.Allocator, // TODO: unmanaged version ?
io: IO,

// number of pending network events we have
pending_network_count: usize,

// number of pending timeout events we have
pending_timeout_count: usize,
// Used to track how many callbacks are to be called and wait until all
// event are finished.
events_nb: usize,

// Used to stop repeating timeouts when loop.run is called.
stopping: bool,
@@ -68,9 +67,8 @@ pub const Loop = struct {
.alloc = alloc,
.cancelled = .{},
.io = try IO.init(32, 0),
.events_nb = 0,
.stopping = false,
.pending_network_count = 0,
.pending_timeout_count = 0,
.timeout_pool = MemoryPool(ContextTimeout).init(alloc),
.event_callback_pool = MemoryPool(EventCallbackContext).init(alloc),
};
@@ -81,13 +79,12 @@ pub const Loop = struct {

// run tail events. We do run the tail events to ensure all the
// contexts are correcly free.
while (self.pending_network_count != 0 or self.pending_timeout_count != 0) {
self.io.run_for_ns(std.time.ns_per_ms * 10) catch |err| {
log.err(.loop, "deinit", .{ .err = err });
while (self.eventsNb() > 0) {
self.io.run_for_ns(10 * std.time.ns_per_ms) catch |err| {
log.err("deinit run tail events: {any}", .{err});
break;
};
}

if (comptime CANCEL_SUPPORTED) {
self.io.cancel_all();
}
@@ -107,14 +104,26 @@ pub const Loop = struct {
self.stopping = true;
defer self.stopping = false;

while (self.pending_network_count != 0 or self.pending_timeout_count != 0) {
self.io.run_for_ns(std.time.ns_per_ms * 10) catch |err| {
log.err(.loop, "deinit", .{ .err = err });
break;
};
while (self.eventsNb() > 0) {
try self.io.run_for_ns(10 * std.time.ns_per_ms);
// at each iteration we might have new events registred by previous callbacks
}
}

// Register events atomically
// - add 1 event and return previous value
fn addEvent(self: *Self) void {
_ = @atomicRmw(usize, &self.events_nb, .Add, 1, .acq_rel);
}
// - remove 1 event and return previous value
fn removeEvent(self: *Self) void {
_ = @atomicRmw(usize, &self.events_nb, .Sub, 1, .acq_rel);
}
// - get the number of current events
fn eventsNb(self: *Self) usize {
return @atomicLoad(usize, &self.events_nb, .seq_cst);
}

// JS callbacks APIs
// -----------------

@@ -144,7 +153,7 @@ pub const Loop = struct {
const loop = ctx.loop;

if (ctx.initial) {
loop.pending_timeout_count -= 1;
loop.removeEvent();
}

defer {
@@ -167,7 +176,7 @@ pub const Loop = struct {
result catch |err| {
switch (err) {
error.Canceled => {},
else => log.err(.loop, "timeout callback error", .{ .err = err }),
else => log.err("timeout callback: {any}", .{err}),
}
return;
};
@@ -199,7 +208,7 @@ pub const Loop = struct {
.callback_node = callback_node,
};

self.pending_timeout_count += 1;
self.addEvent();
self.scheduleTimeout(nanoseconds, ctx, completion);
return @intFromPtr(completion);
}
@@ -236,8 +245,8 @@ pub const Loop = struct {
) !void {
const onConnect = struct {
fn onConnect(callback: *EventCallbackContext, completion_: *Completion, res: ConnectError!void) void {
callback.loop.pending_network_count -= 1;
defer callback.loop.event_callback_pool.destroy(callback);
callback.loop.removeEvent();
cbk(@alignCast(@ptrCast(callback.ctx)), completion_, res);
}
}.onConnect;
@@ -246,7 +255,7 @@ pub const Loop = struct {
errdefer self.event_callback_pool.destroy(callback);
callback.* = .{ .loop = self, .ctx = ctx };

self.pending_network_count += 1;
self.addEvent();
self.io.connect(*EventCallbackContext, callback, onConnect, completion, socket, address);
}

@@ -263,8 +272,8 @@ pub const Loop = struct {
) !void {
const onSend = struct {
fn onSend(callback: *EventCallbackContext, completion_: *Completion, res: SendError!usize) void {
callback.loop.pending_network_count -= 1;
defer callback.loop.event_callback_pool.destroy(callback);
callback.loop.removeEvent();
cbk(@alignCast(@ptrCast(callback.ctx)), completion_, res);
}
}.onSend;
@@ -273,7 +282,7 @@ pub const Loop = struct {
errdefer self.event_callback_pool.destroy(callback);
callback.* = .{ .loop = self, .ctx = ctx };

self.pending_network_count += 1;
self.addEvent();
self.io.send(*EventCallbackContext, callback, onSend, completion, socket, buf);
}

@@ -290,8 +299,8 @@ pub const Loop = struct {
) !void {
const onRecv = struct {
fn onRecv(callback: *EventCallbackContext, completion_: *Completion, res: RecvError!usize) void {
callback.loop.pending_network_count -= 1;
defer callback.loop.event_callback_pool.destroy(callback);
callback.loop.removeEvent();
cbk(@alignCast(@ptrCast(callback.ctx)), completion_, res);
}
}.onRecv;
@@ -299,7 +308,8 @@ pub const Loop = struct {
const callback = try self.event_callback_pool.create();
errdefer self.event_callback_pool.destroy(callback);
callback.* = .{ .loop = self, .ctx = ctx };
self.pending_network_count += 1;

self.addEvent();
self.io.recv(*EventCallbackContext, callback, onRecv, completion, socket, buf);
}
};
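The hunks above swap between a single atomic events_nb counter and separate pending_network_count/pending_timeout_count fields. For reference, a minimal self-contained sketch of the atomic counter pattern shown on one side of the diff (a standalone illustration, not the project's file):

const std = @import("std");

// Sketch: the addEvent/removeEvent/eventsNb trio from the diff, isolated so it
// can be run with `zig test`. The loop uses it to know when all callbacks have
// finished before returning from run() or deinit().
const PendingEvents = struct {
    events_nb: usize = 0,

    fn addEvent(self: *PendingEvents) void {
        _ = @atomicRmw(usize, &self.events_nb, .Add, 1, .acq_rel);
    }

    fn removeEvent(self: *PendingEvents) void {
        _ = @atomicRmw(usize, &self.events_nb, .Sub, 1, .acq_rel);
    }

    fn eventsNb(self: *PendingEvents) usize {
        return @atomicLoad(usize, &self.events_nb, .seq_cst);
    }
};

test "pending event counter" {
    var pending = PendingEvents{};
    pending.addEvent();
    pending.addEvent();
    pending.removeEvent();
    try std.testing.expectEqual(@as(usize, 1), pending.eventsNb());
}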
@@ -23,13 +23,13 @@ const generate = @import("generate.zig");
pub const allocator = std.testing.allocator;

// Very similar to the JSRunner in src/testing.zig, but it isn't tied to the
// browser.Env or the *Page state
// browser.Env or the browser.SessionState
pub fn Runner(comptime State: type, comptime Global: type, comptime types: anytype) type {
const AdjustedTypes = if (Global == void) generate.Tuple(.{ types, DefaultGlobal }) else types;

return struct {
env: *Env,
js_context: *Env.JsContext,
scope: *Env.Scope,
executor: Env.ExecutionWorld,

pub const Env = js.Env(State, struct {
@@ -48,7 +48,7 @@ pub fn Runner(comptime State: type, comptime Global: type, comptime types: anyty
self.executor = try self.env.newExecutionWorld();
errdefer self.executor.deinit();

self.js_context = try self.executor.createJsContext(
self.scope = try self.executor.startScope(
if (Global == void) &default_global else global,
state,
{},
@@ -68,10 +68,10 @@ pub fn Runner(comptime State: type, comptime Global: type, comptime types: anyty
pub fn testCases(self: *Self, cases: []const Case, _: RunOpts) !void {
for (cases, 0..) |case, i| {
var try_catch: Env.TryCatch = undefined;
try_catch.init(self.js_context);
try_catch.init(self.scope);
defer try_catch.deinit();

const value = self.js_context.exec(case.@"0", null) catch |err| {
const value = self.scope.exec(case.@"0", null) catch |err| {
if (try try_catch.err(allocator)) |msg| {
defer allocator.free(msg);
if (isExpectedTypeError(case.@"1", msg)) {
@@ -25,7 +25,6 @@ const posix = std.posix;
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;

const log = @import("log.zig");
const IO = @import("runtime/loop.zig").IO;
const Completion = IO.Completion;
const AcceptError = IO.AcceptError;
@@ -39,6 +38,8 @@ const CDP = @import("cdp/cdp.zig").CDP;

const TimeoutCheck = std.time.ns_per_ms * 100;

const log = std.log.scoped(.server);

const MAX_HTTP_REQUEST_SIZE = 2048;

// max message size
@@ -66,7 +67,7 @@ const Server = struct {
}

fn queueAccept(self: *Server) void {
log.debug(.app, "accepting connection", .{});
log.info("accepting new conn...", .{});
self.loop.io.accept(
*Server,
self,
@@ -83,7 +84,7 @@ const Server = struct {
) void {
std.debug.assert(completion == &self.accept_completion);
self.doCallbackAccept(result) catch |err| {
log.err(.app, "server accept error", .{ .err = err });
log.err("accept error: {any}", .{err});
self.queueAccept();
};
}
@@ -96,13 +97,7 @@ const Server = struct {
const client = try self.allocator.create(Client);
client.* = Client.init(socket, self);
client.start();

if (log.enabled(.app, .info)) {
var address: std.net.Address = undefined;
var socklen: posix.socklen_t = @sizeOf(net.Address);
try std.posix.getsockname(socket, &address.any, &socklen);
log.info(.app, "client connected", .{ .ip = address });
}
log.info("client connected", .{});
}

fn releaseClient(self: *Server, client: *Client) void {
@@ -223,7 +218,6 @@ pub const Client = struct {
}

fn close(self: *Self) void {
log.info(.app, "client disconected", .{});
self.connected = false;
// recv only, because we might have pending writes we'd like to get
// out (like the HTTP error response)
@@ -256,7 +250,7 @@ pub const Client = struct {
}

const size = result catch |err| {
log.err(.app, "server read error", .{ .err = err });
log.err("read error: {any}", .{err});
self.close();
return;
};
@@ -319,7 +313,7 @@ pub const Client = struct {
error.InvalidVersionHeader => self.writeHTTPErrorResponse(400, "Invalid websocket version"),
error.InvalidConnectionHeader => self.writeHTTPErrorResponse(400, "Invalid connection header"),
else => {
log.err(.app, "server 500", .{ .err = err, .req = request[0..@min(100, request.len)] });
log.err("error processing HTTP request: {any}", .{err});
self.writeHTTPErrorResponse(500, "Internal Server Error");
},
}
@@ -600,7 +594,6 @@ pub const Client = struct {

if (result) |_| {
if (now().since(self.last_active) >= self.server.timeout) {
log.info(.app, "client connection timeout", .{});
if (self.mode == .websocket) {
self.send(null, &CLOSE_TIMEOUT) catch {};
}
@@ -608,7 +601,7 @@ pub const Client = struct {
return;
}
} else |err| {
log.err(.app, "server timeout error", .{ .err = err });
log.err("timeout error: {any}", .{err});
}

self.queueTimeout();
@@ -657,7 +650,7 @@ pub const Client = struct {
}

const sent = result catch |err| {
log.warn(.app, "server send error", .{ .err = err });
log.info("send error: {any}", .{err});
self.close();
return;
};
@@ -1043,7 +1036,6 @@ pub fn run(

// accept an connection
server.queueAccept();
log.info(.app, "server running", .{ .address = address });

// infinite loop on I/O events, either:
// - cmd from incoming connection on server socket
@@ -5,11 +5,11 @@ const build_config = @import("build_config");
const Thread = std.Thread;
const Allocator = std.mem.Allocator;

const log = @import("../log.zig");
const App = @import("../app.zig").App;
const telemetry = @import("telemetry.zig");
const HttpClient = @import("../http/client.zig").Client;

const log = std.log.scoped(.telemetry);
const URL = "https://telemetry.lightpanda.io";
const MAX_BATCH_SIZE = 20;

@@ -83,7 +83,7 @@ pub const LightPanda = struct {
const b = self.collectBatch(&batch);
self.mutex.unlock();
self.postEvent(b, &arr) catch |err| {
log.warn(.telemetry, "post error", .{ .err = err });
log.warn("Telementry reporting error: {}", .{err});
};
self.mutex.lock();
}
@@ -110,7 +110,7 @@ pub const LightPanda = struct {
var res = try req.sendSync(.{});
while (try res.next()) |_| {}
if (res.header.status != 200) {
log.warn(.telemetry, "server error", .{ .status = res.header.status });
log.warn("server error status: {d}", .{res.header.status});
}
}

@@ -3,11 +3,11 @@ const builtin = @import("builtin");

const Allocator = std.mem.Allocator;

const log = @import("../log.zig");
const App = @import("../app.zig").App;
const Notification = @import("../notification.zig").Notification;

const uuidv4 = @import("../id.zig").uuidv4;
const log = std.log.scoped(.telemetry);
const IID_FILE = "iid";

pub const Telemetry = TelemetryT(blk: {
@@ -32,7 +32,7 @@ fn TelemetryT(comptime P: type) type {
pub fn init(app: *App, run_mode: App.RunMode) Self {
const disabled = std.process.hasEnvVarConstant("LIGHTPANDA_DISABLE_TELEMETRY");
if (builtin.mode != .Debug and builtin.is_test == false) {
log.info(.telemetry, "telemetry status", .{ .disabled = disabled });
log.info("telemetry {s}", .{if (disabled) "disabled" else "enabled"});
}

return .{
@@ -53,7 +53,7 @@ fn TelemetryT(comptime P: type) type {
}
const iid: ?[]const u8 = if (self.iid) |*iid| iid else null;
self.provider.send(iid, self.run_mode, event) catch |err| {
log.warn(.telemetry, "record error", .{ .err = err, .type = @tagName(std.meta.activeTag(event)) });
log.warn("failed to record event: {}", .{err});
};
}

@@ -94,7 +94,7 @@ fn getOrCreateId(app_dir_path_: ?[]const u8) ?[36]u8 {

var buf: [37]u8 = undefined;
var dir = std.fs.openDirAbsolute(app_dir_path, .{}) catch |err| {
log.warn(.telemetry, "data directory open error", .{ .path = app_dir_path, .err = err });
log.warn("failed to open data directory '{s}': {}", .{ app_dir_path, err });
return null;
};
defer dir.close();
@@ -102,7 +102,7 @@ fn getOrCreateId(app_dir_path_: ?[]const u8) ?[36]u8 {
const data = dir.readFile(IID_FILE, &buf) catch |err| switch (err) {
error.FileNotFound => &.{},
else => {
log.warn(.telemetry, "ID read error", .{ .path = app_dir_path, .err = err });
log.warn("failed to open id file: {}", .{err});
return null;
},
};
@@ -115,7 +115,7 @@ fn getOrCreateId(app_dir_path_: ?[]const u8) ?[36]u8 {

uuidv4(&id);
dir.writeFile(.{ .sub_path = IID_FILE, .data = &id }) catch |err| {
log.warn(.telemetry, "ID write error", .{ .path = app_dir_path, .err = err });
log.warn("failed to write to id file: {}", .{err});
return null;
};
return id;
@@ -183,7 +183,7 @@ test "telemetry: getOrCreateId" {
}

test "telemetry: sends event to provider" {
var app = testing.createApp(.{});
var app = testing.app(.{});
defer app.deinit();

var telemetry = TelemetryT(MockProvider).init(app, .serve);
136 src/testing.zig
@@ -171,7 +171,7 @@ pub fn print(comptime fmt: []const u8, args: anytype) void {
}

// dummy opts incase we want to add something, and not have to break all the callers
pub fn createApp(_: anytype) *App {
pub fn app(_: anytype) *App {
return App.init(allocator, .{ .run_mode = .serve }) catch unreachable;
}

@@ -211,16 +211,14 @@ pub const Document = struct {
arena: std.heap.ArenaAllocator,

pub fn init(html: []const u8) !Document {
var arena = std.heap.ArenaAllocator.init(allocator);
parser.deinit();
try parser.init(arena.allocator());
try parser.init();

var fbs = std.io.fixedBufferStream(html);
const Elements = @import("browser/html/elements.zig");
const html_doc = try parser.documentHTMLParse(fbs.reader(), "utf-8", &Elements.createElement);
const html_doc = try parser.documentHTMLParse(fbs.reader(), "utf-8");

return .{
.arena = arena,
.arena = std.heap.ArenaAllocator.init(allocator),
.doc = html_doc,
};
}
@@ -369,79 +367,114 @@ pub const tracking_allocator = @import("root").tracking_allocator.allocator();
pub const JsRunner = struct {
const URL = @import("url.zig").URL;
const Env = @import("browser/env.zig").Env;
const Page = @import("browser/page.zig").Page;
const Browser = @import("browser/browser.zig").Browser;
const Loop = @import("runtime/loop.zig").Loop;
const HttpClient = @import("http/client.zig").Client;
const storage = @import("browser/storage/storage.zig");
const Window = @import("browser/html/window.zig").Window;
const Renderer = @import("browser/renderer.zig").Renderer;
const SessionState = @import("browser/env.zig").SessionState;

app: *App,
page: *Page,
browser: *Browser,
url: URL,
env: *Env,
loop: Loop,
window: Window,
state: SessionState,
arena: Allocator,
renderer: Renderer,
http_client: HttpClient,
scope: *Env.Scope,
executor: Env.ExecutionWorld,
storage_shelf: storage.Shelf,
cookie_jar: storage.CookieJar,

fn init(alloc: Allocator, opts: RunnerOpts) !JsRunner {
fn init(parent_allocator: Allocator, opts: RunnerOpts) !*JsRunner {
parser.deinit();
try parser.init();

var app = try App.init(alloc, .{
.run_mode = .serve,
.tls_verify_host = false,
});
errdefer app.deinit();
const aa = try parent_allocator.create(std.heap.ArenaAllocator);
aa.* = std.heap.ArenaAllocator.init(parent_allocator);
errdefer aa.deinit();

const browser = try alloc.create(Browser);
errdefer alloc.destroy(browser);
const arena = aa.allocator();
const self = try arena.create(JsRunner);
self.arena = arena;

browser.* = try Browser.init(app);
errdefer browser.deinit();
self.env = try Env.init(arena, .{});
errdefer self.env.deinit();

var session = try browser.newSession();
self.url = try URL.parse(opts.url, null);

var page = try session.createPage();

// a bit hacky, but since we aren't going through page.navigate, there's
// some minimum setup we need to do
page.url = try URL.parse(opts.url, null);
try page.window.replaceLocation(.{
.url = try page.url.toWebApi(page.arena),
});
self.renderer = Renderer.init(arena);
self.cookie_jar = storage.CookieJar.init(arena);
self.loop = try Loop.init(arena);
errdefer self.loop.deinit();

var html = std.io.fixedBufferStream(opts.html);
try page.loadHTMLDoc(html.reader(), "UTF-8");
const document = try parser.documentHTMLParse(html.reader(), "UTF-8");

return .{
.app = app,
.page = page,
.browser = browser,
self.window = try Window.create(null, null);
try self.window.replaceDocument(document);
try self.window.replaceLocation(.{
.url = try self.url.toWebApi(arena),
});

self.http_client = try HttpClient.init(arena, 1, .{
.tls_verify_host = false,
});

self.state = .{
.arena = arena,
.loop = &self.loop,
.url = &self.url,
.window = &self.window,
.renderer = &self.renderer,
.cookie_jar = &self.cookie_jar,
.request_factory = self.http_client.requestFactory(null),
};

self.storage_shelf = storage.Shelf.init(arena);
self.window.setStorageShelf(&self.storage_shelf);

self.executor = try self.env.newExecutionWorld();
errdefer self.executor.deinit();

self.scope = try self.executor.startScope(&self.window, &self.state, {}, true);
return self;
}

pub fn deinit(self: *JsRunner) void {
self.browser.deinit();
self.app.allocator.destroy(self.browser);
self.app.deinit();
self.loop.deinit();
self.executor.deinit();
self.env.deinit();
self.http_client.deinit();
self.storage_shelf.deinit();

const arena: *std.heap.ArenaAllocator = @ptrCast(@alignCast(self.arena.ptr));
arena.deinit();
arena.child_allocator.destroy(arena);
}

const RunOpts = struct {};
pub const Case = std.meta.Tuple(&.{ []const u8, ?[]const u8 });
pub fn testCases(self: *JsRunner, cases: []const Case, _: RunOpts) !void {
const js_context = self.page.main_context;
const arena = self.page.arena;

const start = try std.time.Instant.now();

for (cases, 0..) |case, i| {
var try_catch: Env.TryCatch = undefined;
try_catch.init(js_context);
try_catch.init(self.scope);
defer try_catch.deinit();

const value = js_context.exec(case.@"0", null) catch |err| {
if (try try_catch.err(arena)) |msg| {
const value = self.scope.exec(case.@"0", null) catch |err| {
if (try try_catch.err(self.arena)) |msg| {
std.debug.print("{s}\n\nCase: {d}\n{s}\n", .{ msg, i + 1, case.@"0" });
}
return err;
};
try self.page.loop.run();
try self.loop.run();
@import("root").js_runner_duration += std.time.Instant.since(try std.time.Instant.now(), start);

if (case.@"1") |expected| {
const actual = try value.toString(arena);
const actual = try value.toString(self.arena);
if (std.mem.eql(u8, expected, actual) == false) {
std.debug.print("Expected:\n{s}\n\nGot:\n{s}\n\nCase: {d}\n{s}\n", .{ expected, actual, i + 1, case.@"0" });
return error.UnexpectedResult;
@@ -455,15 +488,12 @@ pub const JsRunner = struct {
}

pub fn eval(self: *JsRunner, src: []const u8, name: ?[]const u8, err_msg: *?[]const u8) !Env.Value {
const js_context = self.page.main_context;
const arena = self.page.arena;

var try_catch: Env.TryCatch = undefined;
try_catch.init(js_context);
try_catch.init(self.scope);
defer try_catch.deinit();

return js_context.exec(src, name) catch |err| {
if (try try_catch.err(arena)) |msg| {
return self.scope.exec(src, name) catch |err| {
if (try try_catch.err(self.arena)) |msg| {
err_msg.* = msg;
std.debug.print("Error running script: {s}\n", .{msg});
}
@@ -487,6 +517,6 @@ const RunnerOpts = struct {
,
};

pub fn jsRunner(alloc: Allocator, opts: RunnerOpts) !JsRunner {
pub fn jsRunner(alloc: Allocator, opts: RunnerOpts) !*JsRunner {
return JsRunner.init(alloc, opts);
}
171 src/url.zig
@@ -4,8 +4,6 @@ const Uri = std.Uri;
const Allocator = std.mem.Allocator;
const WebApiURL = @import("browser/url/url.zig").URL;

pub const stitch = URL.stitch;

pub const URL = struct {
uri: Uri,
raw: []const u8,
@@ -72,7 +70,7 @@ pub const URL = struct {
}

pub fn resolve(self: *const URL, arena: Allocator, url: []const u8) !URL {
var buf = try arena.alloc(u8, 4096);
var buf = try arena.alloc(u8, 1024);
const new_uri = try self.uri.resolve_inplace(url, &buf);
return fromURI(arena, &new_uri);
}
@@ -85,37 +83,14 @@ pub const URL = struct {
return WebApiURL.init(allocator, self.uri);
}

const StitchOpts = struct {
alloc: AllocWhen = .always,

const AllocWhen = enum {
always,
if_needed,
};
};

/// Properly stitches two URL fragments together.
///
/// For URLs with a path, it will replace the last entry with the src.
/// For URLs without a path, it will add src as the path.
pub fn stitch(
allocator: Allocator,
src: []const u8,
base: []const u8,
opts: StitchOpts,
) ![]const u8 {
if (base.len == 0 or isURL(src)) {
if (opts.alloc == .always) {
return allocator.dupe(u8, src);
}
pub fn stitch(allocator: std.mem.Allocator, src: []const u8, base: []const u8) ![]const u8 {
if (base.len == 0) {
return src;
}
if (src.len == 0) {
if (opts.alloc == .always) {
return allocator.dupe(u8, base);
}
return base;
}

const protocol_end: usize = blk: {
if (std.mem.indexOf(u8, base, "://")) |protocol_index| {
@@ -125,86 +100,27 @@ pub const URL = struct {
}
};

const normalized_src = if (src[0] == '/') src[1..] else src;

if (std.mem.lastIndexOfScalar(u8, base[protocol_end..], '/')) |index| {
const last_slash_pos = index + protocol_end;
if (last_slash_pos == base.len - 1) {
return std.fmt.allocPrint(allocator, "{s}{s}", .{ base, normalized_src });
}
return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base[0..last_slash_pos], normalized_src });
}
return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base, normalized_src });
}

pub fn concatQueryString(arena: Allocator, url: []const u8, query_string: []const u8) ![]const u8 {
std.debug.assert(url.len != 0);

if (query_string.len == 0) {
return url;
}

var buf: std.ArrayListUnmanaged(u8) = .empty;

// the most space well need is the url + ('?' or '&') + the query_string
try buf.ensureTotalCapacity(arena, url.len + 1 + query_string.len);
buf.appendSliceAssumeCapacity(url);

if (std.mem.indexOfScalar(u8, url, '?')) |index| {
const last_index = url.len - 1;
if (index != last_index and url[last_index] != '&') {
buf.appendAssumeCapacity('&');
return std.fmt.allocPrint(allocator, "{s}{s}", .{ base, src });
} else {
return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base[0..last_slash_pos], src });
}
} else {
buf.appendAssumeCapacity('?');
return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base, src });
}
buf.appendSliceAssumeCapacity(query_string);
return buf.items;
}
};

fn isURL(url: []const u8) bool {
if (std.mem.startsWith(u8, url, "://")) {
return true;
}

if (url.len < 8) {
return false;
}

if (!std.ascii.startsWithIgnoreCase(url, "http")) {
return false;
}

var pos: usize = 4;
if (url[4] == 's' or url[4] == 'S') {
pos = 5;
}
return std.mem.startsWith(u8, url[pos..], "://");
}

const testing = @import("testing.zig");
test "URL: isURL" {
try testing.expectEqual(true, isURL("://lightpanda.io"));
try testing.expectEqual(true, isURL("://lightpanda.io/about"));
try testing.expectEqual(true, isURL("http://lightpanda.io/about"));
try testing.expectEqual(true, isURL("HttP://lightpanda.io/about"));
try testing.expectEqual(true, isURL("httpS://lightpanda.io/about"));
try testing.expectEqual(true, isURL("HTTPs://lightpanda.io/about"));

try testing.expectEqual(false, isURL("/lightpanda.io"));
try testing.expectEqual(false, isURL("../../about"));
try testing.expectEqual(false, isURL("about"));
}

test "URL: resolve size" {
test "Url resolve size" {
const base = "https://www.lightpande.io";
const url = try URL.parse(base, null);

var url_string: [511]u8 = undefined; // Currently this is the largest url we support, it is however recommmended to at least support 2000 characters
@memset(&url_string, 'a');

var buf: [8192]u8 = undefined; // This is approximately the required size to support the current largest supported URL
var buf: [2048]u8 = undefined; // This is approximately the required size to support the current largest supported URL
var fba = std.heap.FixedBufferAllocator.init(&buf);
const out_url = try url.resolve(fba.allocator(), &url_string);

@@ -213,12 +129,14 @@ test "URL: resolve size" {
try std.testing.expectEqualStrings(out_url.raw[26..], &url_string);
}

const testing = @import("testing.zig");

test "URL: Stitching Base & Src URLs (Basic)" {
const allocator = testing.allocator;

const base = "https://www.google.com/xyz/abc/123";
const src = "something.js";
const result = try URL.stitch(allocator, src, base, .{});
const result = try URL.stitch(allocator, src, base);
defer allocator.free(result);
try testing.expectString("https://www.google.com/xyz/abc/something.js", result);
}
@@ -228,17 +146,7 @@ test "URL: Stitching Base & Src URLs (Just Ending Slash)" {

const base = "https://www.google.com/";
const src = "something.js";
const result = try URL.stitch(allocator, src, base, .{});
defer allocator.free(result);
try testing.expectString("https://www.google.com/something.js", result);
}

test "URL: Stitching Base & Src URLs with leading slash" {
const allocator = testing.allocator;

const base = "https://www.google.com/";
const src = "/something.js";
const result = try URL.stitch(allocator, src, base, .{});
const result = try URL.stitch(allocator, src, base);
defer allocator.free(result);
try testing.expectString("https://www.google.com/something.js", result);
}
@@ -248,7 +156,7 @@ test "URL: Stitching Base & Src URLs (No Ending Slash)" {

const base = "https://www.google.com";
const src = "something.js";
const result = try URL.stitch(allocator, src, base, .{});
const result = try URL.stitch(allocator, src, base);
defer allocator.free(result);
try testing.expectString("https://www.google.com/something.js", result);
}
@@ -258,56 +166,7 @@ test "URL: Stiching Base & Src URLs (Both Local)" {

const base = "./abcdef/123.js";
const src = "something.js";
const result = try URL.stitch(allocator, src, base, .{});
const result = try URL.stitch(allocator, src, base);
defer allocator.free(result);
try testing.expectString("./abcdef/something.js", result);
}

test "URL: Stiching src as full path" {
const allocator = testing.allocator;

const base = "https://www.lightpanda.io/";
const src = "https://lightpanda.io/something.js";
const result = try URL.stitch(allocator, src, base, .{ .alloc = .if_needed });
try testing.expectString("https://lightpanda.io/something.js", result);
}

test "URL: Stitching Base & Src URLs (empty src)" {
const allocator = testing.allocator;

const base = "https://www.google.com/xyz/abc/123";
const src = "";
const result = try URL.stitch(allocator, src, base, .{});
defer allocator.free(result);
try testing.expectString("https://www.google.com/xyz/abc/123", result);
}

test "URL: concatQueryString" {
defer testing.reset();
const arena = testing.arena_allocator;

{
const url = try URL.concatQueryString(arena, "https://www.lightpanda.io/", "");
try testing.expectEqual("https://www.lightpanda.io/", url);
}

{
const url = try URL.concatQueryString(arena, "https://www.lightpanda.io/index?", "");
try testing.expectEqual("https://www.lightpanda.io/index?", url);
}

{
const url = try URL.concatQueryString(arena, "https://www.lightpanda.io/index?", "a=b");
try testing.expectEqual("https://www.lightpanda.io/index?a=b", url);
}

{
const url = try URL.concatQueryString(arena, "https://www.lightpanda.io/index?1=2", "a=b");
try testing.expectEqual("https://www.lightpanda.io/index?1=2&a=b", url);
}

{
const url = try URL.concatQueryString(arena, "https://www.lightpanda.io/index?1=2&", "a=b");
try testing.expectEqual("https://www.lightpanda.io/index?1=2&a=b", url);
}
}
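For reference, a minimal standalone sketch of the path-stitching behavior exercised by the stitch() tests above (an illustration that mirrors the variant shown with a normalized leading slash; simplified, always allocates, and not the project's code):

const std = @import("std");

// Sketch: replace the last path segment of `base` with `src`, as described by
// the stitch() doc comment above. `stitchSketch` is a hypothetical name.
fn stitchSketch(allocator: std.mem.Allocator, src: []const u8, base: []const u8) ![]const u8 {
    if (base.len == 0) return allocator.dupe(u8, src);
    if (src.len == 0) return allocator.dupe(u8, base);

    // Skip past "scheme://" so the host's slashes aren't mistaken for path slashes.
    const protocol_end: usize = if (std.mem.indexOf(u8, base, "://")) |i| i + 3 else 0;
    const normalized_src = if (src[0] == '/') src[1..] else src;

    if (std.mem.lastIndexOfScalar(u8, base[protocol_end..], '/')) |index| {
        const last_slash_pos = index + protocol_end;
        if (last_slash_pos == base.len - 1) {
            return std.fmt.allocPrint(allocator, "{s}{s}", .{ base, normalized_src });
        }
        return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base[0..last_slash_pos], normalized_src });
    }
    return std.fmt.allocPrint(allocator, "{s}/{s}", .{ base, normalized_src });
}

test "stitchSketch mirrors the stitch tests" {
    const a = std.testing.allocator;
    const result = try stitchSketch(a, "something.js", "https://www.google.com/xyz/abc/123");
    defer a.free(result);
    try std.testing.expectEqualStrings("https://www.google.com/xyz/abc/something.js", result);
}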
2 vendor/netsurf/libdom vendored
Submodule vendor/netsurf/libdom updated: f22449c52e...b0f4a43147